From 1ff7c2febe0bb1d901b5d54a64306da741c0d50f Mon Sep 17 00:00:00 2001 From: Valerio Crini Date: Sun, 3 May 2015 21:10:14 +0200 Subject: [PATCH 001/246] removing misstype --- doc/pymode.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/pymode.txt b/doc/pymode.txt index ae2a5d27..6f5bd5fc 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -443,7 +443,7 @@ imported) from project *'g:pymode_rope_autoimport'* Load modules to autoimport by default *'g:pymode_rope_autoimport_modules'* > - let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime']) + let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime'] Offer to unresolved import object after completion. > From a81cdc4f4db86821b20930107d3c214a1b685fb2 Mon Sep 17 00:00:00 2001 From: monteiro Date: Fri, 18 Nov 2016 08:23:25 -0200 Subject: [PATCH 002/246] improved folding in various cases --- autoload/pymode/folding.vim | 120 +++++++++++++++++++++++++++++++----- 1 file changed, 104 insertions(+), 16 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 3ed61bc5..b8362722 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -1,16 +1,25 @@ " Python-mode folding functions +" Notice that folding is based on single line so complex regular expressions +" that take previous line into consideration are not fit for the job. +" Regex definitions for correct folding let s:def_regex = g:pymode_folding_regex let s:blank_regex = '^\s*$' -let s:decorator_regex = '^\s*@' -let s:doc_begin_regex = '^\s*\%("""\|''''''\)' +" Spyder, a very popular IDE for python has a template which includes +" '@author:' ; thus the regex below. +let s:decorator_regex = '^\s*@\(author:\)\@!' +let s:doc_begin_regex = '^\s*[uU]\=\%("""\|''''''\)' let s:doc_end_regex = '\%("""\|''''''\)\s*$' -let s:doc_line_regex = '^\s*\("""\|''''''\).\+\1\s*$' +" This one is needed for the while loop to count for opening and closing +" docstrings. +let s:doc_general_regex = '\%("""\|''''''\)' +let s:doc_line_regex = '^\s*[uU]\=\("""\|''''''\).\+\1\s*$' let s:symbol = matchstr(&fillchars, 'fold:\zs.') " handles multibyte characters if s:symbol == '' let s:symbol = ' ' endif +" '''''''' fun! pymode#folding#text() " {{{ @@ -33,24 +42,29 @@ fun! pymode#folding#text() " {{{ let line = substitute(line, '\t', onetab, 'g') let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) - let line = substitute(line, '\%("""\|''''''\)', '', '') + let line = substitute(line, '[uU]\=\%("""\|''''''\)', '', '') let fillcharcount = windowwidth - len(line) - len(foldedlinecount) + 1 return line . ' ' . repeat(s:symbol, fillcharcount) . ' ' . foldedlinecount endfunction "}}} - fun! pymode#folding#expr(lnum) "{{{ let line = getline(a:lnum) let indent = indent(a:lnum) let prev_line = getline(a:lnum - 1) + let next_line = getline(a:lnum + 1) + " Decorators {{{ if line =~ s:decorator_regex return ">".(indent / &shiftwidth + 1) - endif + endif "}}} + " Definition {{{ if line =~ s:def_regex - " single line def + " If indent of this line is greater or equal than line below + " and previous non blank line does not end with : (that is, is not a + " definition) + " Keep the same indentation if indent(a:lnum) >= indent(a:lnum+1) && getline(prevnonblank(a:lnum)) !~ ':\s*$' return '=' endif @@ -71,16 +85,35 @@ fun! 
pymode#folding#expr(lnum) "{{{ else return ">".(indent / &shiftwidth + 1) endif - endif + endif "}}} - if line =~ s:doc_begin_regex && line !~ s:doc_line_regex && prev_line =~ s:def_regex - return ">".(indent / &shiftwidth + 1) + " Docstrings {{{ + + " TODO: A while loop now counts the number of open and closed folding in + " order to determine if it is a closing or opening folding. + " It is working but looks like it is an overkill. + + " Notice that an effect of this is that other docstring matches will not + " be one liners. + if line =~ s:doc_line_regex + return "=" endif - if line =~ s:doc_end_regex && line !~ s:doc_line_regex - return "<".(indent / &shiftwidth + 1) + if line =~ s:doc_begin_regex + " echom 'just entering' + if s:Is_opening_folding(a:lnum) + " echom 'entering at line ' . a:lnum + return ">".(indent / &shiftwidth + 1) + endif endif + if line =~ s:doc_end_regex + if !s:Is_opening_folding(a:lnum) + " echom 'leaving at line ' . a:lnum + return "<".(indent / &shiftwidth + 1) + endif + endif "}}} + " Nested Definitions {{{ " Handle nested defs but only for files shorter than " g:pymode_folding_nest_limit lines due to performance concerns if line('$') < g:pymode_folding_nest_limit && indent(prevnonblank(a:lnum)) @@ -125,18 +158,25 @@ fun! pymode#folding#expr(lnum) "{{{ finally call setpos('.', curpos) endtry - endif + endif " }}} + " Blank Line {{{ if line =~ s:blank_regex if prev_line =~ s:blank_regex - if indent(a:lnum + 1) == 0 && getline(a:lnum + 1) !~ s:blank_regex - return 0 + if indent(a:lnum + 1) == 0 && next_line !~ s:blank_regex && next_line !~ s:doc_general_regex + if s:Is_opening_folding(a:lnum) + " echom a:lnum + return "=" + else + " echom "not " . a:lnum + return 0 + endif endif return -1 else return '=' endif - endif + endif " }}} return '=' @@ -174,4 +214,52 @@ fun! s:BlockEnd(lnum) "{{{ return searchpos('\v^\s{,'.indent('.').'}\S', 'nW')[0] - 1 endfunction "}}} +function! s:Is_opening_folding(lnum) "{{{ + " Helper function to see if docstring is opening or closing + let number_of_folding = 0 " To be analized if odd/even to inform if it is opening or closing. + let has_open_docstring = 0 " To inform is already has an open docstring. + let extra_docstrings = 0 " To help skipping ''' and """ which are not docstrings + + " The idea of this part of the function is to identify real docstrings and + " not just triple quotes (that could be a regular string). + " + " Iterater over all lines from the start until current line (inclusive) + for i in range(1, a:lnum) + let i_line = getline(i) + + if i_line =~ s:doc_line_regex + " echom "case 00 on line " . i + continue + endif + + if i_line =~ s:doc_begin_regex && ! has_open_docstring + " echom "case 01 on line " . i + " This causes the loop to continue if there is a triple quote which + " is not a docstring. + if extra_docstrings > 0 + let extra_docstrings = extra_docstrings - 1 + continue + else + let has_open_docstring = 1 + let number_of_folding = number_of_folding + 1 + endif + " If it is an end doc and has an open docstring. + elseif i_line =~ s:doc_end_regex && has_open_docstring + " echom "case 02 on line " . i + let has_open_docstring = 0 + let number_of_folding = number_of_folding + 1 + + elseif i_line =~ s:doc_general_regex + " echom "extra docstrings on line " . 
i + let extra_docstrings = extra_docstrings + 1 + endif + endfor + + if fmod(number_of_folding, 2) == 1 "If odd then it is an opening + return 1 + else + return 0 + endif +endfunction "}}} + " vim: fdm=marker:fdl=0 From 2c6e8981fdf051800d6d86f2491e6ecd22a1de42 Mon Sep 17 00:00:00 2001 From: monteiro Date: Fri, 18 Nov 2016 08:53:25 -0200 Subject: [PATCH 003/246] commiting to trigger travis support --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index e10ed9f1..7097078e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,3 +4,4 @@ rvm: - 1.9.3 script: - make travis + From 26353f3ad45897f87c65b407bcc2c148799389b7 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 16 Dec 2016 17:15:00 -0800 Subject: [PATCH 004/246] Update readme. --- README.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 5c897bc1..7045f93c 100644 --- a/README.rst +++ b/README.rst @@ -8,8 +8,7 @@ *The project needs maintainers and contributors* -Actually, I have no time for support the project, so if you feel yourself as -strong don't be hesitate to contact me. +Slack Channel: https://python-mode.herokuapp.com/ ----- From 6e417691cf4441300ee23b5be7d1242b9e3cb977 Mon Sep 17 00:00:00 2001 From: Bryce Guinta Date: Fri, 16 Dec 2016 21:25:21 -0700 Subject: [PATCH 005/246] breakpoint: Use tabs if expandtab is false. #718 --- autoload/pymode/breakpoint.vim | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index cf7b95be..c3189aad 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -35,7 +35,13 @@ fun! pymode#breakpoint#operate(lnum) "{{{ normal dd else let plnum = prevnonblank(a:lnum) - call append(line('.')-1, repeat(' ', indent(plnum)).g:pymode_breakpoint_cmd) + if &expandtab + let indents = repeat(' ', indent(plnum)) + else + let indents = repeat("\t", plnum / &shiftwidth) + endif + + call append(line('.')-1, indents.g:pymode_breakpoint_cmd) normal k endif From df2dff9afa294e37c508aeb29f354241845792d0 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 17 Dec 2016 13:53:06 -0800 Subject: [PATCH 006/246] Update Rope refactoring library. 
--- README.rst | 9 +- pylama.ini | 2 +- pymode/{libs2 => libs}/rope/__init__.py | 3 +- pymode/{libs2 => libs}/rope/base/__init__.py | 0 pymode/{libs2 => libs}/rope/base/arguments.py | 0 pymode/{libs2 => libs}/rope/base/ast.py | 15 +- pymode/{libs3 => libs}/rope/base/astutils.py | 3 + pymode/{libs2 => libs}/rope/base/builtins.py | 45 +- pymode/{libs2 => libs}/rope/base/change.py | 2 +- .../{libs2 => libs}/rope/base/codeanalyze.py | 40 +- .../rope/base/default_config.py | 11 +- pymode/{libs2 => libs}/rope/base/evaluate.py | 7 +- .../{libs2 => libs}/rope/base/exceptions.py | 0 .../{libs2 => libs}/rope/base/fscommands.py | 34 +- pymode/{libs2 => libs}/rope/base/history.py | 0 pymode/{libs2 => libs}/rope/base/libutils.py | 0 .../{libs2 => libs}/rope/base/oi/__init__.py | 0 pymode/{libs2 => libs}/rope/base/oi/doa.py | 9 +- pymode/libs/rope/base/oi/docstrings.py | 226 +++++ .../{libs2 => libs}/rope/base/oi/memorydb.py | 21 + .../{libs2 => libs}/rope/base/oi/objectdb.py | 12 +- .../rope/base/oi/objectinfo.py | 0 pymode/{libs2 => libs}/rope/base/oi/runmod.py | 32 +- pymode/{libs2 => libs}/rope/base/oi/soa.py | 0 pymode/{libs2 => libs}/rope/base/oi/soi.py | 52 +- .../{libs2 => libs}/rope/base/oi/transform.py | 0 pymode/{libs2 => libs}/rope/base/prefs.py | 0 pymode/{libs2 => libs}/rope/base/project.py | 18 +- pymode/{libs2 => libs}/rope/base/pycore.py | 0 pymode/{libs2 => libs}/rope/base/pynames.py | 0 .../{libs2 => libs}/rope/base/pynamesdef.py | 0 pymode/{libs2 => libs}/rope/base/pyobjects.py | 2 +- .../{libs2 => libs}/rope/base/pyobjectsdef.py | 54 +- pymode/{libs2 => libs}/rope/base/pyscopes.py | 0 .../rope/base/resourceobserver.py | 0 pymode/{libs2 => libs}/rope/base/resources.py | 2 +- pymode/{libs2 => libs}/rope/base/simplify.py | 0 pymode/{libs2 => libs}/rope/base/stdmods.py | 25 +- .../{libs2 => libs}/rope/base/taskhandle.py | 0 .../rope/base/utils/__init__.py} | 0 pymode/libs/rope/base/utils/datastructures.py | 67 ++ pymode/libs/rope/base/utils/pycompat.py | 45 + pymode/{libs2 => libs}/rope/base/worder.py | 0 .../{libs2 => libs}/rope/contrib/__init__.py | 0 .../rope/contrib/autoimport.py | 0 .../rope/contrib/changestack.py | 0 .../rope/contrib/codeassist.py | 35 +- .../rope/contrib/finderrors.py | 2 +- pymode/{libs2 => libs}/rope/contrib/findit.py | 0 .../rope/contrib/fixmodnames.py | 0 .../{libs2 => libs}/rope/contrib/fixsyntax.py | 6 +- .../{libs2 => libs}/rope/contrib/generate.py | 0 .../{libs2 => libs}/rope/refactor/__init__.py | 0 .../rope/refactor/change_signature.py | 4 +- .../rope/refactor/encapsulate_field.py | 0 .../{libs2 => libs}/rope/refactor/extract.py | 44 +- .../rope/refactor/functionutils.py | 0 .../rope/refactor/importutils/__init__.py | 9 +- .../rope/refactor/importutils/actions.py | 0 .../rope/refactor/importutils/importinfo.py | 0 .../refactor/importutils/module_imports.py | 97 +-- .../{libs2 => libs}/rope/refactor/inline.py | 2 +- .../rope/refactor/introduce_factory.py | 0 .../rope/refactor/introduce_parameter.py | 0 .../rope/refactor/localtofield.py | 0 .../rope/refactor/method_object.py | 0 pymode/{libs2 => libs}/rope/refactor/move.py | 140 +++- .../rope/refactor/multiproject.py | 0 .../rope/refactor/occurrences.py | 20 +- .../rope/refactor/patchedast.py | 120 ++- .../{libs2 => libs}/rope/refactor/rename.py | 0 .../rope/refactor/restructure.py | 0 .../rope/refactor/similarfinder.py | 2 +- .../rope/refactor/sourceutils.py | 0 .../{libs2 => libs}/rope/refactor/suites.py | 21 +- .../rope/refactor/topackage.py | 0 .../rope/refactor/usefunction.py | 0 
.../rope/refactor/wildcards.py | 0 pymode/libs2/rope/base/astutils.py | 61 -- pymode/libs3/rope/__init__.py | 18 - pymode/libs3/rope/base/__init__.py | 8 - pymode/libs3/rope/base/arguments.py | 109 --- pymode/libs3/rope/base/ast.py | 68 -- pymode/libs3/rope/base/builtins.py | 782 ----------------- pymode/libs3/rope/base/change.py | 448 ---------- pymode/libs3/rope/base/codeanalyze.py | 358 -------- pymode/libs3/rope/base/default_config.py | 86 -- pymode/libs3/rope/base/evaluate.py | 325 -------- pymode/libs3/rope/base/exceptions.py | 61 -- pymode/libs3/rope/base/fscommands.py | 267 ------ pymode/libs3/rope/base/history.py | 235 ------ pymode/libs3/rope/base/libutils.py | 65 -- pymode/libs3/rope/base/oi/__init__.py | 38 - pymode/libs3/rope/base/oi/doa.py | 162 ---- pymode/libs3/rope/base/oi/memorydb.py | 106 --- pymode/libs3/rope/base/oi/objectdb.py | 192 ----- pymode/libs3/rope/base/oi/objectinfo.py | 232 ----- pymode/libs3/rope/base/oi/runmod.py | 215 ----- pymode/libs3/rope/base/oi/soa.py | 136 --- pymode/libs3/rope/base/oi/soi.py | 186 ----- pymode/libs3/rope/base/oi/transform.py | 285 ------- pymode/libs3/rope/base/prefs.py | 41 - pymode/libs3/rope/base/project.py | 375 --------- pymode/libs3/rope/base/pycore.py | 410 --------- pymode/libs3/rope/base/pynames.py | 199 ----- pymode/libs3/rope/base/pynamesdef.py | 55 -- pymode/libs3/rope/base/pyobjects.py | 311 ------- pymode/libs3/rope/base/pyobjectsdef.py | 555 ------------ pymode/libs3/rope/base/pyscopes.py | 313 ------- pymode/libs3/rope/base/resourceobserver.py | 271 ------ pymode/libs3/rope/base/resources.py | 212 ----- pymode/libs3/rope/base/simplify.py | 55 -- pymode/libs3/rope/base/stdmods.py | 43 - pymode/libs3/rope/base/taskhandle.py | 133 --- pymode/libs3/rope/base/utils.py | 78 -- pymode/libs3/rope/base/worder.py | 524 ------------ pymode/libs3/rope/contrib/__init__.py | 7 - pymode/libs3/rope/contrib/autoimport.py | 217 ----- pymode/libs3/rope/contrib/changestack.py | 52 -- pymode/libs3/rope/contrib/codeassist.py | 648 -------------- pymode/libs3/rope/contrib/finderrors.py | 91 -- pymode/libs3/rope/contrib/findit.py | 110 --- pymode/libs3/rope/contrib/fixmodnames.py | 69 -- pymode/libs3/rope/contrib/fixsyntax.py | 178 ---- pymode/libs3/rope/contrib/generate.py | 355 -------- pymode/libs3/rope/refactor/__init__.py | 55 -- .../libs3/rope/refactor/change_signature.py | 340 -------- .../libs3/rope/refactor/encapsulate_field.py | 202 ----- pymode/libs3/rope/refactor/extract.py | 789 ------------------ pymode/libs3/rope/refactor/functionutils.py | 222 ----- .../rope/refactor/importutils/__init__.py | 299 ------- .../rope/refactor/importutils/actions.py | 359 -------- .../rope/refactor/importutils/importinfo.py | 201 ----- .../refactor/importutils/module_imports.py | 451 ---------- pymode/libs3/rope/refactor/inline.py | 615 -------------- .../libs3/rope/refactor/introduce_factory.py | 133 --- .../rope/refactor/introduce_parameter.py | 95 --- pymode/libs3/rope/refactor/localtofield.py | 50 -- pymode/libs3/rope/refactor/method_object.py | 87 -- pymode/libs3/rope/refactor/move.py | 628 -------------- pymode/libs3/rope/refactor/multiproject.py | 78 -- pymode/libs3/rope/refactor/occurrences.py | 334 -------- pymode/libs3/rope/refactor/patchedast.py | 734 ---------------- pymode/libs3/rope/refactor/rename.py | 216 ----- pymode/libs3/rope/refactor/restructure.py | 307 ------- pymode/libs3/rope/refactor/similarfinder.py | 362 -------- pymode/libs3/rope/refactor/sourceutils.py | 92 -- pymode/libs3/rope/refactor/suites.py | 142 ---- 
pymode/libs3/rope/refactor/topackage.py | 32 - pymode/libs3/rope/refactor/usefunction.py | 171 ---- pymode/libs3/rope/refactor/wildcards.py | 176 ---- pymode/rope.py | 32 +- pymode/utils.py | 3 +- 153 files changed, 1000 insertions(+), 17188 deletions(-) rename pymode/{libs2 => libs}/rope/__init__.py (92%) rename pymode/{libs2 => libs}/rope/base/__init__.py (100%) rename pymode/{libs2 => libs}/rope/base/arguments.py (100%) rename pymode/{libs2 => libs}/rope/base/ast.py (89%) rename pymode/{libs3 => libs}/rope/base/astutils.py (95%) rename pymode/{libs2 => libs}/rope/base/builtins.py (94%) rename pymode/{libs2 => libs}/rope/base/change.py (99%) rename pymode/{libs2 => libs}/rope/base/codeanalyze.py (91%) rename pymode/{libs2 => libs}/rope/base/default_config.py (91%) rename pymode/{libs2 => libs}/rope/base/evaluate.py (98%) rename pymode/{libs2 => libs}/rope/base/exceptions.py (100%) rename pymode/{libs2 => libs}/rope/base/fscommands.py (89%) rename pymode/{libs2 => libs}/rope/base/history.py (100%) rename pymode/{libs2 => libs}/rope/base/libutils.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/__init__.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/doa.py (96%) create mode 100644 pymode/libs/rope/base/oi/docstrings.py rename pymode/{libs2 => libs}/rope/base/oi/memorydb.py (87%) rename pymode/{libs2 => libs}/rope/base/oi/objectdb.py (93%) rename pymode/{libs2 => libs}/rope/base/oi/objectinfo.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/runmod.py (88%) rename pymode/{libs2 => libs}/rope/base/oi/soa.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/soi.py (78%) rename pymode/{libs2 => libs}/rope/base/oi/transform.py (100%) rename pymode/{libs2 => libs}/rope/base/prefs.py (100%) rename pymode/{libs2 => libs}/rope/base/project.py (97%) rename pymode/{libs2 => libs}/rope/base/pycore.py (100%) rename pymode/{libs2 => libs}/rope/base/pynames.py (100%) rename pymode/{libs2 => libs}/rope/base/pynamesdef.py (100%) rename pymode/{libs2 => libs}/rope/base/pyobjects.py (99%) rename pymode/{libs2 => libs}/rope/base/pyobjectsdef.py (92%) rename pymode/{libs2 => libs}/rope/base/pyscopes.py (100%) rename pymode/{libs2 => libs}/rope/base/resourceobserver.py (100%) rename pymode/{libs2 => libs}/rope/base/resources.py (99%) rename pymode/{libs2 => libs}/rope/base/simplify.py (100%) rename pymode/{libs2 => libs}/rope/base/stdmods.py (60%) rename pymode/{libs2 => libs}/rope/base/taskhandle.py (100%) rename pymode/{libs2/rope/base/utils.py => libs/rope/base/utils/__init__.py} (100%) create mode 100644 pymode/libs/rope/base/utils/datastructures.py create mode 100644 pymode/libs/rope/base/utils/pycompat.py rename pymode/{libs2 => libs}/rope/base/worder.py (100%) rename pymode/{libs2 => libs}/rope/contrib/__init__.py (100%) rename pymode/{libs2 => libs}/rope/contrib/autoimport.py (100%) rename pymode/{libs2 => libs}/rope/contrib/changestack.py (100%) rename pymode/{libs2 => libs}/rope/contrib/codeassist.py (96%) rename pymode/{libs2 => libs}/rope/contrib/finderrors.py (98%) rename pymode/{libs2 => libs}/rope/contrib/findit.py (100%) rename pymode/{libs2 => libs}/rope/contrib/fixmodnames.py (100%) rename pymode/{libs2 => libs}/rope/contrib/fixsyntax.py (97%) rename pymode/{libs2 => libs}/rope/contrib/generate.py (100%) rename pymode/{libs2 => libs}/rope/refactor/__init__.py (100%) rename pymode/{libs2 => libs}/rope/refactor/change_signature.py (98%) rename pymode/{libs2 => libs}/rope/refactor/encapsulate_field.py (100%) rename pymode/{libs2 => libs}/rope/refactor/extract.py (95%) rename 
pymode/{libs2 => libs}/rope/refactor/functionutils.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/__init__.py (97%) rename pymode/{libs2 => libs}/rope/refactor/importutils/actions.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/importinfo.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/module_imports.py (89%) rename pymode/{libs2 => libs}/rope/refactor/inline.py (99%) rename pymode/{libs2 => libs}/rope/refactor/introduce_factory.py (100%) rename pymode/{libs2 => libs}/rope/refactor/introduce_parameter.py (100%) rename pymode/{libs2 => libs}/rope/refactor/localtofield.py (100%) rename pymode/{libs2 => libs}/rope/refactor/method_object.py (100%) rename pymode/{libs2 => libs}/rope/refactor/move.py (85%) rename pymode/{libs2 => libs}/rope/refactor/multiproject.py (100%) rename pymode/{libs2 => libs}/rope/refactor/occurrences.py (95%) rename pymode/{libs2 => libs}/rope/refactor/patchedast.py (87%) rename pymode/{libs2 => libs}/rope/refactor/rename.py (100%) rename pymode/{libs2 => libs}/rope/refactor/restructure.py (100%) rename pymode/{libs2 => libs}/rope/refactor/similarfinder.py (99%) rename pymode/{libs2 => libs}/rope/refactor/sourceutils.py (100%) rename pymode/{libs2 => libs}/rope/refactor/suites.py (86%) rename pymode/{libs2 => libs}/rope/refactor/topackage.py (100%) rename pymode/{libs2 => libs}/rope/refactor/usefunction.py (100%) rename pymode/{libs2 => libs}/rope/refactor/wildcards.py (100%) delete mode 100644 pymode/libs2/rope/base/astutils.py delete mode 100644 pymode/libs3/rope/__init__.py delete mode 100644 pymode/libs3/rope/base/__init__.py delete mode 100644 pymode/libs3/rope/base/arguments.py delete mode 100644 pymode/libs3/rope/base/ast.py delete mode 100644 pymode/libs3/rope/base/builtins.py delete mode 100644 pymode/libs3/rope/base/change.py delete mode 100644 pymode/libs3/rope/base/codeanalyze.py delete mode 100644 pymode/libs3/rope/base/default_config.py delete mode 100644 pymode/libs3/rope/base/evaluate.py delete mode 100644 pymode/libs3/rope/base/exceptions.py delete mode 100644 pymode/libs3/rope/base/fscommands.py delete mode 100644 pymode/libs3/rope/base/history.py delete mode 100644 pymode/libs3/rope/base/libutils.py delete mode 100644 pymode/libs3/rope/base/oi/__init__.py delete mode 100644 pymode/libs3/rope/base/oi/doa.py delete mode 100644 pymode/libs3/rope/base/oi/memorydb.py delete mode 100644 pymode/libs3/rope/base/oi/objectdb.py delete mode 100644 pymode/libs3/rope/base/oi/objectinfo.py delete mode 100644 pymode/libs3/rope/base/oi/runmod.py delete mode 100644 pymode/libs3/rope/base/oi/soa.py delete mode 100644 pymode/libs3/rope/base/oi/soi.py delete mode 100644 pymode/libs3/rope/base/oi/transform.py delete mode 100644 pymode/libs3/rope/base/prefs.py delete mode 100644 pymode/libs3/rope/base/project.py delete mode 100644 pymode/libs3/rope/base/pycore.py delete mode 100644 pymode/libs3/rope/base/pynames.py delete mode 100644 pymode/libs3/rope/base/pynamesdef.py delete mode 100644 pymode/libs3/rope/base/pyobjects.py delete mode 100644 pymode/libs3/rope/base/pyobjectsdef.py delete mode 100644 pymode/libs3/rope/base/pyscopes.py delete mode 100644 pymode/libs3/rope/base/resourceobserver.py delete mode 100644 pymode/libs3/rope/base/resources.py delete mode 100644 pymode/libs3/rope/base/simplify.py delete mode 100644 pymode/libs3/rope/base/stdmods.py delete mode 100644 pymode/libs3/rope/base/taskhandle.py delete mode 100644 pymode/libs3/rope/base/utils.py delete mode 100644 pymode/libs3/rope/base/worder.py delete 
mode 100644 pymode/libs3/rope/contrib/__init__.py delete mode 100644 pymode/libs3/rope/contrib/autoimport.py delete mode 100644 pymode/libs3/rope/contrib/changestack.py delete mode 100644 pymode/libs3/rope/contrib/codeassist.py delete mode 100644 pymode/libs3/rope/contrib/finderrors.py delete mode 100644 pymode/libs3/rope/contrib/findit.py delete mode 100644 pymode/libs3/rope/contrib/fixmodnames.py delete mode 100644 pymode/libs3/rope/contrib/fixsyntax.py delete mode 100644 pymode/libs3/rope/contrib/generate.py delete mode 100644 pymode/libs3/rope/refactor/__init__.py delete mode 100644 pymode/libs3/rope/refactor/change_signature.py delete mode 100644 pymode/libs3/rope/refactor/encapsulate_field.py delete mode 100644 pymode/libs3/rope/refactor/extract.py delete mode 100644 pymode/libs3/rope/refactor/functionutils.py delete mode 100644 pymode/libs3/rope/refactor/importutils/__init__.py delete mode 100644 pymode/libs3/rope/refactor/importutils/actions.py delete mode 100644 pymode/libs3/rope/refactor/importutils/importinfo.py delete mode 100644 pymode/libs3/rope/refactor/importutils/module_imports.py delete mode 100644 pymode/libs3/rope/refactor/inline.py delete mode 100644 pymode/libs3/rope/refactor/introduce_factory.py delete mode 100644 pymode/libs3/rope/refactor/introduce_parameter.py delete mode 100644 pymode/libs3/rope/refactor/localtofield.py delete mode 100644 pymode/libs3/rope/refactor/method_object.py delete mode 100644 pymode/libs3/rope/refactor/move.py delete mode 100644 pymode/libs3/rope/refactor/multiproject.py delete mode 100644 pymode/libs3/rope/refactor/occurrences.py delete mode 100644 pymode/libs3/rope/refactor/patchedast.py delete mode 100644 pymode/libs3/rope/refactor/rename.py delete mode 100644 pymode/libs3/rope/refactor/restructure.py delete mode 100644 pymode/libs3/rope/refactor/similarfinder.py delete mode 100644 pymode/libs3/rope/refactor/sourceutils.py delete mode 100644 pymode/libs3/rope/refactor/suites.py delete mode 100644 pymode/libs3/rope/refactor/topackage.py delete mode 100644 pymode/libs3/rope/refactor/usefunction.py delete mode 100644 pymode/libs3/rope/refactor/wildcards.py diff --git a/README.rst b/README.rst index 7045f93c..a47d460e 100644 --- a/README.rst +++ b/README.rst @@ -6,9 +6,9 @@ ----- -*The project needs maintainers and contributors* +*The project needs contributors* -Slack Channel: https://python-mode.herokuapp.com/ +** Python-mode Slack Channel is here: https://python-mode.herokuapp.com/ ** ----- @@ -180,7 +180,10 @@ at https://github.com/klen/python-mode/issues Contributing ============ -See the `AUTHORS` file. +* Kirill Klenov (horneds@gmail.com) +* Bryce Guinta (https://github.com/brycepg) + +Also see the `AUTHORS` file. 
Development of python-mode happens at github: https://github.com/klen/python-mode diff --git a/pylama.ini b/pylama.ini index b8d3f375..0394772f 100644 --- a/pylama.ini +++ b/pylama.ini @@ -5,4 +5,4 @@ linters=pep8,pyflakes,pylint skip=1 [pylama:pylint] -disable=E1120,E1130,E1103,W1401 +disable=E1120,E1130,E1103,W1401,F0001 diff --git a/pymode/libs2/rope/__init__.py b/pymode/libs/rope/__init__.py similarity index 92% rename from pymode/libs2/rope/__init__.py rename to pymode/libs/rope/__init__.py index c8e11f68..624b6279 100644 --- a/pymode/libs2/rope/__init__.py +++ b/pymode/libs/rope/__init__.py @@ -1,8 +1,9 @@ """rope, a python refactoring library""" INFO = __doc__ -VERSION = '0.10.2' +VERSION = '0.10.3' COPYRIGHT = """\ +Copyright (C) 2014-2015 Matej Cepl Copyright (C) 2006-2012 Ali Gholami Rudi Copyright (C) 2009-2012 Anton Gritsay diff --git a/pymode/libs2/rope/base/__init__.py b/pymode/libs/rope/base/__init__.py similarity index 100% rename from pymode/libs2/rope/base/__init__.py rename to pymode/libs/rope/base/__init__.py diff --git a/pymode/libs2/rope/base/arguments.py b/pymode/libs/rope/base/arguments.py similarity index 100% rename from pymode/libs2/rope/base/arguments.py rename to pymode/libs/rope/base/arguments.py diff --git a/pymode/libs2/rope/base/ast.py b/pymode/libs/rope/base/ast.py similarity index 89% rename from pymode/libs2/rope/base/ast.py rename to pymode/libs/rope/base/ast.py index f6a9d88d..d43c83c5 100644 --- a/pymode/libs2/rope/base/ast.py +++ b/pymode/libs/rope/base/ast.py @@ -3,18 +3,23 @@ from rope.base import fscommands +try: + unicode +except NameError: + unicode = str + def parse(source, filename=''): # NOTE: the raw string should be given to `compile` function if isinstance(source, unicode): source = fscommands.unicode_to_file_data(source) - if '\r' in source: - source = source.replace('\r\n', '\n').replace('\r', '\n') - if not source.endswith('\n'): - source += '\n' + if b'\r' in source: + source = source.replace(b'\r\n', b'\n').replace(b'\r', b'\n') + if not source.endswith(b'\n'): + source += b'\n' try: return compile(source, filename, 'exec', _ast.PyCF_ONLY_AST) - except (TypeError, ValueError), e: + except (TypeError, ValueError) as e: error = SyntaxError() error.lineno = 1 error.filename = filename diff --git a/pymode/libs3/rope/base/astutils.py b/pymode/libs/rope/base/astutils.py similarity index 95% rename from pymode/libs3/rope/base/astutils.py rename to pymode/libs/rope/base/astutils.py index 8ace1a92..6c0b3d78 100644 --- a/pymode/libs3/rope/base/astutils.py +++ b/pymode/libs/rope/base/astutils.py @@ -40,6 +40,9 @@ def _added(self, node, levels): def _Name(self, node): self._add_node(node) + def _ExceptHandler(self, node): + self.names.append((node.name, [])) + def _Tuple(self, node): new_levels = [] if self.levels is not None: diff --git a/pymode/libs2/rope/base/builtins.py b/pymode/libs/rope/base/builtins.py similarity index 94% rename from pymode/libs2/rope/base/builtins.py rename to pymode/libs/rope/base/builtins.py index 5bb84859..bc42f720 100644 --- a/pymode/libs2/rope/base/builtins.py +++ b/pymode/libs/rope/base/builtins.py @@ -1,8 +1,13 @@ """This module trys to support builtin types and functions.""" import inspect +try: + raw_input +except NameError: + raw_input = input import rope.base.evaluate -from rope.base import pynames, pyobjects, arguments, utils, ast +from rope.base.utils import pycompat +from rope.base import pynames, pyobjects, arguments, utils class BuiltinModule(pyobjects.AbstractModule): @@ -32,7 +37,7 @@ def 
attributes(self): result.update(self.initial) if self.pycore is not None: submodules = self.pycore._builtin_submodules(self.name) - for name, module in submodules.iteritems(): + for name, module in submodules.items(): result[name] = rope.base.builtins.BuiltinName(module) return result @@ -266,7 +271,10 @@ def __init__(self, holding=None): # Getting methods collector('__getitem__', function=self._list_get) collector('pop', function=self._list_get) - collector('__getslice__', function=self._self_get) + try: + collector('__getslice__', function=self._list_get) + except AttributeError: + pass super(List, self).__init__(list, collector.attributes) @@ -290,6 +298,10 @@ def _self_set(self, context): def _list_get(self, context): if self.holding is not None: + args = context.get_arguments(['self', 'key']) + if (len(args) > 1 and args[1] is not None and + args[1].get_type() == builtins['slice'].get_object()): + return get_list(self.holding) return self.holding return context.get_per_name() @@ -407,7 +419,7 @@ def __init__(self, *objects): if objects: first = objects[0] attributes = { - '__getitem__': BuiltinName(BuiltinFunction(first)), + '__getitem__': BuiltinName(BuiltinFunction(first)), # TODO: add slice support '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)), @@ -487,14 +499,21 @@ def __init__(self): collector = _AttributeCollector(str) collector('__iter__', get_iterator(self_object), check_existence=False) - self_methods = ['__getitem__', '__getslice__', 'capitalize', 'center', - 'decode', 'encode', 'expandtabs', 'join', 'ljust', + self_methods = ['__getitem__', 'capitalize', 'center', + 'encode', 'expandtabs', 'join', 'ljust', 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill'] for method in self_methods: collector(method, self_object) + py2_self_methods = ["__getslice__", "decode"] + for method in py2_self_methods: + try: + collector(method, self_object) + except AttributeError: + pass + for method in ['rsplit', 'split', 'splitlines']: collector(method, get_list(self_object)) @@ -568,7 +587,7 @@ def __init__(self): attributes = {} def add(name, returned=None, function=None): - builtin = getattr(file, name, None) + builtin = getattr(open, name, None) attributes[name] = BuiltinName( BuiltinFunction(returned=returned, function=function, builtin=builtin)) @@ -578,7 +597,7 @@ def add(name, returned=None, function=None): for method in ['close', 'flush', 'lineno', 'isatty', 'seek', 'tell', 'truncate', 'write', 'writelines']: add(method) - super(File, self).__init__(file, attributes) + super(File, self).__init__(open, attributes) get_file = _create_builtin_getter(File) @@ -642,12 +661,12 @@ def get_name(self): return 'lambda' def get_param_names(self, special_args=True): - result = [node.id for node in self.arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in self.arguments.args + if isinstance(node, pycompat.ast_arg_type)] if self.arguments.vararg: - result.append('*' + self.arguments.vararg) + result.append('*' + pycompat.get_ast_arg_arg(self.arguments.vararg)) if self.arguments.kwarg: - result.append('**' + self.arguments.kwarg) + result.append('**' + pycompat.get_ast_arg_arg(self.arguments.kwarg)) return result @property @@ -787,4 +806,4 @@ def _input_function(args): builtin=raw_input)), } -builtins = BuiltinModule('__builtin__', initial=_initial_builtins) +builtins = 
BuiltinModule(pycompat.builtins.__name__, initial=_initial_builtins) diff --git a/pymode/libs2/rope/base/change.py b/pymode/libs/rope/base/change.py similarity index 99% rename from pymode/libs2/rope/base/change.py rename to pymode/libs/rope/base/change.py index e9764484..fe2ebf43 100644 --- a/pymode/libs2/rope/base/change.py +++ b/pymode/libs/rope/base/change.py @@ -369,7 +369,7 @@ def _create_resource(self, file_name, kind='file'): fscommands.create_file(resource_path) else: fscommands.create_folder(resource_path) - except IOError, e: + except IOError as e: raise exceptions.RopeError(e) diff --git a/pymode/libs2/rope/base/codeanalyze.py b/pymode/libs/rope/base/codeanalyze.py similarity index 91% rename from pymode/libs2/rope/base/codeanalyze.py rename to pymode/libs/rope/base/codeanalyze.py index 87061912..1704e9ad 100644 --- a/pymode/libs2/rope/base/codeanalyze.py +++ b/pymode/libs/rope/base/codeanalyze.py @@ -19,9 +19,7 @@ def get_changed(self): if not self.changes: return None - def compare_changes(change1, change2): - return cmp(change1[:2], change2[:2]) - self.changes.sort(compare_changes) + self.changes.sort(key=lambda x: x[:2]) pieces = [] last_changed = 0 for change in self.changes: @@ -131,31 +129,31 @@ def __call__(self): i += 1 return result - _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]') + # Matches all backslashes before the token, to detect escaped quotes + _main_tokens = re.compile(r'(\\*)((\'\'\'|"""|\'|")|#|\[|\]|\{|\}|\(|\))') def _analyze_line(self, line): - char = None - for match in self._main_chars.finditer(line): - char = match.group() - i = match.start() - if char in '\'"': + token = None + for match in self._main_tokens.finditer(line): + prefix = match.group(1) + token = match.group(2) + # Skip any tokens which are escaped + if len(prefix) % 2 == 1: + continue + if token in ["'''", '"""', "'", '"']: if not self.in_string: - self.in_string = char - if char * 3 == line[i:i + 3]: - self.in_string = char * 3 - elif self.in_string == line[i:i + len(self.in_string)] and \ - not (i > 0 and line[i - 1] == '\\' and - not (i > 1 and line[i - 2] == '\\')): + self.in_string = token + elif self.in_string == token: self.in_string = '' if self.in_string: continue - if char == '#': + if token == '#': break - if char in '([{': + if token in '([{': self.open_count += 1 - elif char in ')]}': + elif token in ')]}': self.open_count -= 1 - if line and char != '#' and line.endswith('\\'): + if line and token != '#' and line.endswith('\\'): self.continuation = True else: self.continuation = False @@ -177,7 +175,7 @@ def logical_line_in(self, line_number): block_start = get_block_start(self.lines, line_number, indents) try: return self._block_logical_line(block_start, line_number) - except IndentationError, e: + except IndentationError as e: tries += 1 if tries == 5: raise e @@ -222,7 +220,7 @@ def _calculate_logical(self, readline, line_number): if line_number <= end: return (start, end) last_end = end + 1 - except tokenize.TokenError, e: + except tokenize.TokenError as e: current = e.args[1][0] return (last_end, max(last_end, current - 1)) return (last_end, None) diff --git a/pymode/libs2/rope/base/default_config.py b/pymode/libs/rope/base/default_config.py similarity index 91% rename from pymode/libs2/rope/base/default_config.py rename to pymode/libs/rope/base/default_config.py index 3745e306..45e1fb46 100644 --- a/pymode/libs2/rope/base/default_config.py +++ b/pymode/libs/rope/base/default_config.py @@ -1,4 +1,5 @@ # The default ``config.py`` +# flake8: noqa def 
set_prefs(prefs): @@ -14,8 +15,10 @@ def set_prefs(prefs): # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' prefs['ignored_resources'] = [ - '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', - '.tox', '.env', 'node_modules', 'bower_components'] + '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', + '.git', '.tox', '.env', 'env', 'venv', 'node_modules', + 'bower_components' + ] # Specifies which files should be considered python files. It is # useful when you have scripts inside your project. Only files @@ -80,6 +83,10 @@ def set_prefs(prefs): # appear in the importing namespace. prefs['ignore_bad_imports'] = False + # If `True`, rope will insert new module imports as + # `from import ` by default. + prefs['prefer_module_from_imports'] = False + # If `True`, rope will transform a comma list of imports into # multiple separate import statements when organizing # imports. diff --git a/pymode/libs2/rope/base/evaluate.py b/pymode/libs/rope/base/evaluate.py similarity index 98% rename from pymode/libs2/rope/base/evaluate.py rename to pymode/libs/rope/base/evaluate.py index faf09407..f4323923 100644 --- a/pymode/libs2/rope/base/evaluate.py +++ b/pymode/libs/rope/base/evaluate.py @@ -2,6 +2,7 @@ import rope.base.pynames import rope.base.pyobjects from rope.base import ast, astutils, exceptions, pyobjects, arguments, worder +from rope.base.utils import pycompat BadIdentifierError = exceptions.BadIdentifierError @@ -290,7 +291,11 @@ def _Subscript(self, node): self._call_function(node.value, '__getitem__', [node.slice.value]) elif isinstance(node.slice, ast.Slice): - self._call_function(node.value, '__getslice__') + self._call_function(node.value, '__getitem__', + [node.slice]) + + def _Slice(self, node): + self.result = self._get_builtin_name('slice') def _call_function(self, node, function_name, other_args=None): pyname = eval_node(self.scope, node) diff --git a/pymode/libs2/rope/base/exceptions.py b/pymode/libs/rope/base/exceptions.py similarity index 100% rename from pymode/libs2/rope/base/exceptions.py rename to pymode/libs/rope/base/exceptions.py diff --git a/pymode/libs2/rope/base/fscommands.py b/pymode/libs/rope/base/fscommands.py similarity index 89% rename from pymode/libs2/rope/base/fscommands.py rename to pymode/libs/rope/base/fscommands.py index daf118a0..3564ed91 100644 --- a/pymode/libs2/rope/base/fscommands.py +++ b/pymode/libs/rope/base/fscommands.py @@ -10,6 +10,12 @@ import shutil import subprocess +import rope.base.utils.pycompat as pycompat + +try: + unicode +except NameError: + unicode = str def create_fscommands(root): dirlist = os.listdir(root) @@ -240,29 +246,43 @@ def read_file_coding(path): def read_str_coding(source): + if type(source) == bytes: + newline = b'\n' + else: + newline = '\n' + #try: + # source = source.decode("utf-8") + #except AttributeError: + # pass try: - first = source.index('\n') + 1 - second = source.index('\n', first) + 1 + first = source.index(newline) + 1 + second = source.index(newline, first) + 1 except ValueError: second = len(source) return _find_coding(source[:second]) def _find_coding(text): - coding = 'coding' + if isinstance(text, pycompat.str): + text = text.encode('utf-8') + coding = b'coding' + to_chr = chr if pycompat.PY3 else lambda x: x try: start = text.index(coding) + len(coding) - if text[start] not in '=:': + if text[start] not in b'=:': return start += 1 - while start < len(text) and text[start].isspace(): + while start < len(text) and 
to_chr(text[start]).isspace(): start += 1 end = start while end < len(text): c = text[end] - if not c.isalnum() and c not in '-_': + if not to_chr(c).isalnum() and c not in b'-_': break end += 1 - return text[start:end] + result = text[start:end] + if isinstance(result, bytes): + result = result.decode('utf-8') + return result except ValueError: pass diff --git a/pymode/libs2/rope/base/history.py b/pymode/libs/rope/base/history.py similarity index 100% rename from pymode/libs2/rope/base/history.py rename to pymode/libs/rope/base/history.py diff --git a/pymode/libs2/rope/base/libutils.py b/pymode/libs/rope/base/libutils.py similarity index 100% rename from pymode/libs2/rope/base/libutils.py rename to pymode/libs/rope/base/libutils.py diff --git a/pymode/libs2/rope/base/oi/__init__.py b/pymode/libs/rope/base/oi/__init__.py similarity index 100% rename from pymode/libs2/rope/base/oi/__init__.py rename to pymode/libs/rope/base/oi/__init__.py diff --git a/pymode/libs2/rope/base/oi/doa.py b/pymode/libs/rope/base/oi/doa.py similarity index 96% rename from pymode/libs2/rope/base/oi/doa.py rename to pymode/libs/rope/base/oi/doa.py index 1b2a00fc..3f314c66 100644 --- a/pymode/libs2/rope/base/oi/doa.py +++ b/pymode/libs/rope/base/oi/doa.py @@ -1,4 +1,7 @@ -import cPickle as pickle +try: + import pickle +except ImportError: + import cPickle as pickle import marshal import os import socket @@ -62,7 +65,7 @@ def _init_data_receiving(self): self.receiving_thread.start() def _receive_information(self): - #temp = open('/dev/shm/info', 'w') + #temp = open('/dev/shm/info', 'wb') for data in self.receiver.receive_data(): self.analyze_data(data) #temp.write(str(data) + '\n') @@ -125,7 +128,7 @@ def get_send_info(self): def receive_data(self): conn, addr = self.server_socket.accept() self.server_socket.close() - my_file = conn.makefile('r') + my_file = conn.makefile('rb') while True: try: yield pickle.load(my_file) diff --git a/pymode/libs/rope/base/oi/docstrings.py b/pymode/libs/rope/base/oi/docstrings.py new file mode 100644 index 00000000..4519e126 --- /dev/null +++ b/pymode/libs/rope/base/oi/docstrings.py @@ -0,0 +1,226 @@ +""" +Hinting the type using docstring of class/function. + +It's an irreplaceable thing if you are using Dependency Injection with passive class: +http://www.martinfowler.com/articles/injection.html + +Some code extracted (or based on code) from: +https://github.com/davidhalter/jedi/blob/b489019f5bd5750051122b94cc767df47751ecb7/jedi/evaluate/docstrings.py +Thanks to @davidhalter for this utils under MIT License. 
+ +Similar solutions: + + - https://www.jetbrains.com/pycharm/help/type-hinting-in-pycharm.html + - https://www.python.org/dev/peps/pep-0484/#type-comments + - http://www.pydev.org/manual_adv_type_hints.html + - https://jedi.readthedocs.org/en/latest/docs/features.html#type-hinting + +Discussions: + + - https://groups.google.com/d/topic/rope-dev/JlAzmZ83K1M/discussion + - https://groups.google.com/d/topic/rope-dev/LCFNN98vckI/discussion + +""" +import re +from ast import literal_eval + +from rope.base.exceptions import AttributeNotFoundError +from rope.base.evaluate import ScopeNameFinder +from rope.base.pyobjects import PyClass, PyFunction + +PEP0484_PATTERNS = [ + re.compile(r'type:\s*([^\n, ]+)'), +] + +DOCSTRING_PARAM_PATTERNS = [ + r'\s*:type\s+%s:\s*([^\n, ]+)', # Sphinx + r'\s*:param\s+(\w+)\s+%s:[^\n]+', # Sphinx param with type + r'\s*@type\s+%s:\s*([^\n, ]+)', # Epydoc +] + +DOCSTRING_RETURN_PATTERNS = [ + re.compile(r'\s*:rtype:\s*([^\n, ]+)', re.M), # Sphinx + re.compile(r'\s*@rtype:\s*([^\n, ]+)', re.M), # Epydoc +] + +REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`') + +try: + from numpydoc.docscrape import NumpyDocString +except ImportError: + def _search_param_in_numpydocstr(docstr, param_str): + return [] +else: + def _search_param_in_numpydocstr(docstr, param_str): + """Search `docstr` (in numpydoc format) for type(-s) of `param_str`.""" + params = NumpyDocString(docstr)._parsed_data['Parameters'] + for p_name, p_type, p_descr in params: + if p_name == param_str: + m = re.match('([^,]+(,[^,]+)*?)(,[ ]*optional)?$', p_type) + if m: + p_type = m.group(1) + + if p_type.startswith('{'): + types = set(type(x).__name__ for x in literal_eval(p_type)) + return list(types) + else: + return [p_type] + return [] + + +def hint_pep0484(pyname): + from rope.base.oi.soi import _get_lineno_for_node + lineno = _get_lineno_for_node(pyname.assignments[0].ast_node) + holding_scope = pyname.module.get_scope().get_inner_scope_for_line(lineno) + line = holding_scope._get_global_scope()._scope_finder.lines.get_line(lineno) + if '#' in line: + type_strs = _search_type_in_pep0484(line.split('#', 1)[1]) + if type_strs: + return _resolve_type(type_strs[0], holding_scope.pyobject) + + +def _search_type_in_pep0484(code): + """ For more info see: + https://www.python.org/dev/peps/pep-0484/#type-comments + + >>> _search_type_in_pep0484('type: int') + ['int'] + """ + for p in PEP0484_PATTERNS: + match = p.search(code) + if match: + return [match.group(1)] + + +def hint_param(pyfunc, param_name): + type_strs = None + func = pyfunc + while not type_strs and func: + if func.get_doc(): + type_strs = _search_param_in_docstr(func.get_doc(), param_name) + func = _get_superfunc(func) + + if type_strs: + return _resolve_type(type_strs[0], pyfunc) + + +def _get_superfunc(pyfunc): + + if not isinstance(pyfunc.parent, PyClass): + return + + for cls in _get_mro(pyfunc.parent)[1:]: + try: + superfunc = cls.get_attribute(pyfunc.get_name()).get_object() + except AttributeNotFoundError: + pass + else: + if isinstance(superfunc, PyFunction): + return superfunc + + +def _get_mro(pyclass): + # FIXME: to use real mro() result + l = [pyclass] + for cls in l: + for super_cls in cls.get_superclasses(): + if isinstance(super_cls, PyClass) and super_cls not in l: + l.append(super_cls) + return l + + +def _resolve_type(type_name, pyobj): + type_ = None + if '.' 
not in type_name: + try: + type_ = pyobj.get_module().get_scope().get_name(type_name).get_object() + except Exception: + pass + else: + mod_name, attr_name = type_name.rsplit('.', 1) + try: + mod_finder = ScopeNameFinder(pyobj.get_module()) + mod = mod_finder._find_module(mod_name).get_object() + type_ = mod.get_attribute(attr_name).get_object() + except Exception: + pass + return type_ + + +def _search_param_in_docstr(docstr, param_str): + """ + Search `docstr` for type(-s) of `param_str`. + + >>> _search_param_in_docstr(':type param: int', 'param') + ['int'] + >>> _search_param_in_docstr('@type param: int', 'param') + ['int'] + >>> _search_param_in_docstr( + ... ':type param: :class:`threading.Thread`', 'param') + ['threading.Thread'] + >>> bool(_search_param_in_docstr('no document', 'param')) + False + >>> _search_param_in_docstr(':param int param: some description', 'param') + ['int'] + + """ + patterns = [re.compile(p % re.escape(param_str)) + for p in DOCSTRING_PARAM_PATTERNS] + for pattern in patterns: + match = pattern.search(docstr) + if match: + return [_strip_rst_role(match.group(1))] + + return (_search_param_in_numpydocstr(docstr, param_str) or + []) + + +def _strip_rst_role(type_str): + """ + Strip off the part looks like a ReST role in `type_str`. + + >>> _strip_rst_role(':class:`ClassName`') # strip off :class: + 'ClassName' + >>> _strip_rst_role(':py:obj:`module.Object`') # works with domain + 'module.Object' + >>> _strip_rst_role('ClassName') # do nothing when not ReST role + 'ClassName' + + See also: + http://sphinx-doc.org/domains.html#cross-referencing-python-objects + + """ + match = REST_ROLE_PATTERN.match(type_str) + if match: + return match.group(1) + else: + return type_str + + +def hint_return(pyfunc): + type_str = None + func = pyfunc + while not type_str and func: + if func.get_doc(): + type_str = _search_return_in_docstr(func.get_doc()) + func = _get_superfunc(func) + if type_str: + return _resolve_type(type_str, pyfunc) + + +def _search_return_in_docstr(code): + for p in DOCSTRING_RETURN_PATTERNS: + match = p.search(code) + if match: + return _strip_rst_role(match.group(1)) + + +def hint_attr(pyclass, attr_name): + type_strs = None + for cls in _get_mro(pyclass): + if cls.get_doc(): + type_strs = _search_param_in_docstr(cls.get_doc(), attr_name) + if type_strs: + break + if type_strs: + return _resolve_type(type_strs[0], pyclass) diff --git a/pymode/libs2/rope/base/oi/memorydb.py b/pymode/libs/rope/base/oi/memorydb.py similarity index 87% rename from pymode/libs2/rope/base/oi/memorydb.py rename to pymode/libs/rope/base/oi/memorydb.py index f49075ca..01c814ce 100644 --- a/pymode/libs2/rope/base/oi/memorydb.py +++ b/pymode/libs/rope/base/oi/memorydb.py @@ -21,6 +21,16 @@ def _load_files(self): def keys(self): return self._files.keys() + def __iter__(self): + for f in self._files: + yield f + + def __len__(self): + return len(self._files) + + def __setitem__(self): + raise NotImplementedError() + def __contains__(self, key): return key in self._files @@ -76,6 +86,17 @@ def __getitem__(self, key): def __delitem__(self, key): del self.scopes[key] + def __iter__(self): + for s in self.scopes: + yield s + + def __len__(self): + return len(self.scopes) + + def __setitem__(self): + raise NotImplementedError() + + class ScopeInfo(objectdb.ScopeInfo): diff --git a/pymode/libs2/rope/base/oi/objectdb.py b/pymode/libs/rope/base/oi/objectdb.py similarity index 93% rename from pymode/libs2/rope/base/oi/objectdb.py rename to pymode/libs/rope/base/oi/objectdb.py index 
6f988add..61f2711d 100644 --- a/pymode/libs2/rope/base/oi/objectdb.py +++ b/pymode/libs/rope/base/oi/objectdb.py @@ -1,4 +1,8 @@ -import UserDict +from __future__ import print_function +try: + from collections import MutableMapping +except ImportError: + from UserDict import DictMixin as MutableMapping class ObjectDB(object): @@ -78,7 +82,7 @@ def _get_scope_info(self, path, key, readonly=True): self.files[path].create_scope(key) result = self.files[path][key] if isinstance(result, dict): - print self.files, self.files[path], self.files[path][key] + print(self.files, self.files[path], self.files[path][key]) return result def _file_removed(self, path): @@ -120,13 +124,13 @@ def add_call(self, parameters, returned): raise NotImplementedError() -class FileInfo(UserDict.DictMixin): +class FileInfo(MutableMapping): def create_scope(self, key): pass -class FileDict(UserDict.DictMixin): +class FileDict(MutableMapping): def create(self, key): pass diff --git a/pymode/libs2/rope/base/oi/objectinfo.py b/pymode/libs/rope/base/oi/objectinfo.py similarity index 100% rename from pymode/libs2/rope/base/oi/objectinfo.py rename to pymode/libs/rope/base/oi/objectinfo.py diff --git a/pymode/libs2/rope/base/oi/runmod.py b/pymode/libs/rope/base/oi/runmod.py similarity index 88% rename from pymode/libs2/rope/base/oi/runmod.py rename to pymode/libs/rope/base/oi/runmod.py index e332d7e6..ba0184c1 100644 --- a/pymode/libs2/rope/base/oi/runmod.py +++ b/pymode/libs/rope/base/oi/runmod.py @@ -1,13 +1,16 @@ - def __rope_start_everything(): import os import sys import socket - import cPickle as pickle + try: + import pickle + except ImportError: + import cPickle as pickle import marshal import inspect import types import threading + import rope.base.utils.pycompat as pycompat class _MessageSender(object): @@ -19,7 +22,7 @@ class _SocketSender(_MessageSender): def __init__(self, port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(('127.0.0.1', port)) - self.my_file = s.makefile('w') + self.my_file = s.makefile('wb') def send_data(self, data): if not self.my_file.closed: @@ -76,8 +79,9 @@ def on_function_call(self, frame, event, arg): code = frame.f_code for argname in code.co_varnames[:code.co_argcount]: try: - args.append(self._object_to_persisted_form( - frame.f_locals[argname])) + argvalue = self._object_to_persisted_form( + frame.f_locals[argname]) + args.append(argvalue) except (TypeError, AttributeError): args.append(('unknown',)) try: @@ -97,7 +101,6 @@ def _is_an_interesting_call(self, frame): # return False #return not frame.f_back or # not self._is_code_inside_project(frame.f_back.f_code) - if not self._is_code_inside_project(frame.f_code) and \ (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)): @@ -125,7 +128,7 @@ def _get_persisted_class(self, object_): return ('unknown',) def _get_persisted_builtin(self, object_): - if isinstance(object_, (str, unicode)): + if isinstance(object_, pycompat.string_types): return ('builtin', 'str') if isinstance(object_, list): holding = None @@ -137,7 +140,9 @@ def _get_persisted_builtin(self, object_): keys = None values = None if len(object_) > 0: - keys = object_.keys()[0] + # @todo - fix it properly, why is __locals__ being + # duplicated ? 
+ keys = [key for key in object_.keys() if key != '__locals__'][0] values = object_[keys] return ('builtin', 'dict', self._object_to_persisted_form(keys), @@ -166,14 +171,14 @@ def _object_to_persisted_form(self, object_): if isinstance(object_, types.CodeType): return self._get_persisted_code(object_) if isinstance(object_, types.FunctionType): - return self._get_persisted_code(object_.func_code) + return self._get_persisted_code(object_.__code__) if isinstance(object_, types.MethodType): - return self._get_persisted_code(object_.im_func.func_code) + return self._get_persisted_code(object_.__func__.__code__) if isinstance(object_, types.ModuleType): return self._get_persisted_module(object_) - if isinstance(object_, (str, unicode, list, dict, tuple, set)): + if isinstance(object_, pycompat.string_types + (list, dict, tuple, set)): return self._get_persisted_builtin(object_) - if isinstance(object_, (types.TypeType, types.ClassType)): + if isinstance(object_, type): return self._get_persisted_class(object_) return ('instance', self._get_persisted_class(type(object_))) @@ -204,10 +209,11 @@ def _realpath(path): run_globals.update({'__name__': '__main__', '__builtins__': __builtins__, '__file__': file_to_run}) + if send_info != '-': data_sender = _FunctionCallDataSender(send_info, project_root) del sys.argv[1:4] - execfile(file_to_run, run_globals) + pycompat.execfile(file_to_run, run_globals) if send_info != '-': data_sender.close() diff --git a/pymode/libs2/rope/base/oi/soa.py b/pymode/libs/rope/base/oi/soa.py similarity index 100% rename from pymode/libs2/rope/base/oi/soa.py rename to pymode/libs/rope/base/oi/soa.py diff --git a/pymode/libs2/rope/base/oi/soi.py b/pymode/libs/rope/base/oi/soi.py similarity index 78% rename from pymode/libs2/rope/base/oi/soi.py rename to pymode/libs/rope/base/oi/soi.py index 5a11b5ef..f39e6a8a 100644 --- a/pymode/libs2/rope/base/oi/soi.py +++ b/pymode/libs/rope/base/oi/soi.py @@ -8,6 +8,7 @@ import rope.base.pynames import rope.base.pyobjects from rope.base import evaluate, utils, arguments +from rope.base.oi.docstrings import hint_return, hint_param, hint_attr, hint_pep0484 _ignore_inferred = utils.ignore_exception( @@ -28,7 +29,12 @@ def infer_returned_object(pyfunction, args): pyfunction.get_param_names(special_args=False)) object_info.function_called(pyfunction, params, result) return result - return object_info.get_returned(pyfunction, args) + result = object_info.get_returned(pyfunction, args) + if result is not None: + return result + type_ = hint_return(pyfunction) + if type_ is not None: + return rope.base.pyobjects.PyObject(type_) @_ignore_inferred @@ -62,9 +68,42 @@ def infer_assigned_object(pyname): return for assignment in reversed(pyname.assignments): result = _infer_assignment(assignment, pyname.module) - if result is not None: + if isinstance(result, rope.base.builtins.BuiltinUnknown) and result.get_name() == 'NotImplementedType': + break + elif result == rope.base.pyobjects.get_unknown(): + break + elif result is not None: return result + hinting_result = hint_pep0484(pyname) + if hinting_result is not None: + return hinting_result + + hinting_result = _infer_assigned_object_by_hint(pyname) + if hinting_result is not None: + return hinting_result + + return result + + +def _infer_assigned_object_by_hint(pyname): + lineno = _get_lineno_for_node(pyname.assignments[0].ast_node) + holding_scope = pyname.module.get_scope().get_inner_scope_for_line(lineno) + pyobject = holding_scope.pyobject + if isinstance(pyobject, rope.base.pyobjects.PyClass): 
+ pyclass = pyobject + elif (isinstance(pyobject, rope.base.pyobjectsdef.PyFunction) and + isinstance(pyobject.parent, rope.base.pyobjects.PyClass)): + pyclass = pyobject.parent + else: + return + for name, attr in pyclass.get_attributes().items(): + if attr is pyname: + type_ = hint_attr(pyclass, name) + if type_ is not None: + return rope.base.pyobjects.PyObject(type_) + break + def get_passed_objects(pyfunction, parameter_index): object_info = pyfunction.pycore.object_info @@ -106,8 +145,15 @@ def _infer_returned(pyobject, args): def _parameter_objects(pyobject): + result = [] params = pyobject.get_param_names(special_args=False) - return [rope.base.pyobjects.get_unknown()] * len(params) + for name in params: + type_ = hint_param(pyobject, name) + if type_ is not None: + result.append(rope.base.pyobjects.PyObject(type_)) + else: + result.append(rope.base.pyobjects.get_unknown()) + return result # handling `rope.base.pynames.AssignmentValue` diff --git a/pymode/libs2/rope/base/oi/transform.py b/pymode/libs/rope/base/oi/transform.py similarity index 100% rename from pymode/libs2/rope/base/oi/transform.py rename to pymode/libs/rope/base/oi/transform.py diff --git a/pymode/libs2/rope/base/prefs.py b/pymode/libs/rope/base/prefs.py similarity index 100% rename from pymode/libs2/rope/base/prefs.py rename to pymode/libs/rope/base/prefs.py diff --git a/pymode/libs2/rope/base/project.py b/pymode/libs/rope/base/project.py similarity index 97% rename from pymode/libs2/rope/base/project.py rename to pymode/libs/rope/base/project.py index 23597f8c..2feef36c 100644 --- a/pymode/libs2/rope/base/project.py +++ b/pymode/libs/rope/base/project.py @@ -1,14 +1,19 @@ -import cPickle as pickle import os import shutil import sys import warnings import rope.base.fscommands -from rope.base import exceptions, taskhandle, prefs, history, pycore, utils import rope.base.resourceobserver as resourceobserver -from rope.base.resources import File, Folder, _ResourceMatcher +import rope.base.utils.pycompat as pycompat +from rope.base import exceptions, taskhandle, prefs, history, pycore, utils from rope.base.exceptions import ModuleNotFoundError +from rope.base.resources import File, Folder, _ResourceMatcher + +try: + import pickle +except ImportError: + import cPickle as pickle class _Project(object): @@ -257,7 +262,7 @@ def _init_prefs(self, prefs): '__file__': config.real_path}) if config.exists(): config = self.ropefolder.get_child('config.py') - execfile(config.real_path, run_globals) + pycompat.execfile(config.real_path, run_globals) else: exec(self._default_config(), run_globals) if 'set_prefs' in run_globals: @@ -453,6 +458,11 @@ def _realpath(path): Is equivalent to ``realpath(abspath(expanduser(path)))``. + Of the particular notice is the hack dealing with the unfortunate + sitaution of running native-Windows python (os.name == 'nt') inside + of Cygwin (abspath starts with '/'), which apparently normal + os.path.realpath completely messes up. 
+ """ # there is a bug in cygwin for os.path.abspath() for abs paths if sys.platform == 'cygwin': diff --git a/pymode/libs2/rope/base/pycore.py b/pymode/libs/rope/base/pycore.py similarity index 100% rename from pymode/libs2/rope/base/pycore.py rename to pymode/libs/rope/base/pycore.py diff --git a/pymode/libs2/rope/base/pynames.py b/pymode/libs/rope/base/pynames.py similarity index 100% rename from pymode/libs2/rope/base/pynames.py rename to pymode/libs/rope/base/pynames.py diff --git a/pymode/libs2/rope/base/pynamesdef.py b/pymode/libs/rope/base/pynamesdef.py similarity index 100% rename from pymode/libs2/rope/base/pynamesdef.py rename to pymode/libs/rope/base/pynamesdef.py diff --git a/pymode/libs2/rope/base/pyobjects.py b/pymode/libs/rope/base/pyobjects.py similarity index 99% rename from pymode/libs2/rope/base/pyobjects.py rename to pymode/libs/rope/base/pyobjects.py index 76be3040..fd4d1c82 100644 --- a/pymode/libs2/rope/base/pyobjects.py +++ b/pymode/libs/rope/base/pyobjects.py @@ -106,7 +106,7 @@ def get_unknown(): write:: if pyname.get_object() == get_unknown(): - print 'cannot determine what this pyname holds' + print('cannot determine what this pyname holds') Rope could have used `None` for indicating unknown objects but we had to check that in many places. So actually this method diff --git a/pymode/libs2/rope/base/pyobjectsdef.py b/pymode/libs/rope/base/pyobjectsdef.py similarity index 92% rename from pymode/libs2/rope/base/pyobjectsdef.py rename to pymode/libs/rope/base/pyobjectsdef.py index a738b4de..b7aef627 100644 --- a/pymode/libs2/rope/base/pyobjectsdef.py +++ b/pymode/libs/rope/base/pyobjectsdef.py @@ -1,11 +1,17 @@ +import rope.base.builtins import rope.base.codeanalyze import rope.base.evaluate -import rope.base.builtins +import rope.base.libutils import rope.base.oi.soi import rope.base.pyscopes -import rope.base.libutils from rope.base import (pynamesdef as pynames, exceptions, ast, astutils, pyobjects, fscommands, arguments, utils) +from rope.base.utils import pycompat + +try: + unicode +except NameError: + unicode = str class PyFunction(pyobjects.PyFunction): @@ -71,13 +77,13 @@ def get_name(self): def get_param_names(self, special_args=True): # TODO: handle tuple parameters - result = [node.id for node in self.arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in self.arguments.args + if isinstance(node, pycompat.ast_arg_type)] if special_args: if self.arguments.vararg: - result.append(self.arguments.vararg) + result.append(pycompat.get_ast_arg_arg(self.arguments.vararg)) if self.arguments.kwarg: - result.append(self.arguments.kwarg) + result.append(pycompat.get_ast_arg_arg(self.arguments.kwarg)) return result def get_kind(self): @@ -181,9 +187,9 @@ def _init_source(self, pycore, source_code, resource): else: source_bytes = source_code ast_node = ast.parse(source_bytes, filename=filename) - except SyntaxError, e: + except SyntaxError as e: raise exceptions.ModuleSyntaxError(filename, e.lineno, e.msg) - except UnicodeDecodeError, e: + except UnicodeDecodeError as e: raise exceptions.ModuleSyntaxError(filename, 1, '%s' % (e.reason)) return source_code, ast_node @@ -226,7 +232,7 @@ def _create_structural_attributes(self): result = {} modname = rope.base.libutils.modname(self.resource) extension_submodules = self.pycore._builtin_submodules(modname) - for name, module in extension_submodules.iteritems(): + for name, module in extension_submodules.items(): result[name] = rope.base.builtins.BuiltinName(module) if 
self.resource is None: return result @@ -370,26 +376,34 @@ def _assigned(self, name, assignment): def _update_evaluated(self, targets, assigned, evaluation='', eval_type=False): result = {} - names = astutils.get_name_levels(targets) - for name, levels in names: - assignment = pynames.AssignmentValue(assigned, levels, + if isinstance(targets, str): + assignment = pynames.AssignmentValue(assigned, [], evaluation, eval_type) - self._assigned(name, assignment) + self._assigned(targets, assignment) + else: + names = astutils.get_name_levels(targets) + for name, levels in names: + assignment = pynames.AssignmentValue(assigned, levels, + evaluation, eval_type) + self._assigned(name, assignment) return result def _With(self, node): - if node.optional_vars: - self._update_evaluated(node.optional_vars, - node.context_expr, '.__enter__()') + for item in pycompat.get_ast_with_items(node): + if item.optional_vars: + self._update_evaluated(item.optional_vars, + item.context_expr, '.__enter__()') for child in node.body: ast.walk(child, self) def _excepthandler(self, node): - if node.name is not None and isinstance(node.name, ast.Name): + node_name_type = str if pycompat.PY3 else ast.Name + if node.name is not None and isinstance(node.name, node_name_type): type_node = node.type if isinstance(node.type, ast.Tuple) and type_node.elts: type_node = type_node.elts[0] self._update_evaluated(node.name, type_node, eval_type=True) + for child in node.body: ast.walk(child, self) @@ -465,8 +479,10 @@ def _FunctionDef(self, node): _ScopeVisitor._FunctionDef(self, node) if len(node.args.args) > 0: first = node.args.args[0] - if isinstance(first, ast.Name): - new_visitor = _ClassInitVisitor(self, first.id) + new_visitor = None + if isinstance(first, pycompat.ast_arg_type): + new_visitor = _ClassInitVisitor(self, pycompat.get_ast_arg_arg(first)) + if new_visitor is not None: for child in ast.get_child_nodes(node): ast.walk(child, new_visitor) diff --git a/pymode/libs2/rope/base/pyscopes.py b/pymode/libs/rope/base/pyscopes.py similarity index 100% rename from pymode/libs2/rope/base/pyscopes.py rename to pymode/libs/rope/base/pyscopes.py diff --git a/pymode/libs2/rope/base/resourceobserver.py b/pymode/libs/rope/base/resourceobserver.py similarity index 100% rename from pymode/libs2/rope/base/resourceobserver.py rename to pymode/libs/rope/base/resourceobserver.py diff --git a/pymode/libs2/rope/base/resources.py b/pymode/libs/rope/base/resources.py similarity index 99% rename from pymode/libs2/rope/base/resources.py rename to pymode/libs/rope/base/resources.py index aac755f0..af8ac0a2 100644 --- a/pymode/libs2/rope/base/resources.py +++ b/pymode/libs/rope/base/resources.py @@ -109,7 +109,7 @@ def read(self): data = self.read_bytes() try: return fscommands.file_data_to_unicode(data) - except UnicodeDecodeError, e: + except UnicodeDecodeError as e: raise exceptions.ModuleDecodeError(self.path, e.reason) def read_bytes(self): diff --git a/pymode/libs2/rope/base/simplify.py b/pymode/libs/rope/base/simplify.py similarity index 100% rename from pymode/libs2/rope/base/simplify.py rename to pymode/libs/rope/base/simplify.py diff --git a/pymode/libs2/rope/base/stdmods.py b/pymode/libs/rope/base/stdmods.py similarity index 60% rename from pymode/libs2/rope/base/stdmods.py rename to pymode/libs/rope/base/stdmods.py index 457a4fac..5e868c2a 100644 --- a/pymode/libs2/rope/base/stdmods.py +++ b/pymode/libs/rope/base/stdmods.py @@ -1,13 +1,19 @@ import os +import re import sys from rope.base import utils +from rope.base.utils import 
pycompat def _stdlib_path(): - import distutils.sysconfig - return distutils.sysconfig.get_python_lib(standard_lib=True, - plat_specific=True) + if pycompat.PY2: + from distutils import sysconfig + return sysconfig.get_python_lib(standard_lib=True, + plat_specific=True) + elif pycompat.PY3: + import inspect + return os.path.dirname(inspect.getsourcefile(inspect)) @utils.cached(1) @@ -31,6 +37,15 @@ def python_modules(): return result +def normalize_so_name(name): + """ + Handle different types of python installations + """ + if "cpython" in name: + return os.path.splitext(os.path.splitext(name)[0])[0] + return os.path.splitext(name)[0] + + @utils.cached(1) def dynload_modules(): result = set(sys.builtin_module_names) @@ -40,7 +55,7 @@ def dynload_modules(): path = os.path.join(dynload_path, name) if os.path.isfile(path): if name.endswith('.dll'): - result.add(os.path.splitext(name)[0]) + result.add(normalize_so_name(name)) if name.endswith('.so'): - result.add(os.path.splitext(name)[0].replace('module', '')) + result.add(normalize_so_name(name)) return result diff --git a/pymode/libs2/rope/base/taskhandle.py b/pymode/libs/rope/base/taskhandle.py similarity index 100% rename from pymode/libs2/rope/base/taskhandle.py rename to pymode/libs/rope/base/taskhandle.py diff --git a/pymode/libs2/rope/base/utils.py b/pymode/libs/rope/base/utils/__init__.py similarity index 100% rename from pymode/libs2/rope/base/utils.py rename to pymode/libs/rope/base/utils/__init__.py diff --git a/pymode/libs/rope/base/utils/datastructures.py b/pymode/libs/rope/base/utils/datastructures.py new file mode 100644 index 00000000..0cb16cf2 --- /dev/null +++ b/pymode/libs/rope/base/utils/datastructures.py @@ -0,0 +1,67 @@ +# this snippet was taken from this link +# http://code.activestate.com/recipes/576694/ + +import collections + + +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel + # node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def intersection(self, set_b): + return OrderedSet([item for item in self if item in set_b]) + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) diff --git a/pymode/libs/rope/base/utils/pycompat.py b/pymode/libs/rope/base/utils/pycompat.py new file mode 100644 index 00000000..367cf092 --- /dev/null +++ b/pymode/libs/rope/base/utils/pycompat.py @@ -0,0 +1,45 @@ +import sys +import _ast +# from rope.base import ast + +PY2 = 
sys.version_info[0] == 2 +PY27 = sys.version_info[0:2] >= (2, 7) +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +try: + str = unicode +except NameError: # PY3 + + str = str + string_types = (str,) + import builtins + ast_arg_type = _ast.arg + + def execfile(fn, global_vars=None, local_vars=None): + with open(fn) as f: + code = compile(f.read(), fn, 'exec') + exec(code, global_vars or {}, local_vars) + + def get_ast_arg_arg(node): + if isinstance(node, string_types): # TODO: G21: Understand the Algorithm (Where it's used?) + return node + return node.arg + + def get_ast_with_items(node): + return node.items + +else: # PY2 + + string_types = (basestring,) + builtins = __import__('__builtin__') + ast_arg_type = _ast.Name + execfile = execfile + + def get_ast_arg_arg(node): + if isinstance(node, string_types): # Python2 arguments.vararg, arguments.kwarg + return node + return node.id + + def get_ast_with_items(node): + return [node] diff --git a/pymode/libs2/rope/base/worder.py b/pymode/libs/rope/base/worder.py similarity index 100% rename from pymode/libs2/rope/base/worder.py rename to pymode/libs/rope/base/worder.py diff --git a/pymode/libs2/rope/contrib/__init__.py b/pymode/libs/rope/contrib/__init__.py similarity index 100% rename from pymode/libs2/rope/contrib/__init__.py rename to pymode/libs/rope/contrib/__init__.py diff --git a/pymode/libs2/rope/contrib/autoimport.py b/pymode/libs/rope/contrib/autoimport.py similarity index 100% rename from pymode/libs2/rope/contrib/autoimport.py rename to pymode/libs/rope/contrib/autoimport.py diff --git a/pymode/libs2/rope/contrib/changestack.py b/pymode/libs/rope/contrib/changestack.py similarity index 100% rename from pymode/libs2/rope/contrib/changestack.py rename to pymode/libs/rope/contrib/changestack.py diff --git a/pymode/libs2/rope/contrib/codeassist.py b/pymode/libs/rope/contrib/codeassist.py similarity index 96% rename from pymode/libs2/rope/contrib/codeassist.py rename to pymode/libs/rope/contrib/codeassist.py index 48b4a813..92c1bfc2 100644 --- a/pymode/libs2/rope/contrib/codeassist.py +++ b/pymode/libs/rope/contrib/codeassist.py @@ -527,28 +527,21 @@ def get_sorted_proposal_list(self): scope_proposals = proposals.get(scope, []) scope_proposals = [proposal for proposal in scope_proposals if proposal.type in self.typerank] - scope_proposals.sort(self._proposal_cmp) + scope_proposals.sort(key=self._proposal_key) result.extend(scope_proposals) return result - def _proposal_cmp(self, proposal1, proposal2): - if proposal1.type != proposal2.type: - return cmp(self.typerank.get(proposal1.type, 100), - self.typerank.get(proposal2.type, 100)) - return self._compare_underlined_names(proposal1.name, - proposal2.name) - - def _compare_underlined_names(self, name1, name2): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - underline_count1 = underline_count(name1) - underline_count2 = underline_count(name2) - if underline_count1 != underline_count2: - return cmp(underline_count1, underline_count2) - return cmp(name1, name2) + def _proposal_key(self, proposal1): + def _underline_count(name): + return sum(1 for c in name if c == "_") + return (self.typerank.get(proposal1.type, 100), + _underline_count(proposal1.name), + proposal1.name) + #if proposal1.type != proposal2.type: + # return cmp(self.typerank.get(proposal1.type, 100), + # self.typerank.get(proposal2.type, 100)) + #return self._compare_underlined_names(proposal1.name, + # proposal2.name) class 
PyDocExtractor(object): @@ -656,14 +649,14 @@ def _trim_docstring(self, docstring, indents=0): # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): - indent = sys.maxint + indent = sys.maxsize for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) # Remove indentation (first line is special): trimmed = [lines[0].strip()] - if indent < sys.maxint: + if indent < sys.maxsize: for line in lines[1:]: trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: diff --git a/pymode/libs2/rope/contrib/finderrors.py b/pymode/libs/rope/contrib/finderrors.py similarity index 98% rename from pymode/libs2/rope/contrib/finderrors.py rename to pymode/libs/rope/contrib/finderrors.py index 9ee7dd15..109a3e8a 100644 --- a/pymode/libs2/rope/contrib/finderrors.py +++ b/pymode/libs/rope/contrib/finderrors.py @@ -5,7 +5,7 @@ errors = find_errors(project, project.get_resource('mod.py')) for error in errors: - print '%s: %s' % (error.lineno, error.error) + print('%s: %s' % (error.lineno, error.error)) prints possible errors for ``mod.py`` file. diff --git a/pymode/libs2/rope/contrib/findit.py b/pymode/libs/rope/contrib/findit.py similarity index 100% rename from pymode/libs2/rope/contrib/findit.py rename to pymode/libs/rope/contrib/findit.py diff --git a/pymode/libs2/rope/contrib/fixmodnames.py b/pymode/libs/rope/contrib/fixmodnames.py similarity index 100% rename from pymode/libs2/rope/contrib/fixmodnames.py rename to pymode/libs/rope/contrib/fixmodnames.py diff --git a/pymode/libs2/rope/contrib/fixsyntax.py b/pymode/libs/rope/contrib/fixsyntax.py similarity index 97% rename from pymode/libs2/rope/contrib/fixsyntax.py rename to pymode/libs/rope/contrib/fixsyntax.py index aab5c78c..fa2a17d9 100644 --- a/pymode/libs2/rope/contrib/fixsyntax.py +++ b/pymode/libs/rope/contrib/fixsyntax.py @@ -30,7 +30,7 @@ def get_pymodule(self): return libutils.get_string_module( self.project, code, resource=self.resource, force_errors=True) - except exceptions.ModuleSyntaxError, e: + except exceptions.ModuleSyntaxError as e: if msg is None: msg = '%s:%s %s' % (e.filename, e.lineno, e.message_) if tries < self.maxfixes: @@ -40,7 +40,7 @@ def get_pymodule(self): else: raise exceptions.ModuleSyntaxError( e.filename, e.lineno, - 'Failed to fix error: {}'.format(msg)) + 'Failed to fix error: {0}'.format(msg)) @property @utils.saveit @@ -76,7 +76,7 @@ def __init__(self, code): self.code = code self.lines = self.code.split('\n') self.lines.append('\n') - self.origs = range(len(self.lines) + 1) + self.origs = list(range(len(self.lines) + 1)) self.diffs = [0] * (len(self.lines) + 1) def comment(self, lineno): diff --git a/pymode/libs2/rope/contrib/generate.py b/pymode/libs/rope/contrib/generate.py similarity index 100% rename from pymode/libs2/rope/contrib/generate.py rename to pymode/libs/rope/contrib/generate.py diff --git a/pymode/libs2/rope/refactor/__init__.py b/pymode/libs/rope/refactor/__init__.py similarity index 100% rename from pymode/libs2/rope/refactor/__init__.py rename to pymode/libs/rope/refactor/__init__.py diff --git a/pymode/libs2/rope/refactor/change_signature.py b/pymode/libs/rope/refactor/change_signature.py similarity index 98% rename from pymode/libs2/rope/refactor/change_signature.py rename to pymode/libs/rope/refactor/change_signature.py index 4279d9cf..b5ba1856 100644 --- a/pymode/libs2/rope/refactor/change_signature.py +++ 
b/pymode/libs/rope/refactor/change_signature.py @@ -347,8 +347,6 @@ def find_occurrences(self, resource=None, pymodule=None): all_occurrences = [] for finder in self.finders: all_occurrences.extend(finder.find_occurrences(resource, pymodule)) - all_occurrences.sort(self._cmp_occurrences) + all_occurrences.sort(key=lambda x: x.get_primary_range()) return all_occurrences - def _cmp_occurrences(self, o1, o2): - return cmp(o1.get_primary_range(), o2.get_primary_range()) diff --git a/pymode/libs2/rope/refactor/encapsulate_field.py b/pymode/libs/rope/refactor/encapsulate_field.py similarity index 100% rename from pymode/libs2/rope/refactor/encapsulate_field.py rename to pymode/libs/rope/refactor/encapsulate_field.py diff --git a/pymode/libs2/rope/refactor/extract.py b/pymode/libs/rope/refactor/extract.py similarity index 95% rename from pymode/libs2/rope/refactor/extract.py rename to pymode/libs/rope/refactor/extract.py index be541bb5..80e74317 100644 --- a/pymode/libs2/rope/refactor/extract.py +++ b/pymode/libs/rope/refactor/extract.py @@ -1,8 +1,10 @@ import re +from rope.base.utils.datastructures import OrderedSet from rope.base import ast, codeanalyze from rope.base.change import ChangeSet, ChangeContents from rope.base.exceptions import RefactoringError +from rope.base.utils import pycompat from rope.refactor import (sourceutils, similarfinder, patchedast, suites, usefunction) @@ -598,19 +600,20 @@ def __init__(self, start, end, is_global): self.start = start self.end = end self.is_global = is_global - self.prewritten = set() - self.maybe_written = set() - self.written = set() - self.read = set() - self.postread = set() - self.postwritten = set() + self.prewritten = OrderedSet() + self.maybe_written = OrderedSet() + self.written = OrderedSet() + self.read = OrderedSet() + self.postread = OrderedSet() + self.postwritten = OrderedSet() self.host_function = True self.conditional = False def _read_variable(self, name, lineno): if self.start <= lineno <= self.end: if name not in self.written: - self.read.add(name) + if not self.conditional or name not in self.maybe_written: + self.read.add(name) if self.end < lineno: if name not in self.postwritten: self.postread.add(name) @@ -670,16 +673,27 @@ def _While(self, node): self._handle_conditional_node(node) def _For(self, node): - self._handle_conditional_node(node) + self.conditional = True + try: + # iter has to be checked before the target variables + ast.walk(node.iter, self) + ast.walk(node.target, self) + + for child in node.body: + ast.walk(child, self) + for child in node.orelse: + ast.walk(child, self) + finally: + self.conditional = False def _get_argnames(arguments): - result = [node.id for node in arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in arguments.args + if isinstance(node, pycompat.ast_arg_type)] if arguments.vararg: - result.append(arguments.vararg) + result.append(pycompat.get_ast_arg_arg(arguments.vararg)) if arguments.kwarg: - result.append(arguments.kwarg) + result.append(pycompat.get_ast_arg_arg(arguments.kwarg)) return result @@ -744,7 +758,11 @@ def loop_encountered(self, node): ast.walk(child, self) self.loop_count -= 1 if node.orelse: - ast.walk(node.orelse, self) + if isinstance(node.orelse,(list,tuple)): + for node_ in node.orelse: + ast.walk(node_, self) + else: + ast.walk(node.orelse, self) def _Break(self, node): self.check_loop() diff --git a/pymode/libs2/rope/refactor/functionutils.py b/pymode/libs/rope/refactor/functionutils.py similarity index 100% rename 
from pymode/libs2/rope/refactor/functionutils.py rename to pymode/libs/rope/refactor/functionutils.py diff --git a/pymode/libs2/rope/refactor/importutils/__init__.py b/pymode/libs/rope/refactor/importutils/__init__.py similarity index 97% rename from pymode/libs2/rope/refactor/importutils/__init__.py rename to pymode/libs/rope/refactor/importutils/__init__.py index 4871faf3..6a44f01b 100644 --- a/pymode/libs2/rope/refactor/importutils/__init__.py +++ b/pymode/libs/rope/refactor/importutils/__init__.py @@ -278,6 +278,7 @@ def add_import(project, pymodule, module_name, name=None): imports = get_module_imports(project, pymodule) candidates = [] names = [] + selected_import = None # from mod import name if name is not None: from_import = FromImport(module_name, 0, [(name, None)]) @@ -286,7 +287,10 @@ def add_import(project, pymodule, module_name, name=None): # from pkg import mod if '.' in module_name: pkg, mod = module_name.rsplit('.', 1) - candidates.append(FromImport(pkg, 0, [(mod, None)])) + from_import = FromImport(pkg, 0, [(mod, None)]) + if project.prefs.get('prefer_module_from_imports'): + selected_import = from_import + candidates.append(from_import) if name: names.append(mod + '.' + name) else: @@ -301,7 +305,8 @@ def add_import(project, pymodule, module_name, name=None): candidates.append(normal_import) visitor = actions.AddingVisitor(project, candidates) - selected_import = normal_import + if selected_import is None: + selected_import = normal_import for import_statement in imports.imports: if import_statement.accept(visitor): selected_import = visitor.import_info diff --git a/pymode/libs2/rope/refactor/importutils/actions.py b/pymode/libs/rope/refactor/importutils/actions.py similarity index 100% rename from pymode/libs2/rope/refactor/importutils/actions.py rename to pymode/libs/rope/refactor/importutils/actions.py diff --git a/pymode/libs2/rope/refactor/importutils/importinfo.py b/pymode/libs/rope/refactor/importutils/importinfo.py similarity index 100% rename from pymode/libs2/rope/refactor/importutils/importinfo.py rename to pymode/libs/rope/refactor/importutils/importinfo.py diff --git a/pymode/libs2/rope/refactor/importutils/module_imports.py b/pymode/libs/rope/refactor/importutils/module_imports.py similarity index 89% rename from pymode/libs2/rope/refactor/importutils/module_imports.py rename to pymode/libs/rope/refactor/importutils/module_imports.py index b96eebc4..26059a49 100644 --- a/pymode/libs2/rope/refactor/importutils/module_imports.py +++ b/pymode/libs/rope/refactor/importutils/module_imports.py @@ -68,7 +68,7 @@ def get_changed_source(self): # Writing module docs result.extend(after_removing[first_non_blank:first_import]) # Writing imports - sorted_imports = sorted(imports, self._compare_import_locations) + sorted_imports = sorted(imports, key=self._get_location) for stmt in sorted_imports: if stmt != sorted_imports[0]: result.append('\n' * stmt.blank_lines) @@ -88,35 +88,31 @@ def _get_import_location(self, stmt): start = stmt.get_old_location()[0] return start - def _compare_import_locations(self, stmt1, stmt2): - def get_location(stmt): - if stmt.get_new_start() is not None: - return stmt.get_new_start() - else: - return stmt.get_old_location()[0] - return cmp(get_location(stmt1), get_location(stmt2)) + def _get_location(self, stmt): + if stmt.get_new_start() is not None: + return stmt.get_new_start() + else: + return stmt.get_old_location()[0] def _remove_imports(self, imports): lines = self.pymodule.source_code.splitlines(True) after_removing = [] + 
first_import_line = self._first_import_line() last_index = 0 for stmt in imports: start, end = stmt.get_old_location() - after_removing.extend(lines[last_index:start - 1]) + blank_lines = 0 + if start != first_import_line: + blank_lines = _count_blank_lines(lines.__getitem__, start - 2, + last_index - 1, -1) + after_removing.extend(lines[last_index:start - 1 - blank_lines]) last_index = end - 1 - for i in range(start, end): - after_removing.append('') after_removing.extend(lines[last_index:]) return after_removing def _first_non_blank_line(self, lines, lineno): - result = lineno - for line in lines[lineno:]: - if line.strip() == '': - result += 1 - else: - break - return result + return lineno + _count_blank_lines(lines.__getitem__, lineno, + len(lines)) def add_import(self, import_info): visitor = actions.AddingVisitor(self.project, [import_info]) @@ -166,7 +162,7 @@ def force_single_imports(self): """force a single import per statement""" for import_stmt in self.imports[:]: import_info = import_stmt.import_info - if import_info.is_empty(): + if import_info.is_empty() or import_stmt.readonly: continue if len(import_info.names_and_aliases) > 1: for name_and_alias in import_info.names_and_aliases: @@ -202,7 +198,7 @@ def sort_imports(self): if self.project.prefs.get("sort_imports_alphabetically"): sort_kwargs = dict(key=self._get_import_name) else: - sort_kwargs = dict(cmp=self._compare_imports) + sort_kwargs = dict(key=self._key_imports) # IDEA: Sort from import list visitor = actions.SortingVisitor(self.project, self._current_folder()) @@ -225,17 +221,16 @@ def _first_import_line(self): if self.pymodule.get_doc() is not None: lineno = 1 if len(nodes) > lineno: + if (isinstance(nodes[lineno], ast.Import) or + isinstance(nodes[lineno], ast.ImportFrom)): + return nodes[lineno].lineno lineno = self.pymodule.logical_lines.logical_line_in( nodes[lineno].lineno)[0] else: lineno = self.pymodule.lines.length() - while lineno > 1: - line = self.pymodule.lines.get_line(lineno - 1) - if line.strip() == '': - lineno -= 1 - else: - break - return lineno + + return lineno - _count_blank_lines(self.pymodule.lines.get_line, + lineno - 1, 1, -1) def _get_import_name(self, import_stmt): import_info = import_stmt.import_info @@ -245,14 +240,17 @@ def _get_import_name(self, import_stmt): else: return import_info.names_and_aliases[0][0] - def _compare_imports(self, stmt1, stmt2): - str1 = stmt1.get_import_statement() - str2 = stmt2.get_import_statement() - if str1.startswith('from ') and not str2.startswith('from '): - return 1 - if not str1.startswith('from ') and str2.startswith('from '): - return -1 - return cmp(str1, str2) + def _key_imports(self, stm1): + str1 = stm1.get_import_statement() + return str1.startswith("from "), str1 + + #str1 = stmt1.get_import_statement() + #str2 = stmt2.get_import_statement() + #if str1.startswith('from ') and not str2.startswith('from '): + # return 1 + #if not str1.startswith('from ') and str2.startswith('from '): + # return -1 + #return cmp(str1, str2) def _move_imports(self, imports, index, blank_lines): if imports: @@ -282,6 +280,16 @@ def remove_pyname(self, pyname): import_stmt.accept(visitor) +def _count_blank_lines(get_line, start, end, step=1): + count = 0 + for idx in range(start, end, step): + if get_line(idx).strip() == '': + count += 1 + else: + break + return count + + class _OneTimeSelector(object): def __init__(self, names): @@ -429,24 +437,11 @@ def visit_import(self, node, end_line): self.imports.append(import_statement) def 
_count_empty_lines_before(self, lineno): - result = 0 - for current in range(lineno - 1, 0, -1): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result + return _count_blank_lines(self.lines.get_line, lineno - 1, 0, -1) def _count_empty_lines_after(self, lineno): - result = 0 - for current in range(lineno + 1, self.lines.length()): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result + return _count_blank_lines(self.lines.get_line, lineno + 1, + self.lines.length()) def get_separating_line_count(self): if not self.imports: diff --git a/pymode/libs2/rope/refactor/inline.py b/pymode/libs/rope/refactor/inline.py similarity index 99% rename from pymode/libs2/rope/refactor/inline.py rename to pymode/libs/rope/refactor/inline.py index 0ae1f8f4..467edefa 100644 --- a/pymode/libs2/rope/refactor/inline.py +++ b/pymode/libs/rope/refactor/inline.py @@ -398,7 +398,7 @@ def _calculate_definition(self, primary, pyname, call, host_vars, returns): # inside the inlined function are renamed if len(set(all_names).intersection(set(host_vars))) > 0: - prefix = _DefinitionGenerator.unique_prefix.next() + prefix = next(_DefinitionGenerator.unique_prefix) guest = libutils.get_string_module(self.project, source, self.resource) diff --git a/pymode/libs2/rope/refactor/introduce_factory.py b/pymode/libs/rope/refactor/introduce_factory.py similarity index 100% rename from pymode/libs2/rope/refactor/introduce_factory.py rename to pymode/libs/rope/refactor/introduce_factory.py diff --git a/pymode/libs2/rope/refactor/introduce_parameter.py b/pymode/libs/rope/refactor/introduce_parameter.py similarity index 100% rename from pymode/libs2/rope/refactor/introduce_parameter.py rename to pymode/libs/rope/refactor/introduce_parameter.py diff --git a/pymode/libs2/rope/refactor/localtofield.py b/pymode/libs/rope/refactor/localtofield.py similarity index 100% rename from pymode/libs2/rope/refactor/localtofield.py rename to pymode/libs/rope/refactor/localtofield.py diff --git a/pymode/libs2/rope/refactor/method_object.py b/pymode/libs/rope/refactor/method_object.py similarity index 100% rename from pymode/libs2/rope/refactor/method_object.py rename to pymode/libs/rope/refactor/method_object.py diff --git a/pymode/libs2/rope/refactor/move.py b/pymode/libs/rope/refactor/move.py similarity index 85% rename from pymode/libs2/rope/refactor/move.py rename to pymode/libs/rope/refactor/move.py index 60df493e..ce618277 100644 --- a/pymode/libs2/rope/refactor/move.py +++ b/pymode/libs/rope/refactor/move.py @@ -22,21 +22,21 @@ def create_move(project, resource, offset=None): return MoveModule(project, resource) this_pymodule = project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError( - 'Move only works on classes, functions, modules and methods.') - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.PyModule) or \ - isinstance(pyobject, pyobjects.PyPackage): - return MoveModule(project, pyobject.get_resource()) - if isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - return MoveMethod(project, resource, offset) - if isinstance(pyobject, pyobjects.PyDefinedObject) and \ - isinstance(pyobject.parent, pyobjects.PyModule): - return MoveGlobal(project, resource, offset) + if pyname is not None: + pyobject = pyname.get_object() + if isinstance(pyobject, pyobjects.PyModule) or \ + 
isinstance(pyobject, pyobjects.PyPackage): + return MoveModule(project, pyobject.get_resource()) + if isinstance(pyobject, pyobjects.PyFunction) and \ + isinstance(pyobject.parent, pyobjects.PyClass): + return MoveMethod(project, resource, offset) + if isinstance(pyobject, pyobjects.PyDefinedObject) and \ + isinstance(pyobject.parent, pyobjects.PyModule) or \ + isinstance(pyname, pynames.AssignedName): + return MoveGlobal(project, resource, offset) raise exceptions.RefactoringError( - 'Move only works on global classes/functions, modules and methods.') + 'Move only works on global classes/functions/variables, modules and ' + 'methods.') class MoveMethod(object): @@ -203,29 +203,63 @@ def __init__(self, project, resource, offset): self.project = project this_pymodule = self.project.get_pymodule(resource) self.old_pyname = evaluate.eval_location(this_pymodule, offset) - self.old_name = self.old_pyname.get_object().get_name() - pymodule = self.old_pyname.get_object().get_module() + if self.old_pyname is None: + raise exceptions.RefactoringError( + 'Move refactoring should be performed on a ' + 'class/function/variable.') + if self._is_variable(self.old_pyname): + self.old_name = worder.get_name_at(resource, offset) + pymodule = this_pymodule + else: + self.old_name = self.old_pyname.get_object().get_name() + pymodule = self.old_pyname.get_object().get_module() + self._check_exceptional_conditions() self.source = pymodule.get_resource() self.tools = _MoveTools(self.project, self.source, self.old_pyname, self.old_name) self.import_tools = self.tools.import_tools - self._check_exceptional_conditions() + + def _import_filter(self, stmt): + module_name = libutils.modname(self.source) + + if isinstance(stmt.import_info, importutils.NormalImport): + # Affect any statement that imports the source module + return any(module_name == name + for name, alias in stmt.import_info.names_and_aliases) + elif isinstance(stmt.import_info, importutils.FromImport): + # Affect statements importing from the source package + if '.' 
in module_name: + package_name, basename = module_name.rsplit('.', 1) + if (stmt.import_info.module_name == package_name and + any(basename == name + for name, alias in stmt.import_info.names_and_aliases)): + return True + return stmt.import_info.module_name == module_name + return False def _check_exceptional_conditions(self): - if self.old_pyname is None or \ - not isinstance(self.old_pyname.get_object(), - pyobjects.PyDefinedObject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a class/function.') - moving_pyobject = self.old_pyname.get_object() - if not self._is_global(moving_pyobject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed ' + - 'on a global class/function.') + if self._is_variable(self.old_pyname): + pymodule = self.old_pyname.get_definition_location()[0] + try: + pymodule.get_scope().get_name(self.old_name) + except exceptions.NameNotFoundError: + self._raise_refactoring_error() + elif not (isinstance(self.old_pyname.get_object(), + pyobjects.PyDefinedObject) and + self._is_global(self.old_pyname.get_object())): + self._raise_refactoring_error() + + def _raise_refactoring_error(self): + raise exceptions.RefactoringError( + 'Move refactoring should be performed on a global class, function ' + 'or variable.') def _is_global(self, pyobject): return pyobject.get_scope().parent == pyobject.get_module().get_scope() + def _is_variable(self, pyname): + return isinstance(pyname, pynames.AssignedName) + def get_changes(self, dest, resources=None, task_handle=taskhandle.NullTaskHandle()): if resources is None: @@ -262,7 +296,8 @@ def _calculate_changes(self, dest, resources, task_handle): should_import = source is not None # Removing out of date imports pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) + source = self.import_tools.organize_imports( + pymodule, sort=False, import_filter=self._import_filter) # Adding new import if should_import: pymodule = self.tools.new_pymodule(pymodule, source) @@ -285,6 +320,8 @@ def _source_module_changes(self, dest): renamer = ModuleSkipRenamer(occurrence_finder, self.source, handle, start, end) source = renamer.get_changed_module() + pymodule = libutils.get_string_module(self.project, source, self.source) + source = self.import_tools.organize_imports(pymodule, sort=False) if handle.occurred: pymodule = libutils.get_string_module( self.project, source, self.source) @@ -304,8 +341,6 @@ def _dest_module_changes(self, dest): pymodule = self.tools.new_pymodule(pymodule, source) moving, imports = self._get_moving_element_with_imports() - source = self.tools.remove_old_imports(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) pymodule, has_changed = self._add_imports2(pymodule, imports) module_with_imports = self.import_tools.module_imports(pymodule) @@ -329,6 +364,11 @@ def _dest_module_changes(self, dest): pymodule = libutils.get_string_module(self.project, source, dest) source = self.import_tools.organize_imports(pymodule, sort=False, unused=False) + # Remove unused imports of the old module + pymodule = libutils.get_string_module(self.project, source, dest) + source = self.import_tools.organize_imports( + pymodule, sort=False, selfs=False, unused=True, + import_filter=self._import_filter) return ChangeContents(dest, source) def _get_moving_element_with_imports(self): @@ -348,9 +388,23 @@ def _get_moving_element(self): def _get_moving_region(self): pymodule = self.project.get_pymodule(self.source) lines = 
pymodule.lines - scope = self.old_pyname.get_object().get_scope() - start = lines.get_line_start(scope.get_start()) - end_line = scope.get_end() + if self._is_variable(self.old_pyname): + logical_lines = pymodule.logical_lines + lineno = logical_lines.logical_line_in( + self.old_pyname.get_definition_location()[1])[0] + start = lines.get_line_start(lineno) + end_line = logical_lines.logical_line_in(lineno)[1] + else: + scope = self.old_pyname.get_object().get_scope() + start = lines.get_line_start(scope.get_start()) + end_line = scope.get_end() + + # Include comment lines before the definition + start_line = lines.get_line_number(start) + while start_line > 1 and lines.get_line(start_line - 1).startswith('#'): + start_line -= 1 + start = lines.get_line_start(start_line) + while end_line < lines.length() and \ lines.get_line(end_line + 1).strip() == '': end_line += 1 @@ -446,7 +500,6 @@ def _change_occurrences_in_module(self, dest, pymodule=None, new_name = self._new_modname(dest) module_imports = importutils.get_module_imports(self.project, pymodule) changed = False - source = None if libutils.modname(dest): changed = self._change_import_statements(dest, new_name, @@ -472,7 +525,6 @@ def _change_occurrences_in_module(self, dest, pymodule=None, return source return None - def _change_import_statements(self, dest, new_name, module_imports): moving_module = self.source parent_module = moving_module.parent @@ -605,7 +657,8 @@ def occurs_in_module(self, pymodule=None, resource=None, imports=True): def _create_finder(self, imports): return occurrences.create_finder(self.project, self.old_name, - self.old_pyname, imports=imports) + self.old_pyname, imports=imports, + keywords=False) def new_pymodule(self, pymodule, source): if source is not None: @@ -632,6 +685,17 @@ def _add_imports_to_module(import_tools, pymodule, new_imports): def moving_code_with_imports(project, resource, source): import_tools = importutils.ImportTools(project) pymodule = libutils.get_string_module(project, source, resource) + + # Strip comment prefix, if any. These need to stay before the moving + # section, but imports would be added between them. 
+ lines = codeanalyze.SourceLinesAdapter(source) + start = 1 + while start < lines.length() and lines.get_line(start).startswith('#'): + start += 1 + moving_prefix = source[:lines.get_line_start(start)] + pymodule = libutils.get_string_module( + project, source[lines.get_line_start(start):], resource) + origin = project.get_pymodule(resource) imports = [] @@ -662,7 +726,9 @@ def moving_code_with_imports(project, resource, source): lines = codeanalyze.SourceLinesAdapter(source) while start < lines.length() and not lines.get_line(start).strip(): start += 1 - moving = source[lines.get_line_start(start):] + + # Reinsert the prefix which was removed at the beginning + moving = moving_prefix + source[lines.get_line_start(start):] return moving, imports diff --git a/pymode/libs2/rope/refactor/multiproject.py b/pymode/libs/rope/refactor/multiproject.py similarity index 100% rename from pymode/libs2/rope/refactor/multiproject.py rename to pymode/libs/rope/refactor/multiproject.py diff --git a/pymode/libs2/rope/refactor/occurrences.py b/pymode/libs/rope/refactor/occurrences.py similarity index 95% rename from pymode/libs2/rope/refactor/occurrences.py rename to pymode/libs/rope/refactor/occurrences.py index 14a2d7de..dfc2d685 100644 --- a/pymode/libs2/rope/refactor/occurrences.py +++ b/pymode/libs/rope/refactor/occurrences.py @@ -30,6 +30,9 @@ * `instance`: Used only when you want implicit interfaces to be considered. + + * `keywords`: If False, don't return instances that are the names of keyword + arguments """ import re @@ -81,7 +84,8 @@ def find_occurrences(self, resource=None, pymodule=None): def create_finder(project, name, pyname, only_calls=False, imports=True, - unsure=None, docs=False, instance=None, in_hierarchy=False): + unsure=None, docs=False, instance=None, in_hierarchy=False, + keywords=True): """A factory for `Finder` Based on the arguments it creates a list of filters. 
`instance` @@ -95,6 +99,8 @@ def create_finder(project, name, pyname, only_calls=False, imports=True, filters.append(CallsFilter()) if not imports: filters.append(NoImportsFilter()) + if not keywords: + filters.append(NoKeywordsFilter()) if isinstance(instance, pynames.ParameterName): for pyobject in instance.get_objects(): try: @@ -163,6 +169,10 @@ def is_written(self): def is_unsure(self): return unsure_pyname(self.get_pyname()) + def is_function_keyword_parameter(self): + return self.tools.word_finder.is_function_keyword_parameter( + self.offset) + @property @utils.saveit def lineno(self): @@ -274,6 +284,14 @@ def __call__(self, occurrence): return False +class NoKeywordsFilter(object): + """Filter out keyword parameters.""" + + def __call__(self, occurrence): + if occurrence.is_function_keyword_parameter(): + return False + + class _TextualFinder(object): def __init__(self, name, docs=False): diff --git a/pymode/libs2/rope/refactor/patchedast.py b/pymode/libs/rope/refactor/patchedast.py similarity index 87% rename from pymode/libs2/rope/refactor/patchedast.py rename to pymode/libs/rope/refactor/patchedast.py index 28d36d5a..10f0a05c 100644 --- a/pymode/libs2/rope/refactor/patchedast.py +++ b/pymode/libs/rope/refactor/patchedast.py @@ -3,6 +3,12 @@ import warnings from rope.base import ast, codeanalyze, exceptions +from rope.base.utils import pycompat + +try: + basestring +except NameError: + basestring = (str, bytes) def get_patched_ast(source, sorted_children=False): @@ -265,11 +271,11 @@ def _Call(self, node): children = [node.func, '('] args = list(node.args) + node.keywords children.extend(self._child_nodes(args, ',')) - if node.starargs is not None: + if getattr(node, 'starargs', None): if args: children.append(',') children.extend(['*', node.starargs]) - if node.kwargs is not None: + if getattr(node, 'kwargs', None): if args or node.starargs is not None: children.append(',') children.extend(['**', node.kwargs]) @@ -396,11 +402,11 @@ def _arguments(self, node): if node.vararg is not None: if args: children.append(',') - children.extend(['*', node.vararg]) + children.extend(['*', pycompat.get_ast_arg_arg(node.vararg)]) if node.kwarg is not None: if args or node.vararg is not None: children.append(',') - children.extend(['**', node.kwarg]) + children.extend(['**', pycompat.get_ast_arg_arg(node.kwarg)]) self._handle(node, children) def _add_args_to_children(self, children, arg, default): @@ -475,7 +481,12 @@ def _Import(self, node): self._handle(node, children) def _keyword(self, node): - self._handle(node, [node.arg, '=', node.value]) + children = [] + if node.arg is None: + children.append(node.value) + else: + children.extend([node.arg, '=', node.value]) + self._handle(node, children) def _Lambda(self, node): self._handle(node, ['lambda', node.args, ':', node.body]) @@ -489,12 +500,41 @@ def _ListComp(self, node): children.append(']') self._handle(node, children) + def _Set(self, node): + if node.elts: + self._handle(node, + ['{'] + self._child_nodes(node.elts, ',') + ['}']) + return + # Python doesn't have empty set literals + warnings.warn('Tried to handle empty literal; please report!', + RuntimeWarning) + self._handle(node, ['set(', ')']) + + def _SetComp(self, node): + children = ['{', node.elt] + children.extend(node.generators) + children.append('}') + self._handle(node, children) + + def _DictComp(self, node): + children = ['{'] + children.extend([node.key, ':', node.value]) + children.extend(node.generators) + children.append('}') + self._handle(node, children) + def 
_Module(self, node): self._handle(node, list(node.body), eat_spaces=True) def _Name(self, node): self._handle(node, [node.id]) + def _NameConstant(self, node): + self._handle(node, [str(node.value)]) + + def _arg(self, node): + self._handle(node, [node.arg]) + def _Pass(self, node): self._handle(node, ['pass']) @@ -510,15 +550,30 @@ def _Print(self, node): self._handle(node, children) def _Raise(self, node): - children = ['raise'] - if node.type: - children.append(node.type) - if node.inst: - children.append(',') - children.append(node.inst) - if node.tback: - children.append(',') - children.append(node.tback) + + def get_python3_raise_children(node): + children = ['raise'] + if node.exc: + children.append(node.exc) + if node.cause: + children.append(node.cause) + return children + + def get_python2_raise_children(node): + children = ['raise'] + if node.type: + children.append(node.type) + if node.inst: + children.append(',') + children.append(node.inst) + if node.tback: + children.append(',') + children.append(node.tback) + return children + if pycompat.PY2: + children = get_python2_raise_children(node) + else: + children = get_python3_raise_children(node) self._handle(node, children) def _Return(self, node): @@ -555,10 +610,25 @@ def _Slice(self, node): self._handle(node, children) def _TryFinally(self, node): + # @todo fixme + is_there_except_handler = False + not_empty_body = True + if len(node.finalbody) == 1: + if pycompat.PY2: + is_there_except_handler = isinstance(node.body[0], ast.TryExcept) + not_empty_body = not bool(len(node.body)) + elif pycompat.PY3: + try: + is_there_except_handler = isinstance(node.handlers[0], ast.ExceptHandler) + not_empty_body = True + except IndexError: + pass children = [] - if len(node.body) != 1 or not isinstance(node.body[0], ast.TryExcept): + if not_empty_body or not is_there_except_handler: children.extend(['try', ':']) children.extend(node.body) + if pycompat.PY3: + children.extend(node.handlers) children.extend(['finally', ':']) children.extend(node.finalbody) self._handle(node, children) @@ -572,6 +642,12 @@ def _TryExcept(self, node): children.extend(node.orelse) self._handle(node, children) + def _Try(self, node): + if len(node.finalbody): + self._TryFinally(node) + else: + self._TryExcept(node) + def _ExceptHandler(self, node): self._excepthandler(node) @@ -615,9 +691,11 @@ def _While(self, node): self._handle(node, children) def _With(self, node): - children = ['with', node.context_expr] - if node.optional_vars: - children.extend(['as', node.optional_vars]) + children = [] + for item in pycompat.get_ast_with_items(node): + children.extend(['with', item.context_expr]) + if item.optional_vars: + children.extend(['as', item.optional_vars]) children.append(':') children.extend(node.body) self._handle(node, children) @@ -630,6 +708,8 @@ def _child_nodes(self, nodes, separator): children.append(separator) return children + def _Starred(self, node): + self._handle(node, [node.value]) class _Source(object): @@ -741,8 +821,8 @@ def __getslice__(self, i, j): def _get_number_pattern(self): # HACK: It is merely an approaximation and does the job - integer = r'(0|0x)?[\da-fA-F]+[lL]?' - return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d*)?[jJ]?' % integer + integer = r'\-?(0x[\da-fA-F]+|\d+)[lL]?' + return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d+)?[jJ]?' 
% integer _string_pattern = None _number_pattern = None diff --git a/pymode/libs2/rope/refactor/rename.py b/pymode/libs/rope/refactor/rename.py similarity index 100% rename from pymode/libs2/rope/refactor/rename.py rename to pymode/libs/rope/refactor/rename.py diff --git a/pymode/libs2/rope/refactor/restructure.py b/pymode/libs/rope/refactor/restructure.py similarity index 100% rename from pymode/libs2/rope/refactor/restructure.py rename to pymode/libs/rope/refactor/restructure.py diff --git a/pymode/libs2/rope/refactor/similarfinder.py b/pymode/libs/rope/refactor/similarfinder.py similarity index 99% rename from pymode/libs2/rope/refactor/similarfinder.py rename to pymode/libs/rope/refactor/similarfinder.py index f1a7d42d..425f9ed9 100644 --- a/pymode/libs2/rope/refactor/similarfinder.py +++ b/pymode/libs/rope/refactor/similarfinder.py @@ -28,7 +28,7 @@ def __init__(self, pymodule, wildcards=None): self.raw_finder = RawSimilarFinder( pymodule.source_code, pymodule.get_ast(), self._does_match) except MismatchedTokenError: - print "in file %s" % pymodule.resource.path + print("in file %s" % pymodule.resource.path) raise self.pymodule = pymodule if wildcards is None: diff --git a/pymode/libs2/rope/refactor/sourceutils.py b/pymode/libs/rope/refactor/sourceutils.py similarity index 100% rename from pymode/libs2/rope/refactor/sourceutils.py rename to pymode/libs/rope/refactor/sourceutils.py diff --git a/pymode/libs2/rope/refactor/suites.py b/pymode/libs/rope/refactor/suites.py similarity index 86% rename from pymode/libs2/rope/refactor/suites.py rename to pymode/libs/rope/refactor/suites.py index 4f9a8c71..68785080 100644 --- a/pymode/libs2/rope/refactor/suites.py +++ b/pymode/libs/rope/refactor/suites.py @@ -1,4 +1,5 @@ from rope.base import ast +from rope.base.utils import pycompat def find_visible(node, lines): @@ -115,13 +116,27 @@ def _With(self, node): self.suites.append(Suite(node.body, node.lineno, self.suite)) def _TryFinally(self, node): - if len(node.finalbody) == 1 and \ - isinstance(node.body[0], ast.TryExcept): - self._TryExcept(node.body[0]) + proceed_to_except_handler = False + if len(node.finalbody) == 1: + if pycompat.PY2: + proceed_to_except_handler = isinstance(node.body[0], ast.TryExcept) + elif pycompat.PY3: + try: + proceed_to_except_handler = isinstance(node.handlers[0], ast.ExceptHandler) + except IndexError: + pass + if proceed_to_except_handler: + self._TryExcept(node if pycompat.PY3 else node.body[0]) else: self.suites.append(Suite(node.body, node.lineno, self.suite)) self.suites.append(Suite(node.finalbody, node.lineno, self.suite)) + def _Try(self, node): + if len(node.finalbody) == 1: + self._TryFinally(node) + else: + self._TryExcept(node) + def _TryExcept(self, node): self.suites.append(Suite(node.body, node.lineno, self.suite)) for handler in node.handlers: diff --git a/pymode/libs2/rope/refactor/topackage.py b/pymode/libs/rope/refactor/topackage.py similarity index 100% rename from pymode/libs2/rope/refactor/topackage.py rename to pymode/libs/rope/refactor/topackage.py diff --git a/pymode/libs2/rope/refactor/usefunction.py b/pymode/libs/rope/refactor/usefunction.py similarity index 100% rename from pymode/libs2/rope/refactor/usefunction.py rename to pymode/libs/rope/refactor/usefunction.py diff --git a/pymode/libs2/rope/refactor/wildcards.py b/pymode/libs/rope/refactor/wildcards.py similarity index 100% rename from pymode/libs2/rope/refactor/wildcards.py rename to pymode/libs/rope/refactor/wildcards.py diff --git a/pymode/libs2/rope/base/astutils.py 
b/pymode/libs2/rope/base/astutils.py deleted file mode 100644 index 8ace1a92..00000000 --- a/pymode/libs2/rope/base/astutils.py +++ /dev/null @@ -1,61 +0,0 @@ -from rope.base import ast - - -def get_name_levels(node): - """Return a list of ``(name, level)`` tuples for assigned names - - The `level` is `None` for simple assignments and is a list of - numbers for tuple assignments for example in:: - - a, (b, c) = x - - The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for - `c` is ``[1, 1]``. - - """ - visitor = _NodeNameCollector() - ast.walk(node, visitor) - return visitor.names - - -class _NodeNameCollector(object): - - def __init__(self, levels=None): - self.names = [] - self.levels = levels - self.index = 0 - - def _add_node(self, node): - new_levels = [] - if self.levels is not None: - new_levels = list(self.levels) - new_levels.append(self.index) - self.index += 1 - self._added(node, new_levels) - - def _added(self, node, levels): - if hasattr(node, 'id'): - self.names.append((node.id, levels)) - - def _Name(self, node): - self._add_node(node) - - def _Tuple(self, node): - new_levels = [] - if self.levels is not None: - new_levels = list(self.levels) - new_levels.append(self.index) - self.index += 1 - visitor = _NodeNameCollector(new_levels) - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - self.names.extend(visitor.names) - - def _Subscript(self, node): - self._add_node(node) - - def _Attribute(self, node): - self._add_node(node) - - def _Slice(self, node): - self._add_node(node) diff --git a/pymode/libs3/rope/__init__.py b/pymode/libs3/rope/__init__.py deleted file mode 100644 index a936fe29..00000000 --- a/pymode/libs3/rope/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -"""rope, a python refactoring library""" - -INFO = __doc__ -VERSION = '0.9.4-1' -COPYRIGHT = """\ -Copyright (C) 2006-2010 Ali Gholami Rudi -Copyright (C) 2009-2010 Anton Gritsay -Copyright (C) 2011 Dmitriy Zhukov - -This program is free software; you can redistribute it and/or modify it -under the terms of GNU General Public License as published by the -Free Software Foundation; either version 2 of the license, or (at your -opinion) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details.""" diff --git a/pymode/libs3/rope/base/__init__.py b/pymode/libs3/rope/base/__init__.py deleted file mode 100644 index ff5f8c63..00000000 --- a/pymode/libs3/rope/base/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Base rope package - -This package contains rope core modules that are used by other modules -and packages. - -""" - -__all__ = ['project', 'libutils', 'exceptions'] diff --git a/pymode/libs3/rope/base/arguments.py b/pymode/libs3/rope/base/arguments.py deleted file mode 100644 index 342e2ae5..00000000 --- a/pymode/libs3/rope/base/arguments.py +++ /dev/null @@ -1,109 +0,0 @@ -import rope.base.evaluate -from rope.base import ast - - -class Arguments(object): - """A class for evaluating parameters passed to a function - - You can use the `create_arguments` factory. It handles implicit - first arguments. 
- - """ - - def __init__(self, args, scope): - self.args = args - self.scope = scope - self.instance = None - - def get_arguments(self, parameters): - result = [] - for pyname in self.get_pynames(parameters): - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_pynames(self, parameters): - result = [None] * max(len(parameters), len(self.args)) - for index, arg in enumerate(self.args): - if isinstance(arg, ast.keyword) and arg.arg in parameters: - result[parameters.index(arg.arg)] = self._evaluate(arg.value) - else: - result[index] = self._evaluate(arg) - return result - - def get_instance_pyname(self): - if self.args: - return self._evaluate(self.args[0]) - - def _evaluate(self, ast_node): - return rope.base.evaluate.eval_node(self.scope, ast_node) - - -def create_arguments(primary, pyfunction, call_node, scope): - """A factory for creating `Arguments`""" - args = list(call_node.args) - args.extend(call_node.keywords) - called = call_node.func - # XXX: Handle constructors - if _is_method_call(primary, pyfunction) and \ - isinstance(called, ast.Attribute): - args.insert(0, called.value) - return Arguments(args, scope) - - -class ObjectArguments(object): - - def __init__(self, pynames): - self.pynames = pynames - - def get_arguments(self, parameters): - result = [] - for pyname in self.pynames: - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_pynames(self, parameters): - return self.pynames - - def get_instance_pyname(self): - return self.pynames[0] -class MixedArguments(object): - - def __init__(self, pyname, arguments, scope): - """`argumens` is an instance of `Arguments`""" - self.pyname = pyname - self.args = arguments - - def get_pynames(self, parameters): - return [self.pyname] + self.args.get_pynames(parameters[1:]) - - def get_arguments(self, parameters): - result = [] - for pyname in self.get_pynames(parameters): - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_instance_pyname(self): - return self.pyname - - -def _is_method_call(primary, pyfunction): - if primary is None: - return False - pyobject = primary.get_object() - if isinstance(pyobject.get_type(), rope.base.pyobjects.PyClass) and \ - isinstance(pyfunction, rope.base.pyobjects.PyFunction) and \ - isinstance(pyfunction.parent, rope.base.pyobjects.PyClass): - return True - if isinstance(pyobject.get_type(), rope.base.pyobjects.AbstractClass) and \ - isinstance(pyfunction, rope.base.builtins.BuiltinFunction): - return True - return False diff --git a/pymode/libs3/rope/base/ast.py b/pymode/libs3/rope/base/ast.py deleted file mode 100644 index 680a4ba3..00000000 --- a/pymode/libs3/rope/base/ast.py +++ /dev/null @@ -1,68 +0,0 @@ -import _ast -from _ast import * - -from rope.base import fscommands - - -def parse(source, filename=''): - # NOTE: the raw string should be given to `compile` function - if isinstance(source, str): - source = fscommands.unicode_to_file_data(source) - source = source.decode() - if '\r' in source: - source = source.replace('\r\n', '\n').replace('\r', '\n') - if not source.endswith('\n'): - source += '\n' - try: - return compile(source.encode(), filename, 'exec', _ast.PyCF_ONLY_AST) - except (TypeError, ValueError) as e: - error = SyntaxError() - error.lineno = 1 - error.filename = filename - error.msg = str(e) - raise error - - -def walk(node, walker): - """Walk the syntax tree""" - method_name = '_' + 
node.__class__.__name__ - method = getattr(walker, method_name, None) - if method is not None: - return method(node) - for child in get_child_nodes(node): - walk(child, walker) - - -def get_child_nodes(node): - if isinstance(node, _ast.Module): - return node.body - result = [] - if node._fields is not None: - for name in node._fields: - child = getattr(node, name) - if isinstance(child, list): - for entry in child: - if isinstance(entry, _ast.AST): - result.append(entry) - if isinstance(child, _ast.AST): - result.append(child) - return result - - -def call_for_nodes(node, callback, recursive=False): - """If callback returns `True` the child nodes are skipped""" - result = callback(node) - if recursive and not result: - for child in get_child_nodes(node): - call_for_nodes(child, callback, recursive) - - -def get_children(node): - result = [] - if node._fields is not None: - for name in node._fields: - if name in ['lineno', 'col_offset']: - continue - child = getattr(node, name) - result.append(child) - return result diff --git a/pymode/libs3/rope/base/builtins.py b/pymode/libs3/rope/base/builtins.py deleted file mode 100644 index 3101631a..00000000 --- a/pymode/libs3/rope/base/builtins.py +++ /dev/null @@ -1,782 +0,0 @@ -"""This module trys to support builtin types and functions.""" -import inspect - -import rope.base.evaluate -from rope.base import pynames, pyobjects, arguments, utils, ast - - -class BuiltinModule(pyobjects.AbstractModule): - - def __init__(self, name, pycore=None, initial={}): - super(BuiltinModule, self).__init__() - self.name = name - self.pycore = pycore - self.initial = initial - - parent = None - - def get_attributes(self): - return self.attributes - - def get_doc(self): - if self.module: - return self.module.__doc__ - - def get_name(self): - return self.name.split('.')[-1] - - @property - @utils.saveit - def attributes(self): - result = _object_attributes(self.module, self) - result.update(self.initial) - if self.pycore is not None: - submodules = self.pycore._builtin_submodules(self.name) - for name, module in submodules.items(): - result[name] = rope.base.builtins.BuiltinName(module) - return result - - @property - @utils.saveit - def module(self): - try: - result = __import__(self.name) - for token in self.name.split('.')[1:]: - result = getattr(result, token, None) - return result - except ImportError: - return - - -class _BuiltinElement(object): - - def __init__(self, builtin, parent=None): - self.builtin = builtin - self._parent = parent - - def get_doc(self): - if self.builtin: - return getattr(self.builtin, '__doc__', None) - - def get_name(self): - if self.builtin: - return getattr(self.builtin, '__name__', None) - - @property - def parent(self): - if self._parent is None: - return builtins - return self._parent - - -class BuiltinClass(_BuiltinElement, pyobjects.AbstractClass): - - def __init__(self, builtin, attributes, parent=None): - _BuiltinElement.__init__(self, builtin, parent) - pyobjects.AbstractClass.__init__(self) - self.initial = attributes - - @utils.saveit - def get_attributes(self): - result = _object_attributes(self.builtin, self) - result.update(self.initial) - return result - - -class BuiltinFunction(_BuiltinElement, pyobjects.AbstractFunction): - - def __init__(self, returned=None, function=None, builtin=None, - argnames=[], parent=None): - _BuiltinElement.__init__(self, builtin, parent) - pyobjects.AbstractFunction.__init__(self) - self.argnames = argnames - self.returned = returned - self.function = function - - def 
get_returned_object(self, args): - if self.function is not None: - return self.function(_CallContext(self.argnames, args)) - else: - return self.returned - - def get_param_names(self, special_args=True): - return self.argnames - - @utils.saveit - def get_attributes(self): - result = _object_attributes(self.builtin.__class__, self) - return result - - -class BuiltinUnknown(_BuiltinElement, pyobjects.PyObject): - - def __init__(self, builtin): - super(BuiltinUnknown, self).__init__(pyobjects.get_unknown()) - self.builtin = builtin - self.type = pyobjects.get_unknown() - - def get_name(self): - return getattr(type(self.builtin), '__name__', None) - - @utils.saveit - def get_attributes(self): - return _object_attributes(self.builtin, self) - - -def _object_attributes(obj, parent): - attributes = {} - for name in dir(obj): - if name == 'None': - continue - try: - child = getattr(obj, name) - except AttributeError: - # descriptors are allowed to raise AttributeError - # even if they are in dir() - continue - pyobject = None - if inspect.isclass(child): - pyobject = BuiltinClass(child, {}, parent=parent) - elif inspect.isroutine(child): - if inspect.ismethoddescriptor(child) and "__weakref__" in dir(obj): - try: - weak = child.__get__(obj.__weakref__.__objclass__()) - except: - weak = child - pyobject = BuiltinFunction(builtin=weak, parent=parent) - else: - pyobject = BuiltinFunction(builtin=child, parent=parent) - else: - pyobject = BuiltinUnknown(builtin=child) - attributes[name] = BuiltinName(pyobject) - return attributes - - -def _create_builtin_type_getter(cls): - def _get_builtin(*args): - if not hasattr(cls, '_generated'): - cls._generated = {} - if args not in cls._generated: - cls._generated[args] = cls(*args) - return cls._generated[args] - return _get_builtin - -def _create_builtin_getter(cls): - type_getter = _create_builtin_type_getter(cls) - def _get_builtin(*args): - return pyobjects.PyObject(type_getter(*args)) - return _get_builtin - - -class _CallContext(object): - - def __init__(self, argnames, args): - self.argnames = argnames - self.args = args - - def _get_scope_and_pyname(self, pyname): - if pyname is not None and isinstance(pyname, pynames.AssignedName): - pymodule, lineno = pyname.get_definition_location() - if pymodule is None: - return None, None - if lineno is None: - lineno = 1 - scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - name = None - while name is None and scope is not None: - for current in scope.get_names(): - if scope[current] is pyname: - name = current - break - else: - scope = scope.parent - return scope, name - return None, None - - def get_argument(self, name): - if self.args: - args = self.args.get_arguments(self.argnames) - return args[self.argnames.index(name)] - - def get_pyname(self, name): - if self.args: - args = self.args.get_pynames(self.argnames) - if name in self.argnames: - return args[self.argnames.index(name)] - - def get_arguments(self, argnames): - if self.args: - return self.args.get_arguments(argnames) - - def get_pynames(self, argnames): - if self.args: - return self.args.get_pynames(argnames) - - def get_per_name(self): - if self.args is None: - return None - pyname = self.args.get_instance_pyname() - scope, name = self._get_scope_and_pyname(pyname) - if name is not None: - pymodule = pyname.get_definition_location()[0] - return pymodule.pycore.object_info.get_per_name(scope, name) - return None - - def save_per_name(self, value): - if self.args is None: - return None - pyname = self.args.get_instance_pyname() - 
scope, name = self._get_scope_and_pyname(pyname) - if name is not None: - pymodule = pyname.get_definition_location()[0] - pymodule.pycore.object_info.save_per_name(scope, name, value) - - -class _AttributeCollector(object): - - def __init__(self, type): - self.attributes = {} - self.type = type - - def __call__(self, name, returned=None, function=None, - argnames=['self'], check_existence=True): - try: - builtin = getattr(self.type, name) - except AttributeError: - if check_existence: - raise - builtin=None - self.attributes[name] = BuiltinName( - BuiltinFunction(returned=returned, function=function, - argnames=argnames, builtin=builtin)) - - def __setitem__(self, name, value): - self.attributes[name] = value - - -class List(BuiltinClass): - - def __init__(self, holding=None): - self.holding = holding - collector = _AttributeCollector(list) - - collector('__iter__', function=self._iterator_get) - collector('__new__', function=self._new_list) - - # Adding methods - collector('append', function=self._list_add, argnames=['self', 'value']) - collector('__setitem__', function=self._list_add, - argnames=['self', 'index', 'value']) - collector('insert', function=self._list_add, - argnames=['self', 'index', 'value']) - collector('extend', function=self._self_set, - argnames=['self', 'iterable']) - - # Getting methods - collector('__getitem__', function=self._list_get) - collector('pop', function=self._list_get) - - super(List, self).__init__(list, collector.attributes) - - def _new_list(self, args): - return _create_builtin(args, get_list) - - def _list_add(self, context): - if self.holding is not None: - return - holding = context.get_argument('value') - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _self_set(self, context): - if self.holding is not None: - return - iterable = context.get_pyname('iterable') - holding = _infer_sequence_for_pyname(iterable) - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _list_get(self, context): - if self.holding is not None: - args = context.get_arguments(['self', 'key']) - if len(args) > 1 and args[1] is not None \ - and args[1].get_type() == builtins['slice'].get_object(): - return get_list(self.holding) - return self.holding - return context.get_per_name() - - def _iterator_get(self, context): - return get_iterator(self._list_get(context)) - - def _self_get(self, context): - return get_list(self._list_get(context)) - - -get_list = _create_builtin_getter(List) -get_list_type = _create_builtin_type_getter(List) - - -class Dict(BuiltinClass): - - def __init__(self, keys=None, values=None): - self.keys = keys - self.values = values - item = get_tuple(self.keys, self.values) - collector = _AttributeCollector(dict) - collector('__new__', function=self._new_dict) - collector('__setitem__', function=self._dict_add) - collector('popitem', function=self._item_get) - collector('pop', function=self._value_get) - collector('get', function=self._key_get) - collector('keys', function=self._key_list) - collector('values', function=self._value_list) - collector('items', function=self._item_list) - collector('copy', function=self._self_get) - collector('__getitem__', function=self._value_get) - collector('__iter__', function=self._key_iter) - collector('update', function=self._self_set) - super(Dict, self).__init__(dict, collector.attributes) - - def _new_dict(self, args): - def do_create(holding=None): - if holding is None: - return get_dict() - type = 
holding.get_type() - if isinstance(type, Tuple) and len(type.get_holding_objects()) == 2: - return get_dict(*type.get_holding_objects()) - return _create_builtin(args, do_create) - - def _dict_add(self, context): - if self.keys is not None: - return - key, value = context.get_arguments(['self', 'key', 'value'])[1:] - if key is not None and key != pyobjects.get_unknown(): - context.save_per_name(get_tuple(key, value)) - - def _item_get(self, context): - if self.keys is not None: - return get_tuple(self.keys, self.values) - item = context.get_per_name() - if item is None or not isinstance(item.get_type(), Tuple): - return get_tuple(self.keys, self.values) - return item - - def _value_get(self, context): - item = self._item_get(context).get_type() - return item.get_holding_objects()[1] - - def _key_get(self, context): - item = self._item_get(context).get_type() - return item.get_holding_objects()[0] - - def _value_list(self, context): - return get_list(self._value_get(context)) - - def _key_list(self, context): - return get_list(self._key_get(context)) - - def _item_list(self, context): - return get_list(self._item_get(context)) - - def _value_iter(self, context): - return get_iterator(self._value_get(context)) - - def _key_iter(self, context): - return get_iterator(self._key_get(context)) - - def _item_iter(self, context): - return get_iterator(self._item_get(context)) - - def _self_get(self, context): - item = self._item_get(context).get_type() - key, value = item.get_holding_objects()[:2] - return get_dict(key, value) - - def _self_set(self, context): - if self.keys is not None: - return - new_dict = context.get_pynames(['self', 'd'])[1] - if new_dict and isinstance(new_dict.get_object().get_type(), Dict): - args = arguments.ObjectArguments([new_dict]) - items = new_dict.get_object()['popitem'].\ - get_object().get_returned_object(args) - context.save_per_name(items) - else: - holding = _infer_sequence_for_pyname(new_dict) - if holding is not None and isinstance(holding.get_type(), Tuple): - context.save_per_name(holding) - - -get_dict = _create_builtin_getter(Dict) -get_dict_type = _create_builtin_type_getter(Dict) - - -class Tuple(BuiltinClass): - - def __init__(self, *objects): - self.objects = objects - first = None - if objects: - first = objects[0] - attributes = { - '__getitem__': BuiltinName(BuiltinFunction(first)), - '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), - '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)), - '__iter__': BuiltinName(BuiltinFunction(get_iterator(first)))} - super(Tuple, self).__init__(tuple, attributes) - - def get_holding_objects(self): - return self.objects - - def _new_tuple(self, args): - return _create_builtin(args, get_tuple) - - -get_tuple = _create_builtin_getter(Tuple) -get_tuple_type = _create_builtin_type_getter(Tuple) - - -class Set(BuiltinClass): - - def __init__(self, holding=None): - self.holding = holding - collector = _AttributeCollector(set) - collector('__new__', function=self._new_set) - - self_methods = ['copy', 'difference', 'intersection', - 'symmetric_difference', 'union'] - for method in self_methods: - collector(method, function=self._self_get) - collector('add', function=self._set_add) - collector('update', function=self._self_set) - collector('update', function=self._self_set) - collector('symmetric_difference_update', function=self._self_set) - collector('difference_update', function=self._self_set) - - collector('pop', function=self._set_get) - collector('__iter__', 
function=self._iterator_get) - super(Set, self).__init__(set, collector.attributes) - - def _new_set(self, args): - return _create_builtin(args, get_set) - - def _set_add(self, context): - if self.holding is not None: - return - holding = context.get_arguments(['self', 'value'])[1] - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _self_set(self, context): - if self.holding is not None: - return - iterable = context.get_pyname('iterable') - holding = _infer_sequence_for_pyname(iterable) - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _set_get(self, context): - if self.holding is not None: - return self.holding - return context.get_per_name() - - def _iterator_get(self, context): - return get_iterator(self._set_get(context)) - - def _self_get(self, context): - return get_list(self._set_get(context)) - - -get_set = _create_builtin_getter(Set) -get_set_type = _create_builtin_type_getter(Set) - - -class Str(BuiltinClass): - - def __init__(self): - self_object = pyobjects.PyObject(self) - collector = _AttributeCollector(str) - collector('__iter__', get_iterator(self_object), check_existence=False) - - self_methods = ['__getitem__', 'capitalize', 'center', - 'encode', 'expandtabs', 'join', 'ljust', - 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip', - 'swapcase', 'title', 'translate', 'upper', 'zfill'] - for method in self_methods: - collector(method, self_object) - - for method in ['rsplit', 'split', 'splitlines']: - collector(method, get_list(self_object)) - - super(Str, self).__init__(str, collector.attributes) - - def get_doc(self): - return str.__doc__ - - -get_str = _create_builtin_getter(Str) -get_str_type = _create_builtin_type_getter(Str) - - -class BuiltinName(pynames.PyName): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (None, None) - -class Iterator(pyobjects.AbstractClass): - - def __init__(self, holding=None): - super(Iterator, self).__init__() - self.holding = holding - self.attributes = { - 'next': BuiltinName(BuiltinFunction(self.holding)), - '__iter__': BuiltinName(BuiltinFunction(self))} - - def get_attributes(self): - return self.attributes - - def get_returned_object(self, args): - return self.holding - -get_iterator = _create_builtin_getter(Iterator) - - -class Generator(pyobjects.AbstractClass): - - def __init__(self, holding=None): - super(Generator, self).__init__() - self.holding = holding - self.attributes = { - 'next': BuiltinName(BuiltinFunction(self.holding)), - '__iter__': BuiltinName(BuiltinFunction(get_iterator(self.holding))), - 'close': BuiltinName(BuiltinFunction()), - 'send': BuiltinName(BuiltinFunction()), - 'throw': BuiltinName(BuiltinFunction())} - - def get_attributes(self): - return self.attributes - - def get_returned_object(self, args): - return self.holding - -get_generator = _create_builtin_getter(Generator) - - -class File(BuiltinClass): - - def __init__(self): - self_object = pyobjects.PyObject(self) - str_object = get_str() - str_list = get_list(get_str()) - attributes = {} - def add(name, returned=None, function=None): - builtin = getattr(open, name, None) - attributes[name] = BuiltinName( - BuiltinFunction(returned=returned, function=function, - builtin=builtin)) - add('__iter__', get_iterator(str_object)) - for method in ['next', 'read', 'readline', 'readlines']: - add(method, str_list) - for method in ['close', 
'flush', 'lineno', 'isatty', 'seek', 'tell', - 'truncate', 'write', 'writelines']: - add(method) - super(File, self).__init__(open, attributes) - - -get_file = _create_builtin_getter(File) -get_file_type = _create_builtin_type_getter(File) - - -class Property(BuiltinClass): - - def __init__(self, fget=None, fset=None, fdel=None, fdoc=None): - self._fget = fget - self._fdoc = fdoc - attributes = { - 'fget': BuiltinName(BuiltinFunction()), - 'fset': BuiltinName(pynames.UnboundName()), - 'fdel': BuiltinName(pynames.UnboundName()), - '__new__': BuiltinName(BuiltinFunction(function=_property_function))} - super(Property, self).__init__(property, attributes) - - def get_property_object(self, args): - if isinstance(self._fget, pyobjects.AbstractFunction): - return self._fget.get_returned_object(args) - - -def _property_function(args): - parameters = args.get_arguments(['fget', 'fset', 'fdel', 'fdoc']) - return pyobjects.PyObject(Property(parameters[0])) - - -class Lambda(pyobjects.AbstractFunction): - - def __init__(self, node, scope): - super(Lambda, self).__init__() - self.node = node - self.arguments = node.args - self.scope = scope - - def get_returned_object(self, args): - result = rope.base.evaluate.eval_node(self.scope, self.node.body) - if result is not None: - return result.get_object() - else: - return pyobjects.get_unknown() - - def get_module(self): - return self.parent.get_module() - - def get_scope(self): - return self.scope - - def get_kind(self): - return 'lambda' - - def get_ast(self): - return self.node - - def get_attributes(self): - return {} - - def get_name(self): - return 'lambda' - - def get_param_names(self, special_args=True): - result = [node.arg for node in self.arguments.args - if isinstance(node, ast.arg)] - if self.arguments.vararg: - result.append('*' + self.arguments.vararg) - if self.arguments.kwarg: - result.append('**' + self.arguments.kwarg) - return result - - @property - def parent(self): - return self.scope.pyobject - - -class BuiltinObject(BuiltinClass): - - def __init__(self): - super(BuiltinObject, self).__init__(object, {}) - - -class BuiltinType(BuiltinClass): - - def __init__(self): - super(BuiltinType, self).__init__(type, {}) - - -def _infer_sequence_for_pyname(pyname): - if pyname is None: - return None - seq = pyname.get_object() - args = arguments.ObjectArguments([pyname]) - if '__iter__' in seq: - obj = seq['__iter__'].get_object() - if not isinstance(obj, pyobjects.AbstractFunction): - return None - iter = obj.get_returned_object(args) - if iter is not None and 'next' in iter: - holding = iter['next'].get_object().\ - get_returned_object(args) - return holding - - -def _create_builtin(args, creator): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - if holding is not None: - return creator(holding) - else: - return creator() - - -def _range_function(args): - return get_list() - -def _reversed_function(args): - return _create_builtin(args, get_iterator) - -def _sorted_function(args): - return _create_builtin(args, get_list) - -def _super_function(args): - passed_class, passed_self = args.get_arguments(['type', 'self']) - if passed_self is None: - return passed_class - else: - #pyclass = passed_self.get_type() - pyclass = passed_class - if isinstance(pyclass, pyobjects.AbstractClass): - supers = pyclass.get_superclasses() - if supers: - return pyobjects.PyObject(supers[0]) - return passed_self - -def _zip_function(args): - args = 
args.get_pynames(['sequence']) - objects = [] - for seq in args: - if seq is None: - holding = None - else: - holding = _infer_sequence_for_pyname(seq) - objects.append(holding) - tuple = get_tuple(*objects) - return get_list(tuple) - -def _enumerate_function(args): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - tuple = get_tuple(None, holding) - return get_iterator(tuple) - -def _iter_function(args): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - return get_iterator(holding) - -def _input_function(args): - return get_str() - - -_initial_builtins = { - 'list': BuiltinName(get_list_type()), - 'dict': BuiltinName(get_dict_type()), - 'tuple': BuiltinName(get_tuple_type()), - 'set': BuiltinName(get_set_type()), - 'str': BuiltinName(get_str_type()), - 'file': BuiltinName(get_file_type()), - 'open': BuiltinName(get_file_type()), - 'unicode': BuiltinName(get_str_type()), - 'range': BuiltinName(BuiltinFunction(function=_range_function, builtin=range)), - 'reversed': BuiltinName(BuiltinFunction(function=_reversed_function, builtin=reversed)), - 'sorted': BuiltinName(BuiltinFunction(function=_sorted_function, builtin=sorted)), - 'super': BuiltinName(BuiltinFunction(function=_super_function, builtin=super)), - 'property': BuiltinName(BuiltinFunction(function=_property_function, builtin=property)), - 'zip': BuiltinName(BuiltinFunction(function=_zip_function, builtin=zip)), - 'enumerate': BuiltinName(BuiltinFunction(function=_enumerate_function, builtin=enumerate)), - 'object': BuiltinName(BuiltinObject()), - 'type': BuiltinName(BuiltinType()), - 'iter': BuiltinName(BuiltinFunction(function=_iter_function, builtin=iter)), - 'input': BuiltinName(BuiltinFunction(function=_input_function, builtin=input)), - } - -builtins = BuiltinModule('builtins', initial=_initial_builtins) diff --git a/pymode/libs3/rope/base/change.py b/pymode/libs3/rope/base/change.py deleted file mode 100644 index 3b0d8a14..00000000 --- a/pymode/libs3/rope/base/change.py +++ /dev/null @@ -1,448 +0,0 @@ -import datetime -import difflib -import os -import time -import warnings - -import rope.base.fscommands -from rope.base import taskhandle, exceptions, utils - - -class Change(object): - """The base class for changes - - Rope refactorings return `Change` objects. They can be previewed, - committed or undone. - """ - - def do(self, job_set=None): - """Perform the change - - .. note:: Do use this directly. Use `Project.do()` instead. - """ - - def undo(self, job_set=None): - """Perform the change - - .. note:: Do use this directly. Use `History.undo()` instead. - """ - - def get_description(self): - """Return the description of this change - - This can be used for previewing the changes. - """ - return str(self) - - def get_changed_resources(self): - """Return the list of resources that will be changed""" - return [] - - @property - @utils.saveit - def _operations(self): - return _ResourceOperations(self.resource.project) - - -class ChangeSet(Change): - """A collection of `Change` objects - - This class holds a collection of changes. 
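The builtins module removed above leans on a small memoising-factory idiom (`_create_builtin_type_getter`): one shared instance is created per distinct argument tuple and reused for every later request. A minimal sketch of that idiom with an illustrative class, not rope's:

    def create_type_getter(cls):
        def get_instance(*args):
            # Cache instances on the class, keyed by the argument tuple.
            if not hasattr(cls, '_generated'):
                cls._generated = {}
            if args not in cls._generated:
                cls._generated[args] = cls(*args)
            return cls._generated[args]
        return get_instance

    class ListType(object):
        def __init__(self, holding=None):
            self.holding = holding

    get_list_type = create_type_getter(ListType)
    assert get_list_type('str') is get_list_type('str')      # same cached object
    assert get_list_type('str') is not get_list_type('int')  # distinct arguments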
This class provides - these fields: - - * `changes`: the list of changes - * `description`: the goal of these changes - """ - - def __init__(self, description, timestamp=None): - self.changes = [] - self.description = description - self.time = timestamp - - def do(self, job_set=taskhandle.NullJobSet()): - try: - done = [] - for change in self.changes: - change.do(job_set) - done.append(change) - self.time = time.time() - except Exception: - for change in done: - change.undo() - raise - - def undo(self, job_set=taskhandle.NullJobSet()): - try: - done = [] - for change in reversed(self.changes): - change.undo(job_set) - done.append(change) - except Exception: - for change in done: - change.do() - raise - - def add_change(self, change): - self.changes.append(change) - - def get_description(self): - result = [str(self) + ':\n\n\n'] - for change in self.changes: - result.append(change.get_description()) - result.append('\n') - return ''.join(result) - - def __str__(self): - if self.time is not None: - date = datetime.datetime.fromtimestamp(self.time) - if date.date() == datetime.date.today(): - string_date = 'today' - elif date.date() == (datetime.date.today() - datetime.timedelta(1)): - string_date = 'yesterday' - elif date.year == datetime.date.today().year: - string_date = date.strftime('%b %d') - else: - string_date = date.strftime('%d %b, %Y') - string_time = date.strftime('%H:%M:%S') - string_time = '%s %s ' % (string_date, string_time) - return self.description + ' - ' + string_time - return self.description - - def get_changed_resources(self): - result = set() - for change in self.changes: - result.update(change.get_changed_resources()) - return result - - -def _handle_job_set(function): - """A decorator for handling `taskhandle.JobSet`\s - - A decorator for handling `taskhandle.JobSet`\s for `do` and `undo` - methods of `Change`\s. 
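`ChangeSet.do` and `undo` above implement an atomic batch: each change records what it did, and if one fails, the changes already performed are rolled back. A small sketch of that do/undo pattern with illustrative change objects (rope's real changes operate on project resources, not dictionaries):

    class SetValue(object):
        """Set store[key] = value and remember how to undo it."""
        def __init__(self, store, key, value):
            self.store, self.key, self.value = store, key, value
            self.had_key, self.old = False, None

        def do(self):
            self.had_key = self.key in self.store
            self.old = self.store.get(self.key)
            self.store[self.key] = self.value

        def undo(self):
            if self.had_key:
                self.store[self.key] = self.old
            else:
                del self.store[self.key]

    def do_all(changes):
        done = []
        try:
            for change in changes:
                change.do()
                done.append(change)
        except Exception:
            for change in reversed(done):   # roll back what already succeeded
                change.undo()
            raise

    store = {}
    do_all([SetValue(store, 'a', 1), SetValue(store, 'b', 2)])
    print(store)   # {'a': 1, 'b': 2}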
- """ - def call(self, job_set=taskhandle.NullJobSet()): - job_set.started_job(str(self)) - function(self) - job_set.finished_job() - return call - - -class ChangeContents(Change): - """A class to change the contents of a file - - Fields: - - * `resource`: The `rope.base.resources.File` to change - * `new_contents`: What to write in the file - """ - - def __init__(self, resource, new_contents, old_contents=None): - self.resource = resource - # IDEA: Only saving diffs; possible problems when undo/redoing - self.new_contents = new_contents - self.old_contents = old_contents - - @_handle_job_set - def do(self): - if self.old_contents is None: - self.old_contents = self.resource.read() - self._operations.write_file(self.resource, self.new_contents) - - @_handle_job_set - def undo(self): - if self.old_contents is None: - raise exceptions.HistoryError( - 'Undoing a change that is not performed yet!') - self._operations.write_file(self.resource, self.old_contents) - - def __str__(self): - return 'Change <%s>' % self.resource.path - - def get_description(self): - new = self.new_contents - old = self.old_contents - if old is None: - if self.resource.exists(): - old = self.resource.read() - else: - old = '' - result = difflib.unified_diff( - old.splitlines(True), new.splitlines(True), - 'a/' + self.resource.path, 'b/' + self.resource.path) - return ''.join(list(result)) - - def get_changed_resources(self): - return [self.resource] - - -class MoveResource(Change): - """Move a resource to a new location - - Fields: - - * `resource`: The `rope.base.resources.Resource` to move - * `new_resource`: The destination for move; It is the moved - resource not the folder containing that resource. - """ - - def __init__(self, resource, new_location, exact=False): - self.project = resource.project - self.resource = resource - if not exact: - new_location = _get_destination_for_move(resource, new_location) - if resource.is_folder(): - self.new_resource = self.project.get_folder(new_location) - else: - self.new_resource = self.project.get_file(new_location) - - @_handle_job_set - def do(self): - self._operations.move(self.resource, self.new_resource) - - @_handle_job_set - def undo(self): - self._operations.move(self.new_resource, self.resource) - - def __str__(self): - return 'Move <%s>' % self.resource.path - - def get_description(self): - return 'rename from %s\nrename to %s' % (self.resource.path, - self.new_resource.path) - - def get_changed_resources(self): - return [self.resource, self.new_resource] - - -class CreateResource(Change): - """A class to create a resource - - Fields: - - * `resource`: The resource to create - """ - - def __init__(self, resource): - self.resource = resource - - @_handle_job_set - def do(self): - self._operations.create(self.resource) - - @_handle_job_set - def undo(self): - self._operations.remove(self.resource) - - def __str__(self): - return 'Create Resource <%s>' % (self.resource.path) - - def get_description(self): - return 'new file %s' % (self.resource.path) - - def get_changed_resources(self): - return [self.resource] - - def _get_child_path(self, parent, name): - if parent.path == '': - return name - else: - return parent.path + '/' + name - - -class CreateFolder(CreateResource): - """A class to create a folder - - See docs for `CreateResource`. 
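`ChangeContents.get_description` above previews a change as a unified diff of the old and new file contents. The same effect can be had with the standard library directly (the path and contents here are made up for the example):

    import difflib

    old = "def greet():\n    print('hi')\n"
    new = "def greet(name):\n    print('hi', name)\n"

    preview = ''.join(difflib.unified_diff(
        old.splitlines(True), new.splitlines(True),
        'a/pkg/mod.py', 'b/pkg/mod.py'))
    print(preview)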
- """ - - def __init__(self, parent, name): - resource = parent.project.get_folder(self._get_child_path(parent, name)) - super(CreateFolder, self).__init__(resource) - - -class CreateFile(CreateResource): - """A class to create a file - - See docs for `CreateResource`. - """ - - def __init__(self, parent, name): - resource = parent.project.get_file(self._get_child_path(parent, name)) - super(CreateFile, self).__init__(resource) - - -class RemoveResource(Change): - """A class to remove a resource - - Fields: - - * `resource`: The resource to be removed - """ - - def __init__(self, resource): - self.resource = resource - - @_handle_job_set - def do(self): - self._operations.remove(self.resource) - - # TODO: Undoing remove operations - @_handle_job_set - def undo(self): - raise NotImplementedError( - 'Undoing `RemoveResource` is not implemented yet.') - - def __str__(self): - return 'Remove <%s>' % (self.resource.path) - - def get_changed_resources(self): - return [self.resource] - - -def count_changes(change): - """Counts the number of basic changes a `Change` will make""" - if isinstance(change, ChangeSet): - result = 0 - for child in change.changes: - result += count_changes(child) - return result - return 1 - -def create_job_set(task_handle, change): - return task_handle.create_jobset(str(change), count_changes(change)) - - -class _ResourceOperations(object): - - def __init__(self, project): - self.project = project - self.fscommands = project.fscommands - self.direct_commands = rope.base.fscommands.FileSystemCommands() - - def _get_fscommands(self, resource): - if self.project.is_ignored(resource): - return self.direct_commands - return self.fscommands - - def write_file(self, resource, contents): - data = rope.base.fscommands.unicode_to_file_data(contents) - fscommands = self._get_fscommands(resource) - fscommands.write(resource.real_path, data) - for observer in list(self.project.observers): - observer.resource_changed(resource) - - def move(self, resource, new_resource): - fscommands = self._get_fscommands(resource) - fscommands.move(resource.real_path, new_resource.real_path) - for observer in list(self.project.observers): - observer.resource_moved(resource, new_resource) - - def create(self, resource): - if resource.is_folder(): - self._create_resource(resource.path, kind='folder') - else: - self._create_resource(resource.path) - for observer in list(self.project.observers): - observer.resource_created(resource) - - def remove(self, resource): - fscommands = self._get_fscommands(resource) - fscommands.remove(resource.real_path) - for observer in list(self.project.observers): - observer.resource_removed(resource) - - def _create_resource(self, file_name, kind='file'): - resource_path = self.project._get_resource_path(file_name) - if os.path.exists(resource_path): - raise exceptions.RopeError('Resource <%s> already exists' - % resource_path) - resource = self.project.get_file(file_name) - if not resource.parent.exists(): - raise exceptions.ResourceNotFoundError( - 'Parent folder of <%s> does not exist' % resource.path) - fscommands = self._get_fscommands(resource) - try: - if kind == 'file': - fscommands.create_file(resource_path) - else: - fscommands.create_folder(resource_path) - except IOError as e: - raise exceptions.RopeError(e) - - -def _get_destination_for_move(resource, destination): - dest_path = resource.project._get_resource_path(destination) - if os.path.isdir(dest_path): - if destination != '': - return destination + '/' + resource.name - else: - return resource.name - 
return destination - - -class ChangeToData(object): - - def convertChangeSet(self, change): - description = change.description - changes = [] - for child in change.changes: - changes.append(self(child)) - return (description, changes, change.time) - - def convertChangeContents(self, change): - return (change.resource.path, change.new_contents, change.old_contents) - - def convertMoveResource(self, change): - return (change.resource.path, change.new_resource.path) - - def convertCreateResource(self, change): - return (change.resource.path, change.resource.is_folder()) - - def convertRemoveResource(self, change): - return (change.resource.path, change.resource.is_folder()) - - def __call__(self, change): - change_type = type(change) - if change_type in (CreateFolder, CreateFile): - change_type = CreateResource - method = getattr(self, 'convert' + change_type.__name__) - return (change_type.__name__, method(change)) - - -class DataToChange(object): - - def __init__(self, project): - self.project = project - - def makeChangeSet(self, description, changes, time=None): - result = ChangeSet(description, time) - for child in changes: - result.add_change(self(child)) - return result - - def makeChangeContents(self, path, new_contents, old_contents): - resource = self.project.get_file(path) - return ChangeContents(resource, new_contents, old_contents) - - def makeMoveResource(self, old_path, new_path): - resource = self.project.get_file(old_path) - return MoveResource(resource, new_path, exact=True) - - def makeCreateResource(self, path, is_folder): - if is_folder: - resource = self.project.get_folder(path) - else: - resource = self.project.get_file(path) - return CreateResource(resource) - - def makeRemoveResource(self, path, is_folder): - if is_folder: - resource = self.project.get_folder(path) - else: - resource = self.project.get_file(path) - return RemoveResource(resource) - - def __call__(self, data): - method = getattr(self, 'make' + data[0]) - return method(*data[1]) diff --git a/pymode/libs3/rope/base/codeanalyze.py b/pymode/libs3/rope/base/codeanalyze.py deleted file mode 100644 index 843f477d..00000000 --- a/pymode/libs3/rope/base/codeanalyze.py +++ /dev/null @@ -1,358 +0,0 @@ -import bisect -import re -import token -import tokenize - - -class ChangeCollector(object): - - def __init__(self, text): - self.text = text - self.changes = [] - - def add_change(self, start, end, new_text=None): - if new_text is None: - new_text = self.text[start:end] - self.changes.append((start, end, new_text)) - - def get_changed(self): - if not self.changes: - return None - def compare_changes(change1, change2): - return cmp(change1[:2], change2[:2]) - self.changes.sort(key=lambda change: change[:2]) - pieces = [] - last_changed = 0 - for change in self.changes: - start, end, text = change - pieces.append(self.text[last_changed:start] + text) - last_changed = end - if last_changed < len(self.text): - pieces.append(self.text[last_changed:]) - result = ''.join(pieces) - if result != self.text: - return result - - -class SourceLinesAdapter(object): - """Adapts source to Lines interface - - Note: The creation of this class is expensive. 
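`ChangeCollector` above batches (start, end, new_text) spans and splices them into the original text in one pass. A compact sketch of that splicing step, assuming non-overlapping spans:

    def apply_changes(text, changes):
        """Apply non-overlapping (start, end, new_text) edits to text."""
        pieces, last = [], 0
        for start, end, new_text in sorted(changes):
            pieces.append(text[last:start] + new_text)
            last = end
        pieces.append(text[last:])
        return ''.join(pieces)

    print(apply_changes('hello world', [(6, 11, 'rope'), (0, 5, 'goodbye')]))
    # goodbye rope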
- """ - - def __init__(self, source_code): - self.code = source_code - self.starts = None - self._initialize_line_starts() - - def _initialize_line_starts(self): - self.starts = [] - self.starts.append(0) - try: - i = 0 - while True: - i = self.code.index('\n', i) + 1 - self.starts.append(i) - except ValueError: - pass - self.starts.append(len(self.code) + 1) - - def get_line(self, lineno): - return self.code[self.starts[lineno - 1]: - self.starts[lineno] - 1] - - def length(self): - return len(self.starts) - 1 - - def get_line_number(self, offset): - return bisect.bisect(self.starts, offset) - - def get_line_start(self, lineno): - return self.starts[lineno - 1] - - def get_line_end(self, lineno): - return self.starts[lineno] - 1 - - -class ArrayLinesAdapter(object): - - def __init__(self, lines): - self.lines = lines - - def get_line(self, line_number): - return self.lines[line_number - 1] - - def length(self): - return len(self.lines) - - -class LinesToReadline(object): - - def __init__(self, lines, start): - self.lines = lines - self.current = start - - def readline(self): - if self.current <= self.lines.length(): - self.current += 1 - return self.lines.get_line(self.current - 1) + '\n' - return '' - - def __call__(self): - return self.readline() - - -class _CustomGenerator(object): - - def __init__(self, lines): - self.lines = lines - self.in_string = '' - self.open_count = 0 - self.continuation = False - - def __call__(self): - size = self.lines.length() - result = [] - i = 1 - while i <= size: - while i <= size and not self.lines.get_line(i).strip(): - i += 1 - if i <= size: - start = i - while True: - line = self.lines.get_line(i) - self._analyze_line(line) - if not (self.continuation or self.open_count or - self.in_string) or i == size: - break - i += 1 - result.append((start, i)) - i += 1 - return result - - _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]') - def _analyze_line(self, line): - char = None - for match in self._main_chars.finditer(line): - char = match.group() - i = match.start() - if char in '\'"': - if not self.in_string: - self.in_string = char - if char * 3 == line[i:i + 3]: - self.in_string = char * 3 - elif self.in_string == line[i:i + len(self.in_string)] and \ - not (i > 0 and line[i - 1] == '\\' and - not (i > 1 and line[i - 2] == '\\')): - self.in_string = '' - if self.in_string: - continue - if char == '#': - break - if char in '([{': - self.open_count += 1 - elif char in ')]}': - self.open_count -= 1 - if line and char != '#' and line.endswith('\\'): - self.continuation = True - else: - self.continuation = False - -def custom_generator(lines): - return _CustomGenerator(lines)() - - -class LogicalLineFinder(object): - - def __init__(self, lines): - self.lines = lines - - def logical_line_in(self, line_number): - indents = count_line_indents(self.lines.get_line(line_number)) - tries = 0 - while True: - block_start = get_block_start(self.lines, line_number, indents) - try: - return self._block_logical_line(block_start, line_number) - except IndentationError as e: - tries += 1 - if tries == 5: - raise e - lineno = e.lineno + block_start - 1 - indents = count_line_indents(self.lines.get_line(lineno)) - - def generate_starts(self, start_line=1, end_line=None): - for start, end in self.generate_regions(start_line, end_line): - yield start - - def generate_regions(self, start_line=1, end_line=None): - # XXX: `block_start` should be at a better position! 
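`SourceLinesAdapter` above trades one expensive scan (recording the offset of every line start) for cheap offset-to-line-number queries via bisect. A minimal standalone version of that index:

    import bisect

    class LineIndex(object):
        def __init__(self, source):
            self.starts = [0]                       # offset of each line start
            for i, char in enumerate(source):
                if char == '\n':
                    self.starts.append(i + 1)
            self.starts.append(len(source) + 1)     # sentinel past the end

        def line_number(self, offset):
            return bisect.bisect(self.starts, offset)

    index = LineIndex('first\nsecond\nthird\n')
    print(index.line_number(0), index.line_number(7), index.line_number(14))
    # 1 2 3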
- block_start = 1 - readline = LinesToReadline(self.lines, block_start) - shifted = start_line - block_start + 1 - try: - for start, end in self._logical_lines(readline): - real_start = start + block_start - 1 - real_start = self._first_non_blank(real_start) - if end_line is not None and real_start >= end_line: - break - real_end = end + block_start - 1 - if real_start >= start_line: - yield (real_start, real_end) - except tokenize.TokenError as e: - pass - - def _block_logical_line(self, block_start, line_number): - readline = LinesToReadline(self.lines, block_start) - shifted = line_number - block_start + 1 - region = self._calculate_logical(readline, shifted) - start = self._first_non_blank(region[0] + block_start - 1) - if region[1] is None: - end = self.lines.length() - else: - end = region[1] + block_start - 1 - return start, end - - def _calculate_logical(self, readline, line_number): - last_end = 1 - try: - for start, end in self._logical_lines(readline): - if line_number <= end: - return (start, end) - last_end = end + 1 - except tokenize.TokenError as e: - current = e.args[1][0] - return (last_end, max(last_end, current - 1)) - return (last_end, None) - - def _logical_lines(self, readline): - last_end = 1 - for current_token in tokenize.generate_tokens(readline): - current = current_token[2][0] - if current_token[0] == token.NEWLINE: - yield (last_end, current) - last_end = current + 1 - - def _first_non_blank(self, line_number): - current = line_number - while current < self.lines.length(): - line = self.lines.get_line(current).strip() - if line and not line.startswith('#'): - return current - current += 1 - return current - - -def tokenizer_generator(lines): - return LogicalLineFinder(lines).generate_regions() - - -class CachingLogicalLineFinder(object): - - def __init__(self, lines, generate=custom_generator): - self.lines = lines - self._generate = generate - - _starts = None - @property - def starts(self): - if self._starts is None: - self._init_logicals() - return self._starts - - _ends = None - @property - def ends(self): - if self._ends is None: - self._init_logicals() - return self._ends - - def _init_logicals(self): - """Should initialize _starts and _ends attributes""" - size = self.lines.length() + 1 - self._starts = [None] * size - self._ends = [None] * size - for start, end in self._generate(self.lines): - self._starts[start] = True - self._ends[end] = True - - def logical_line_in(self, line_number): - start = line_number - while start > 0 and not self.starts[start]: - start -= 1 - if start == 0: - try: - start = self.starts.index(True, line_number) - except ValueError: - return (line_number, line_number) - return (start, self.ends.index(True, start)) - - def generate_starts(self, start_line=1, end_line=None): - if end_line is None: - end_line = self.lines.length() - for index in range(start_line, end_line): - if self.starts[index]: - yield index - - -def get_block_start(lines, lineno, maximum_indents=80): - """Approximate block start""" - pattern = get_block_start_patterns() - for i in range(lineno, 0, -1): - match = pattern.search(lines.get_line(i)) - if match is not None and \ - count_line_indents(lines.get_line(i)) <= maximum_indents: - striped = match.string.lstrip() - # Maybe we're in a list comprehension or generator expression - if i > 1 and striped.startswith('if') or striped.startswith('for'): - bracs = 0 - for j in range(i, min(i + 5, lines.length() + 1)): - for c in lines.get_line(j): - if c == '#': - break - if c in '[(': - bracs += 1 - if c in ')]': - 
bracs -= 1 - if bracs < 0: - break - if bracs < 0: - break - if bracs < 0: - continue - return i - return 1 - - -_block_start_pattern = None - -def get_block_start_patterns(): - global _block_start_pattern - if not _block_start_pattern: - pattern = '^\\s*(((def|class|if|elif|except|for|while|with)\\s)|'\ - '((try|else|finally|except)\\s*:))' - _block_start_pattern = re.compile(pattern, re.M) - return _block_start_pattern - - -def count_line_indents(line): - indents = 0 - for char in line: - if char == ' ': - indents += 1 - elif char == '\t': - indents += 8 - else: - return indents - return 0 - - -def get_string_pattern(): - start = r'(\b[uU]?[rR]?)?' - longstr = r'%s"""(\\.|"(?!"")|\\\n|[^"\\])*"""' % start - shortstr = r'%s"(\\.|[^"\\\n])*"' % start - return '|'.join([longstr, longstr.replace('"', "'"), - shortstr, shortstr.replace('"', "'")]) - -def get_comment_pattern(): - return r'#[^\n]*' diff --git a/pymode/libs3/rope/base/default_config.py b/pymode/libs3/rope/base/default_config.py deleted file mode 100644 index 126cf7bf..00000000 --- a/pymode/libs3/rope/base/default_config.py +++ /dev/null @@ -1,86 +0,0 @@ -# The default ``config.py`` - - -def set_prefs(prefs): - """This function is called before opening the project""" - - # Specify which files and folders to ignore in the project. - # Changes to ignored resources are not added to the history and - # VCSs. Also they are not returned in `Project.get_files()`. - # Note that ``?`` and ``*`` match all characters but slashes. - # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' - # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' - # '.svn': matches 'pkg/.svn' and all of its children - # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' - # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' - prefs['ignored_resources'] = [ - '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', - '__pycache__', '.tox', '.env', 'node_modules', 'bower_components'] - - # Specifies which files should be considered python files. It is - # useful when you have scripts inside your project. Only files - # ending with ``.py`` are considered to be python files by - # default. - #prefs['python_files'] = ['*.py'] - - # Custom source folders: By default rope searches the project - # for finding source folders (folders that should be searched - # for finding modules). You can add paths to that list. Note - # that rope guesses project source folders correctly most of the - # time; use this if you have any problems. - # The folders should be relative to project root and use '/' for - # separating folders regardless of the platform rope is running on. - # 'src/my_source_folder' for instance. - #prefs.add('source_folders', 'src') - - # You can extend python path for looking up modules - #prefs.add('python_path', '~/python/') - - # Should rope save object information or not. - prefs['save_objectdb'] = True - prefs['compress_objectdb'] = False - - # If `True`, rope analyzes each module when it is being saved. - prefs['automatic_soa'] = True - # The depth of calls to follow in static object analysis - prefs['soa_followed_calls'] = 0 - - # If `False` when running modules or unit tests "dynamic object - # analysis" is turned off. This makes them much faster. - prefs['perform_doa'] = True - - # Rope can check the validity of its object DB when running. - prefs['validate_objectdb'] = True - - # How many undos to hold? - prefs['max_history_items'] = 32 - - # Shows whether to save history across sessions. 
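`get_block_start` above walks upward from a line until a compound-statement header with a small enough indent matches the block-start pattern. An illustrative use of that pattern on a few lines of made-up source:

    import re

    BLOCK_START = re.compile(
        r'^\s*(((def|class|if|elif|except|for|while|with)\s)|'
        r'((try|else|finally|except)\s*:))', re.M)

    lines = [
        'class Greeter(object):',
        '    def greet(self):',
        "        print('hi')",
    ]
    for lineno in range(len(lines), 0, -1):         # scan upwards, 1-based
        if BLOCK_START.search(lines[lineno - 1]):
            print('block starts at line', lineno)   # block starts at line 2
            break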
- prefs['save_history'] = True - prefs['compress_history'] = False - - # Set the number spaces used for indenting. According to - # :PEP:`8`, it is best to use 4 spaces. Since most of rope's - # unit-tests use 4 spaces it is more reliable, too. - prefs['indent_size'] = 4 - - # Builtin and c-extension modules that are allowed to be imported - # and inspected by rope. - prefs['extension_modules'] = [] - - # Add all standard c-extensions to extension_modules list. - prefs['import_dynload_stdmods'] = True - - # If `True` modules with syntax errors are considered to be empty. - # The default value is `False`; When `False` syntax errors raise - # `rope.base.exceptions.ModuleSyntaxError` exception. - prefs['ignore_syntax_errors'] = False - - # If `True`, rope ignores unresolvable imports. Otherwise, they - # appear in the importing namespace. - prefs['ignore_bad_imports'] = False - - -def project_opened(project): - """This function is called after opening the project""" - # Do whatever you like here! diff --git a/pymode/libs3/rope/base/evaluate.py b/pymode/libs3/rope/base/evaluate.py deleted file mode 100644 index 659646c0..00000000 --- a/pymode/libs3/rope/base/evaluate.py +++ /dev/null @@ -1,325 +0,0 @@ -import rope.base.builtins -import rope.base.pynames -import rope.base.pyobjects -from rope.base import ast, astutils, exceptions, pyobjects, arguments, worder - - -BadIdentifierError = exceptions.BadIdentifierError - -def eval_location(pymodule, offset): - """Find the pyname at the offset""" - return eval_location2(pymodule, offset)[1] - - -def eval_location2(pymodule, offset): - """Find the primary and pyname at offset""" - pyname_finder = ScopeNameFinder(pymodule) - return pyname_finder.get_primary_and_pyname_at(offset) - - -def eval_node(scope, node): - """Evaluate a `ast.AST` node and return a PyName - - Return `None` if the expression cannot be evaluated. 
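A config module like the deleted default_config.py is consumed by passing a dict-like prefs object into set_prefs and reading the keys back. A minimal sketch of that round trip (the Prefs class here is illustrative, not rope's):

    class Prefs(dict):
        def add(self, key, value):
            self.setdefault(key, []).append(value)

    def set_prefs(prefs):
        prefs['ignored_resources'] = ['*.pyc', '.git']
        prefs['indent_size'] = 4
        prefs.add('python_path', '~/python/')

    prefs = Prefs()
    set_prefs(prefs)
    print(prefs['indent_size'], prefs['python_path'])
    # 4 ['~/python/']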
- """ - return eval_node2(scope, node)[1] - - -def eval_node2(scope, node): - evaluator = StatementEvaluator(scope) - ast.walk(node, evaluator) - return evaluator.old_result, evaluator.result - - -def eval_str(holding_scope, name): - return eval_str2(holding_scope, name)[1] - - -def eval_str2(holding_scope, name): - try: - # parenthesizing for handling cases like 'a_var.\nattr' - node = ast.parse('(%s)' % name) - except SyntaxError: - raise BadIdentifierError('Not a resolvable python identifier selected.') - return eval_node2(holding_scope, node) - - -class ScopeNameFinder(object): - - def __init__(self, pymodule): - self.module_scope = pymodule.get_scope() - self.lines = pymodule.lines - self.worder = worder.Worder(pymodule.source_code, True) - - def _is_defined_in_class_body(self, holding_scope, offset, lineno): - if lineno == holding_scope.get_start() and \ - holding_scope.parent is not None and \ - holding_scope.parent.get_kind() == 'Class' and \ - self.worder.is_a_class_or_function_name_in_header(offset): - return True - if lineno != holding_scope.get_start() and \ - holding_scope.get_kind() == 'Class' and \ - self.worder.is_name_assigned_in_class_body(offset): - return True - return False - - def _is_function_name_in_function_header(self, scope, offset, lineno): - if scope.get_start() <= lineno <= scope.get_body_start() and \ - scope.get_kind() == 'Function' and \ - self.worder.is_a_class_or_function_name_in_header(offset): - return True - return False - - def get_pyname_at(self, offset): - return self.get_primary_and_pyname_at(offset)[1] - - def get_primary_and_pyname_at(self, offset): - lineno = self.lines.get_line_number(offset) - holding_scope = self.module_scope.get_inner_scope_for_line(lineno) - # function keyword parameter - if self.worder.is_function_keyword_parameter(offset): - keyword_name = self.worder.get_word_at(offset) - pyobject = self.get_enclosing_function(offset) - if isinstance(pyobject, pyobjects.PyFunction): - return (None, pyobject.get_parameters().get(keyword_name, None)) - # class body - if self._is_defined_in_class_body(holding_scope, offset, lineno): - class_scope = holding_scope - if lineno == holding_scope.get_start(): - class_scope = holding_scope.parent - name = self.worder.get_primary_at(offset).strip() - try: - return (None, class_scope.pyobject[name]) - except rope.base.exceptions.AttributeNotFoundError: - return (None, None) - # function header - if self._is_function_name_in_function_header(holding_scope, offset, lineno): - name = self.worder.get_primary_at(offset).strip() - return (None, holding_scope.parent[name]) - # from statement module - if self.worder.is_from_statement_module(offset): - module = self.worder.get_primary_at(offset) - module_pyname = self._find_module(module) - return (None, module_pyname) - if self.worder.is_from_aliased(offset): - name = self.worder.get_from_aliased(offset) - else: - name = self.worder.get_primary_at(offset) - return eval_str2(holding_scope, name) - - def get_enclosing_function(self, offset): - function_parens = self.worder.find_parens_start_from_inside(offset) - try: - function_pyname = self.get_pyname_at(function_parens - 1) - except BadIdentifierError: - function_pyname = None - if function_pyname is not None: - pyobject = function_pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return pyobject - elif isinstance(pyobject, pyobjects.AbstractClass) and \ - '__init__' in pyobject: - return pyobject['__init__'].get_object() - elif '__call__' in pyobject: - return 
pyobject['__call__'].get_object() - return None - - def _find_module(self, module_name): - dots = 0 - while module_name[dots] == '.': - dots += 1 - return rope.base.pynames.ImportedModule( - self.module_scope.pyobject, module_name[dots:], dots) - - -class StatementEvaluator(object): - - def __init__(self, scope): - self.scope = scope - self.result = None - self.old_result = None - - def _Name(self, node): - self.result = self.scope.lookup(node.id) - - def _Attribute(self, node): - pyname = eval_node(self.scope, node.value) - if pyname is None: - pyname = rope.base.pynames.UnboundName() - self.old_result = pyname - if pyname.get_object() != rope.base.pyobjects.get_unknown(): - try: - self.result = pyname.get_object()[node.attr] - except exceptions.AttributeNotFoundError: - self.result = None - - def _Call(self, node): - primary, pyobject = self._get_primary_and_object_for_node(node.func) - if pyobject is None: - return - def _get_returned(pyobject): - args = arguments.create_arguments(primary, pyobject, - node, self.scope) - return pyobject.get_returned_object(args) - if isinstance(pyobject, rope.base.pyobjects.AbstractClass): - result = None - if '__new__' in pyobject: - new_function = pyobject['__new__'].get_object() - result = _get_returned(new_function) - if result is None or \ - result == rope.base.pyobjects.get_unknown(): - result = rope.base.pyobjects.PyObject(pyobject) - self.result = rope.base.pynames.UnboundName(pyobject=result) - return - - pyfunction = None - if isinstance(pyobject, rope.base.pyobjects.AbstractFunction): - pyfunction = pyobject - elif '__call__' in pyobject: - pyfunction = pyobject['__call__'].get_object() - if pyfunction is not None: - self.result = rope.base.pynames.UnboundName( - pyobject=_get_returned(pyfunction)) - - def _Str(self, node): - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_str()) - - def _Num(self, node): - type_name = type(node.n).__name__ - self.result = self._get_builtin_name(type_name) - - def _get_builtin_name(self, type_name): - pytype = rope.base.builtins.builtins[type_name].get_object() - return rope.base.pynames.UnboundName( - rope.base.pyobjects.PyObject(pytype)) - - def _BinOp(self, node): - self.result = rope.base.pynames.UnboundName( - self._get_object_for_node(node.left)) - - def _BoolOp(self, node): - pyobject = self._get_object_for_node(node.values[0]) - if pyobject is None: - pyobject = self._get_object_for_node(node.values[1]) - self.result = rope.base.pynames.UnboundName(pyobject) - - def _Repr(self, node): - self.result = self._get_builtin_name('str') - - def _UnaryOp(self, node): - self.result = rope.base.pynames.UnboundName( - self._get_object_for_node(node.operand)) - - def _Compare(self, node): - self.result = self._get_builtin_name('bool') - - def _Dict(self, node): - keys = None - values = None - if node.keys: - keys = self._get_object_for_node(node.keys[0]) - values = self._get_object_for_node(node.values[0]) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_dict(keys, values)) - - def _List(self, node): - holding = None - if node.elts: - holding = self._get_object_for_node(node.elts[0]) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_list(holding)) - - def _ListComp(self, node): - pyobject = self._what_does_comprehension_hold(node) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_list(pyobject)) - - def _GeneratorExp(self, node): - pyobject = self._what_does_comprehension_hold(node) - 
self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_iterator(pyobject)) - - def _what_does_comprehension_hold(self, node): - scope = self._make_comprehension_scope(node) - pyname = eval_node(scope, node.elt) - return pyname.get_object() if pyname is not None else None - - def _make_comprehension_scope(self, node): - scope = self.scope - module = scope.pyobject.get_module() - names = {} - for comp in node.generators: - new_names = _get_evaluated_names(comp.target, comp.iter, module, - '.__iter__().next()', node.lineno) - names.update(new_names) - return rope.base.pyscopes.TemporaryScope(scope.pycore, scope, names) - - def _Tuple(self, node): - objects = [] - if len(node.elts) < 4: - for stmt in node.elts: - pyobject = self._get_object_for_node(stmt) - objects.append(pyobject) - else: - objects.append(self._get_object_for_node(node.elts[0])) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_tuple(*objects)) - - def _get_object_for_node(self, stmt): - pyname = eval_node(self.scope, stmt) - pyobject = None - if pyname is not None: - pyobject = pyname.get_object() - return pyobject - - def _get_primary_and_object_for_node(self, stmt): - primary, pyname = eval_node2(self.scope, stmt) - pyobject = None - if pyname is not None: - pyobject = pyname.get_object() - return primary, pyobject - - def _Subscript(self, node): - if isinstance(node.slice, ast.Index): - self._call_function(node.value, '__getitem__', - [node.slice.value]) - elif isinstance(node.slice, ast.Slice): - self._call_function(node.value, '__getitem__', - [node.slice]) - - def _Slice(self, node): - self.result = self._get_builtin_name('slice') - - def _call_function(self, node, function_name, other_args=None): - pyname = eval_node(self.scope, node) - if pyname is not None: - pyobject = pyname.get_object() - else: - return - if function_name in pyobject: - called = pyobject[function_name].get_object() - if not called or not isinstance(called, pyobjects.AbstractFunction): - return - args = [node] - if other_args: - args += other_args - arguments_ = arguments.Arguments(args, self.scope) - self.result = rope.base.pynames.UnboundName( - pyobject=called.get_returned_object(arguments_)) - - def _Lambda(self, node): - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.Lambda(node, self.scope)) - - -def _get_evaluated_names(targets, assigned, module, evaluation, lineno): - result = {} - for name, levels in astutils.get_name_levels(targets): - assignment = rope.base.pynames.AssignmentValue(assigned, levels, - evaluation) - # XXX: this module should not access `rope.base.pynamesdef`! 
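`StatementEvaluator` above, like the rest of these modules, relies on the walk() dispatch convention: a walker exposes methods named '_' plus the AST node's class name, and unhandled nodes fall through to their children. A standalone sketch with the standard-library ast module (Python 3.8+, where literals parse to ast.Constant) and an illustrative walker:

    import ast

    def walk(node, walker):
        method = getattr(walker, '_' + node.__class__.__name__, None)
        if method is not None:
            return method(node)
        for child in ast.iter_child_nodes(node):
            walk(child, walker)

    class LiteralCollector(object):
        def __init__(self):
            self.literals = []

        def _Constant(self, node):                  # numbers and strings
            self.literals.append(node.value)

    collector = LiteralCollector()
    walk(ast.parse("x = 1 + 2\nname = 'rope'"), collector)
    print(collector.literals)                       # [1, 2, 'rope']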
- pyname = rope.base.pynamesdef.AssignedName(lineno, module) - pyname.assignments.append(assignment) - result[name] = pyname - return result diff --git a/pymode/libs3/rope/base/exceptions.py b/pymode/libs3/rope/base/exceptions.py deleted file mode 100644 index d161c89e..00000000 --- a/pymode/libs3/rope/base/exceptions.py +++ /dev/null @@ -1,61 +0,0 @@ -class RopeError(Exception): - """Base exception for rope""" - - -class ResourceNotFoundError(RopeError): - """Resource not found exception""" - - -class RefactoringError(RopeError): - """Errors for performing a refactoring""" - - -class InterruptedTaskError(RopeError): - """The task has been interrupted""" - - -class HistoryError(RopeError): - """Errors for history undo/redo operations""" - - -class ModuleNotFoundError(RopeError): - """Module not found exception""" - - -class AttributeNotFoundError(RopeError): - """Attribute not found exception""" - - -class NameNotFoundError(RopeError): - """Name not found exception""" - - -class BadIdentifierError(RopeError): - """The name cannot be resolved""" - - -class ModuleSyntaxError(RopeError): - """Module has syntax errors - - The `filename` and `lineno` fields indicate where the error has - occurred. - - """ - - def __init__(self, filename, lineno, message): - self.filename = filename - self.lineno = lineno - self.message_ = message - super(ModuleSyntaxError, self).__init__( - 'Syntax error in file <%s> line <%s>: %s' % - (filename, lineno, message)) - - -class ModuleDecodeError(RopeError): - """Cannot decode module""" - - def __init__(self, filename, message): - self.filename = filename - self.message_ = message - super(ModuleDecodeError, self).__init__( - 'Cannot decode file <%s>: %s' % (filename, message)) diff --git a/pymode/libs3/rope/base/fscommands.py b/pymode/libs3/rope/base/fscommands.py deleted file mode 100644 index ea1a061a..00000000 --- a/pymode/libs3/rope/base/fscommands.py +++ /dev/null @@ -1,267 +0,0 @@ -"""Project file system commands. - -This modules implements file system operations used by rope. Different -version control systems can be supported by implementing the interface -provided by `FileSystemCommands` class. See `SubversionCommands` and -`MercurialCommands` for example. 
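As the docstring above says, a new version-control backend only needs to provide the same five operations as `FileSystemCommands`. The sketch below illustrates that shape; `FossilCommands` and the `fossil` command line are made-up examples, not part of rope, and error handling is kept as minimal as in the removed `_execute()` helper.

    import os
    import shutil
    import subprocess


    class FossilCommands(object):
        """Hypothetical backend following rope's fscommands interface."""

        def __init__(self, root):
            self.root = root

        def create_file(self, path):
            open(path, 'w').close()      # do the plain filesystem action first
            self._do(['add', path])      # then record it in the VCS

        def create_folder(self, path):
            os.mkdir(path)

        def move(self, path, new_location):
            self._do(['mv', path, new_location])

        def remove(self, path):
            if os.path.isfile(path):
                os.remove(path)
            else:
                shutil.rmtree(path)

        def write(self, path, data):
            with open(path, 'wb') as file_:
                file_.write(data)

        def _do(self, args):
            # the return code is not checked, mirroring _execute() in the removed module
            subprocess.call(['fossil'] + args, cwd=self.root)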
- -""" -import os, re -import shutil -import subprocess - - -def create_fscommands(root): - dirlist = os.listdir(root) - commands = {'.hg': MercurialCommands, - '.svn': SubversionCommands, - '.git': GITCommands, - '_svn': SubversionCommands, - '_darcs': DarcsCommands} - for key in commands: - if key in dirlist: - try: - return commands[key](root) - except (ImportError, OSError): - pass - return FileSystemCommands() - - -class FileSystemCommands(object): - - def create_file(self, path): - open(path, 'w').close() - - def create_folder(self, path): - os.mkdir(path) - - def move(self, path, new_location): - shutil.move(path, new_location) - - def remove(self, path): - if os.path.isfile(path): - os.remove(path) - else: - shutil.rmtree(path) - - def write(self, path, data): - file_ = open(path, 'wb') - try: - file_.write(data) - finally: - file_.close() - - -class SubversionCommands(object): - - def __init__(self, *args): - self.normal_actions = FileSystemCommands() - import pysvn - self.client = pysvn.Client() - - def create_file(self, path): - self.normal_actions.create_file(path) - self.client.add(path, force=True) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - self.client.add(path, force=True) - - def move(self, path, new_location): - self.client.move(path, new_location, force=True) - - def remove(self, path): - self.client.remove(path, force=True) - - def write(self, path, data): - self.normal_actions.write(path, data) - - -class MercurialCommands(object): - - def __init__(self, root): - self.hg = self._import_mercurial() - self.normal_actions = FileSystemCommands() - try: - self.ui = self.hg.ui.ui( - verbose=False, debug=False, quiet=True, - interactive=False, traceback=False, report_untrusted=False) - except: - self.ui = self.hg.ui.ui() - self.ui.setconfig('ui', 'interactive', 'no') - self.ui.setconfig('ui', 'debug', 'no') - self.ui.setconfig('ui', 'traceback', 'no') - self.ui.setconfig('ui', 'verbose', 'no') - self.ui.setconfig('ui', 'report_untrusted', 'no') - self.ui.setconfig('ui', 'quiet', 'yes') - - self.repo = self.hg.hg.repository(self.ui, root) - - def _import_mercurial(self): - import mercurial.commands - import mercurial.hg - import mercurial.ui - return mercurial - - def create_file(self, path): - self.normal_actions.create_file(path) - self.hg.commands.add(self.ui, self.repo, path) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - - def move(self, path, new_location): - self.hg.commands.rename(self.ui, self.repo, path, - new_location, after=False) - - def remove(self, path): - self.hg.commands.remove(self.ui, self.repo, path) - - def write(self, path, data): - self.normal_actions.write(path, data) - - -class GITCommands(object): - - def __init__(self, root): - self.root = root - self._do(['version']) - self.normal_actions = FileSystemCommands() - - def create_file(self, path): - self.normal_actions.create_file(path) - self._do(['add', self._in_dir(path)]) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - - def move(self, path, new_location): - self._do(['mv', self._in_dir(path), self._in_dir(new_location)]) - - def remove(self, path): - self._do(['rm', self._in_dir(path)]) - - def write(self, path, data): - # XXX: should we use ``git add``? 
- self.normal_actions.write(path, data) - - def _do(self, args): - _execute(['git'] + args, cwd=self.root) - - def _in_dir(self, path): - if path.startswith(self.root): - return path[len(self.root) + 1:] - return self.root - - -class DarcsCommands(object): - - def __init__(self, root): - self.root = root - self.normal_actions = FileSystemCommands() - - def create_file(self, path): - self.normal_actions.create_file(path) - self._do(['add', path]) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - self._do(['add', path]) - - def move(self, path, new_location): - self._do(['mv', path, new_location]) - - def remove(self, path): - self.normal_actions.remove(path) - - def write(self, path, data): - self.normal_actions.write(path, data) - - def _do(self, args): - _execute(['darcs'] + args, cwd=self.root) - - -def _execute(args, cwd=None): - process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE) - process.wait() - return process.returncode - - -def unicode_to_file_data(contents, encoding=None): - if not isinstance(contents, str): - return contents - if encoding is None: - encoding = read_str_coding(contents) - if encoding is not None: - return contents.encode(encoding) - try: - return contents.encode() - except UnicodeEncodeError: - return contents.encode('utf-8') - -def file_data_to_unicode(data, encoding=None): - result = _decode_data(data, encoding) - if '\r' in result: - result = result.replace('\r\n', '\n').replace('\r', '\n') - return result - -def _decode_data(data, encoding): - if isinstance(data, str): - return data - if encoding is None: - encoding = read_str_coding(data) - if encoding is None: - # there is no encoding tip, we need to guess. - # PEP263 says that "encoding not explicitly defined" means it is ascii, - # but we will use utf8 instead since utf8 fully covers ascii and btw is - # the only non-latin sane encoding. 
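The two helpers above give a bytes/str round trip that honours a PEP 263 coding cookie and normalizes line endings. A small usage sketch against the module as it appears here, assuming `rope.base.fscommands` is importable (for example from a separately installed rope):

    from rope.base import fscommands

    # Bytes with a coding cookie and Windows line endings ...
    data = b"# -*- coding: latin-1 -*-\r\nname = 'caf\xe9'\r\n"

    # ... decode according to the cookie and come back with '\n' endings.
    text = fscommands.file_data_to_unicode(data)
    assert '\r' not in text and 'caf\xe9' in text

    # Encoding again also honours the cookie found in the text itself.
    assert fscommands.unicode_to_file_data(text) == data.replace(b'\r\n', b'\n')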
- encoding = 'utf-8' - try: - return data.decode(encoding) - except (UnicodeError, LookupError): - # fallback to utf-8: it should never fail - return data.decode('utf-8') - - -def read_file_coding(path): - file = open(path, 'b') - count = 0 - result = [] - buffsize = 10 - while True: - current = file.read(10) - if not current: - break - count += current.count('\n') - result.append(current) - file.close() - return _find_coding(''.join(result)) - - -def read_str_coding(source): - if not isinstance(source, str): - source = source.decode("utf-8", "ignore") - #TODO: change it to precompiled version - mex = re.search("\-\*\-\s+coding:\s*(.*?)\s+\-\*\-", source) - if mex: - return mex.group(1) - return "utf-8" - -def _find_coding(text): - coding = 'coding' - try: - start = text.index(coding) + len(coding) - if text[start] not in '=:': - return - start += 1 - while start < len(text) and text[start].isspace(): - start += 1 - end = start - while end < len(text): - c = text[end] - if not c.isalnum() and c not in '-_': - break - end += 1 - return text[start:end] - except ValueError: - pass diff --git a/pymode/libs3/rope/base/history.py b/pymode/libs3/rope/base/history.py deleted file mode 100644 index d3c523d3..00000000 --- a/pymode/libs3/rope/base/history.py +++ /dev/null @@ -1,235 +0,0 @@ -from rope.base import exceptions, change, taskhandle - - -class History(object): - """A class that holds project history""" - - def __init__(self, project, maxundos=None): - self.project = project - self._undo_list = [] - self._redo_list = [] - self._maxundos = maxundos - self._load_history() - self.project.data_files.add_write_hook(self.write) - self.current_change = None - - def _load_history(self): - if self.save: - result = self.project.data_files.read_data( - 'history', compress=self.compress, import_=True) - if result is not None: - to_change = change.DataToChange(self.project) - for data in result[0]: - self._undo_list.append(to_change(data)) - for data in result[1]: - self._redo_list.append(to_change(data)) - - def do(self, changes, task_handle=taskhandle.NullTaskHandle()): - """Perform the change and add it to the `self.undo_list` - - Note that uninteresting changes (changes to ignored files) - will not be appended to `self.undo_list`. - - """ - try: - self.current_change = changes - changes.do(change.create_job_set(task_handle, changes)) - finally: - self.current_change = None - if self._is_change_interesting(changes): - self.undo_list.append(changes) - self._remove_extra_items() - del self.redo_list[:] - - def _remove_extra_items(self): - if len(self.undo_list) > self.max_undos: - del self.undo_list[0:len(self.undo_list) - self.max_undos] - - def _is_change_interesting(self, changes): - for resource in changes.get_changed_resources(): - if not self.project.is_ignored(resource): - return True - return False - - def undo(self, change=None, drop=False, - task_handle=taskhandle.NullTaskHandle()): - """Redo done changes from the history - - When `change` is `None`, the last done change will be undone. - If change is not `None` it should be an item from - `self.undo_list`; this change and all changes that depend on - it will be undone. In both cases the list of undone changes - will be returned. - - If `drop` is `True`, the undone change will not be appended to - the redo list. 
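`do`, `undo` and `redo` above are the whole public surface of the history machinery. A short usage sketch follows; `Project`, `ChangeSet` and `ChangeContents` come from `rope.base.project` and `rope.base.change`, but their constructors and the `project.history` attribute are not part of this excerpt and should be treated as assumptions.

    from rope.base.project import Project
    from rope.base.change import ChangeSet, ChangeContents

    project = Project('.')                        # creates/opens .ropeproject here
    demo = project.root.create_file('demo.py')    # a tracked file to play with

    changes = ChangeSet('fill demo.py')
    changes.add_change(ChangeContents(demo, 'x = 1\n'))

    history = project.history
    history.do(changes)          # performed and appended to history.undo_list
    history.undo()               # demo.py is empty again
    history.redo()               # and filled again
    project.close()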
- - """ - if not self._undo_list: - raise exceptions.HistoryError('Undo list is empty') - if change is None: - change = self.undo_list[-1] - dependencies = self._find_dependencies(self.undo_list, change) - self._move_front(self.undo_list, dependencies) - self._perform_undos(len(dependencies), task_handle) - result = self.redo_list[-len(dependencies):] - if drop: - del self.redo_list[-len(dependencies):] - return result - - def redo(self, change=None, task_handle=taskhandle.NullTaskHandle()): - """Redo undone changes from the history - - When `change` is `None`, the last undone change will be - redone. If change is not `None` it should be an item from - `self.redo_list`; this change and all changes that depend on - it will be redone. In both cases the list of redone changes - will be returned. - - """ - if not self.redo_list: - raise exceptions.HistoryError('Redo list is empty') - if change is None: - change = self.redo_list[-1] - dependencies = self._find_dependencies(self.redo_list, change) - self._move_front(self.redo_list, dependencies) - self._perform_redos(len(dependencies), task_handle) - return self.undo_list[-len(dependencies):] - - def _move_front(self, change_list, changes): - for change in changes: - change_list.remove(change) - change_list.append(change) - - def _find_dependencies(self, change_list, change): - index = change_list.index(change) - return _FindChangeDependencies(change_list[index:])() - - def _perform_undos(self, count, task_handle): - for i in range(count): - self.current_change = self.undo_list[-1] - try: - job_set = change.create_job_set(task_handle, - self.current_change) - self.current_change.undo(job_set) - finally: - self.current_change = None - self.redo_list.append(self.undo_list.pop()) - - def _perform_redos(self, count, task_handle): - for i in range(count): - self.current_change = self.redo_list[-1] - try: - job_set = change.create_job_set(task_handle, - self.current_change) - self.current_change.do(job_set) - finally: - self.current_change = None - self.undo_list.append(self.redo_list.pop()) - - def contents_before_current_change(self, file): - if self.current_change is None: - return None - result = self._search_for_change_contents([self.current_change], file) - if result is not None: - return result - if file.exists() and not file.is_folder(): - return file.read() - else: - return None - - def _search_for_change_contents(self, change_list, file): - for change_ in reversed(change_list): - if isinstance(change_, change.ChangeSet): - result = self._search_for_change_contents(change_.changes, - file) - if result is not None: - return result - if isinstance(change_, change.ChangeContents) and \ - change_.resource == file: - return change_.old_contents - - def write(self): - if self.save: - data = [] - to_data = change.ChangeToData() - self._remove_extra_items() - data.append([to_data(change_) for change_ in self.undo_list]) - data.append([to_data(change_) for change_ in self.redo_list]) - self.project.data_files.write_data('history', data, - compress=self.compress) - - def get_file_undo_list(self, resource): - result = [] - for change in self.undo_list: - if resource in change.get_changed_resources(): - result.append(change) - return result - - def __str__(self): - return 'History holds %s changes in memory' % \ - (len(self.undo_list) + len(self.redo_list)) - - undo_list = property(lambda self: self._undo_list) - redo_list = property(lambda self: self._redo_list) - - @property - def tobe_undone(self): - """The last done change if available, `None` 
otherwise""" - if self.undo_list: - return self.undo_list[-1] - - @property - def tobe_redone(self): - """The last undone change if available, `None` otherwise""" - if self.redo_list: - return self.redo_list[-1] - - @property - def max_undos(self): - if self._maxundos is None: - return self.project.prefs.get('max_history_items', 100) - else: - return self._maxundos - - @property - def save(self): - return self.project.prefs.get('save_history', False) - - @property - def compress(self): - return self.project.prefs.get('compress_history', False) - - def clear(self): - """Forget all undo and redo information""" - del self.undo_list[:] - del self.redo_list[:] - - -class _FindChangeDependencies(object): - - def __init__(self, change_list): - self.change = change_list[0] - self.change_list = change_list - self.changed_resources = set(self.change.get_changed_resources()) - - def __call__(self): - result = [self.change] - for change in self.change_list[1:]: - if self._depends_on(change, result): - result.append(change) - self.changed_resources.update(change.get_changed_resources()) - return result - - def _depends_on(self, changes, result): - for resource in changes.get_changed_resources(): - if resource is None: - continue - if resource in self.changed_resources: - return True - for changed in self.changed_resources: - if resource.is_folder() and resource.contains(changed): - return True - if changed.is_folder() and changed.contains(resource): - return True - return False diff --git a/pymode/libs3/rope/base/libutils.py b/pymode/libs3/rope/base/libutils.py deleted file mode 100644 index cb9381e3..00000000 --- a/pymode/libs3/rope/base/libutils.py +++ /dev/null @@ -1,65 +0,0 @@ -"""A few useful functions for using rope as a library""" -import os.path - -import rope.base.project -import rope.base.pycore -from rope.base import taskhandle - - -def path_to_resource(project, path, type=None): - """Get the resource at path - - You only need to specify `type` if `path` does not exist. It can - be either 'file' or 'folder'. If the type is `None` it is assumed - that the resource already exists. - - Note that this function uses `Project.get_resource()`, - `Project.get_file()`, and `Project.get_folder()` methods. - - """ - project_path = relative(project.address, path) - if project_path is None: - project_path = rope.base.project._realpath(path) - project = rope.base.project.get_no_project() - if type is None: - return project.get_resource(project_path) - if type == 'file': - return project.get_file(project_path) - if type == 'folder': - return project.get_folder(project_path) - return None - -def relative(root, path): - root = rope.base.project._realpath(root).replace(os.path.sep, '/') - path = rope.base.project._realpath(path).replace(os.path.sep, '/') - if path == root: - return '' - if path.startswith(root + '/'): - return path[len(root) + 1:] - -def report_change(project, path, old_content): - """Report that the contents of file at `path` was changed - - The new contents of file is retrieved by reading the file. - - """ - resource = path_to_resource(project, path) - if resource is None: - return - for observer in list(project.observers): - observer.resource_changed(resource) - if project.pycore.automatic_soa: - rope.base.pycore.perform_soa_on_changed_scopes(project, resource, - old_content) - -def analyze_modules(project, task_handle=taskhandle.NullTaskHandle()): - """Perform static object analysis on all python files in the project - - Note that this might be really time consuming. 
- """ - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Analyzing Modules', len(resources)) - for resource in resources: - job_set.started_job(resource.path) - project.pycore.analyze_module(resource) - job_set.finished_job() diff --git a/pymode/libs3/rope/base/oi/__init__.py b/pymode/libs3/rope/base/oi/__init__.py deleted file mode 100644 index 0b1a1525..00000000 --- a/pymode/libs3/rope/base/oi/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Rope object analysis and inference package - -Rope makes some simplifying assumptions about a python program. It -assumes that a program only performs assignments and function calls. -Tracking assignments is simple and `PyName` objects handle that. The -main problem is function calls. Rope uses these two approaches for -obtaining call information: - -* Static object analysis: `rope.base.pycore.PyCore.analyze_module()` - - It can analyze modules to obtain information about functions. This - is done by analyzing function calls in a module or scope. Currently - SOA analyzes the scopes that are changed while saving or when the - user asks to analyze a module. That is mainly because static - analysis is time-consuming. - -* Dynamic object analysis: `rope.base.pycore.PyCore.run_module()` - - When you run a module or your testsuite, when DOA is enabled, it - collects information about parameters passed to and objects returned - from functions. The main problem with this approach is that it is - quite slow; Not when looking up the information but when collecting - them. - -An instance of `rope.base.oi.objectinfo.ObjectInfoManager` can be used -for accessing these information. It saves the data in a -`rope.base.oi.objectdb.ObjectDB` internally. - -Now if our objectdb does not know anything about a function and we -need the value returned by it, static object inference, SOI, comes -into play. It analyzes function body and tries to infer the object -that is returned from it (we usually need the returned value for the -given parameter objects). - -Rope might collect and store information for other `PyName`\s, too. -For instance rope stores the object builtin containers hold. 
- -""" diff --git a/pymode/libs3/rope/base/oi/doa.py b/pymode/libs3/rope/base/oi/doa.py deleted file mode 100644 index 1efb994c..00000000 --- a/pymode/libs3/rope/base/oi/doa.py +++ /dev/null @@ -1,162 +0,0 @@ -import pickle -import marshal -import os -import socket -import subprocess -import sys -import tempfile -import threading - - -class PythonFileRunner(object): - """A class for running python project files""" - - def __init__(self, pycore, file_, args=None, stdin=None, - stdout=None, analyze_data=None): - self.pycore = pycore - self.file = file_ - self.analyze_data = analyze_data - self.observers = [] - self.args = args - self.stdin = stdin - self.stdout = stdout - - def run(self): - """Execute the process""" - env = dict(os.environ) - file_path = self.file.real_path - path_folders = self.pycore.get_source_folders() + \ - self.pycore.get_python_path_folders() - env['PYTHONPATH'] = os.pathsep.join(folder.real_path - for folder in path_folders) - runmod_path = self.pycore.find_module('rope.base.oi.runmod').real_path - self.receiver = None - self._init_data_receiving() - send_info = '-' - if self.receiver: - send_info = self.receiver.get_send_info() - args = [sys.executable, runmod_path, send_info, - self.pycore.project.address, self.file.real_path] - if self.analyze_data is None: - del args[1:4] - if self.args is not None: - args.extend(self.args) - self.process = subprocess.Popen( - executable=sys.executable, args=args, env=env, - cwd=os.path.split(file_path)[0], stdin=self.stdin, - stdout=self.stdout, stderr=self.stdout, close_fds=os.name != 'nt') - - def _init_data_receiving(self): - if self.analyze_data is None: - return - # Disabling FIFO data transfer due to blocking when running - # unittests in the GUI. - # XXX: Handle FIFO data transfer for `rope.ui.testview` - if True or os.name == 'nt': - self.receiver = _SocketReceiver() - else: - self.receiver = _FIFOReceiver() - self.receiving_thread = threading.Thread(target=self._receive_information) - self.receiving_thread.setDaemon(True) - self.receiving_thread.start() - - def _receive_information(self): - #temp = open('/dev/shm/info', 'w') - for data in self.receiver.receive_data(): - self.analyze_data(data) - #temp.write(str(data) + '\n') - #temp.close() - for observer in self.observers: - observer() - - def wait_process(self): - """Wait for the process to finish""" - self.process.wait() - if self.analyze_data: - self.receiving_thread.join() - - def kill_process(self): - """Stop the process""" - if self.process.poll() is not None: - return - try: - if hasattr(self.process, 'terminate'): - self.process.terminate() - elif os.name != 'nt': - os.kill(self.process.pid, 9) - else: - import ctypes - handle = int(self.process._handle) - ctypes.windll.kernel32.TerminateProcess(handle, -1) - except OSError: - pass - - def add_finishing_observer(self, observer): - """Notify this observer when execution finishes""" - self.observers.append(observer) - - -class _MessageReceiver(object): - - def receive_data(self): - pass - - def get_send_info(self): - pass - - -class _SocketReceiver(_MessageReceiver): - - def __init__(self): - self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.data_port = 3037 - while self.data_port < 4000: - try: - self.server_socket.bind(('', self.data_port)) - break - except socket.error as e: - self.data_port += 1 - self.server_socket.listen(1) - - def get_send_info(self): - return str(self.data_port) - - def receive_data(self): - conn, addr = self.server_socket.accept() - self.server_socket.close() 
- my_file = conn.makefile('rb') - while True: - try: - yield pickle.load(my_file) - except EOFError: - break - my_file.close() - conn.close() - - -class _FIFOReceiver(_MessageReceiver): - - def __init__(self): - # XXX: this is insecure and might cause race conditions - self.file_name = self._get_file_name() - os.mkfifo(self.file_name) - - def _get_file_name(self): - prefix = tempfile.gettempdir() + '/__rope_' - i = 0 - while os.path.exists(prefix + str(i).rjust(4, '0')): - i += 1 - return prefix + str(i).rjust(4, '0') - - def get_send_info(self): - return self.file_name - - def receive_data(self): - my_file = open(self.file_name, 'rb') - while True: - try: - yield marshal.load(my_file) - except EOFError: - break - my_file.close() - os.remove(self.file_name) diff --git a/pymode/libs3/rope/base/oi/memorydb.py b/pymode/libs3/rope/base/oi/memorydb.py deleted file mode 100644 index e4c3a1c8..00000000 --- a/pymode/libs3/rope/base/oi/memorydb.py +++ /dev/null @@ -1,106 +0,0 @@ -from rope.base.oi import objectdb - - -class MemoryDB(objectdb.FileDict): - - def __init__(self, project, persist=None): - self.project = project - self._persist = persist - self.files = self - self._load_files() - self.project.data_files.add_write_hook(self.write) - - def _load_files(self): - self._files = {} - if self.persist: - result = self.project.data_files.read_data( - 'objectdb', compress=self.compress, import_=True) - if result is not None: - self._files = result - - def keys(self): - return list(self._files.keys()) - - def __contains__(self, key): - return key in self._files - - def __getitem__(self, key): - return FileInfo(self._files[key]) - - def create(self, path): - self._files[path] = {} - - def rename(self, file, newfile): - if file not in self._files: - return - self._files[newfile] = self._files[file] - del self[file] - - def __delitem__(self, file): - del self._files[file] - - def write(self): - if self.persist: - self.project.data_files.write_data('objectdb', self._files, - self.compress) - - @property - def compress(self): - return self.project.prefs.get('compress_objectdb', False) - - @property - def persist(self): - if self._persist is not None: - return self._persist - else: - return self.project.prefs.get('save_objectdb', False) - - -class FileInfo(objectdb.FileInfo): - - def __init__(self, scopes): - self.scopes = scopes - - def create_scope(self, key): - self.scopes[key] = ScopeInfo() - - def keys(self): - return list(self.scopes.keys()) - - def __contains__(self, key): - return key in self.scopes - - def __getitem__(self, key): - return self.scopes[key] - - def __delitem__(self, key): - del self.scopes[key] - - -class ScopeInfo(objectdb.ScopeInfo): - - def __init__(self): - self.call_info = {} - self.per_name = {} - - def get_per_name(self, name): - return self.per_name.get(name, None) - - def save_per_name(self, name, value): - self.per_name[name] = value - - def get_returned(self, parameters): - return self.call_info.get(parameters, None) - - def get_call_infos(self): - for args, returned in self.call_info.items(): - yield objectdb.CallInfo(args, returned) - - def add_call(self, parameters, returned): - self.call_info[parameters] = returned - - def __getstate__(self): - return (self.call_info, self.per_name) - - def __setstate__(self, data): - self.call_info, self.per_name = data diff --git a/pymode/libs3/rope/base/oi/objectdb.py b/pymode/libs3/rope/base/oi/objectdb.py deleted file mode 100644 index 97d4c2eb..00000000 --- a/pymode/libs3/rope/base/oi/objectdb.py +++ /dev/null @@ -1,192 +0,0 @@ 
-from collections import UserDict, MutableMapping - -class ObjectDB(object): - - def __init__(self, db, validation): - self.db = db - self.validation = validation - self.observers = [] - self.files = db.files - - def validate_files(self): - for file in list(self.files): - if not self.validation.is_file_valid(file): - del self.files[file] - self._file_removed(file) - - def validate_file(self, file): - if file not in self.files: - return - for key in list(self.files[file]): - if not self.validation.is_scope_valid(file, key): - del self.files[file][key] - - def file_moved(self, file, newfile): - if file not in self.files: - return - self.files.rename(file, newfile) - self._file_removed(file) - self._file_added(newfile) - - def get_files(self): - return list(self.files.keys()) - - def get_returned(self, path, key, args): - scope_info = self._get_scope_info(path, key, readonly=True) - result = scope_info.get_returned(args) - if self.validation.is_value_valid(result): - return result - - def get_pername(self, path, key, name): - scope_info = self._get_scope_info(path, key, readonly=True) - result = scope_info.get_per_name(name) - if self.validation.is_value_valid(result): - return result - - def get_callinfos(self, path, key): - scope_info = self._get_scope_info(path, key, readonly=True) - return scope_info.get_call_infos() - - def add_callinfo(self, path, key, args, returned): - scope_info = self._get_scope_info(path, key, readonly=False) - old_returned = scope_info.get_returned(args) - if self.validation.is_more_valid(returned, old_returned): - scope_info.add_call(args, returned) - - def add_pername(self, path, key, name, value): - scope_info = self._get_scope_info(path, key, readonly=False) - old_value = scope_info.get_per_name(name) - if self.validation.is_more_valid(value, old_value): - scope_info.save_per_name(name, value) - - def add_file_list_observer(self, observer): - self.observers.append(observer) - - def write(self): - self.db.write() - - def _get_scope_info(self, path, key, readonly=True): - if path not in self.files: - if readonly: - return _NullScopeInfo() - self.files.create(path) - self._file_added(path) - if key not in self.files[path]: - if readonly: - return _NullScopeInfo() - self.files[path].create_scope(key) - result = self.files[path][key] - if isinstance(result, dict): - print(self.files, self.files[path], self.files[path][key]) - return result - - def _file_removed(self, path): - for observer in self.observers: - observer.removed(path) - - def _file_added(self, path): - for observer in self.observers: - observer.added(path) - - def __str__(self): - scope_count = 0 - for file_dict in self.files.values(): - scope_count += len(file_dict) - return 'ObjectDB holds %s file and %s scope infos' % \ - (len(self.files), scope_count) - - -class _NullScopeInfo(object): - - def __init__(self, error_on_write=True): - self.error_on_write = error_on_write - - def get_per_name(self, name): - pass - - def save_per_name(self, name, value): - if self.error_on_write: - raise NotImplementedError() - - def get_returned(self, parameters): - pass - - def get_call_infos(self): - return [] - - def add_call(self, parameters, returned): - if self.error_on_write: - raise NotImplementedError() - - -class FileInfo(MutableMapping): - - def create_scope(self, key): - pass - - def __iter__(self): - for key in self.keys(): - yield key - - def __len__(self): - return len(self.keys()) - - def __setitem__(self, key, value): - self[key] = value - -class FileDict(MutableMapping): - - def create(self, key): - 
pass - - def rename(self, key, new_key): - pass - - def __iter__(self): - for key in self.keys(): - yield key - - def __len__(self): - return len(self.keys()) - - def __setitem__(self, key, value): - self[key] = value - -class ScopeInfo(object): - - def get_per_name(self, name): - pass - - def save_per_name(self, name, value): - pass - - def get_returned(self, parameters): - pass - - def get_call_infos(self): - pass - - def add_call(self, parameters, returned): - pass - - -class CallInfo(object): - - def __init__(self, args, returned): - self.args = args - self.returned = returned - - def get_parameters(self): - return self.args - - def get_returned(self): - return self.returned - - -class FileListObserver(object): - - def added(self, path): - pass - - def removed(self, path): - pass diff --git a/pymode/libs3/rope/base/oi/objectinfo.py b/pymode/libs3/rope/base/oi/objectinfo.py deleted file mode 100644 index f86d72e0..00000000 --- a/pymode/libs3/rope/base/oi/objectinfo.py +++ /dev/null @@ -1,232 +0,0 @@ -import warnings - -from rope.base import exceptions, resourceobserver -from rope.base.oi import objectdb, memorydb, transform - - -class ObjectInfoManager(object): - """Stores object information - - It uses an instance of `objectdb.ObjectDB` for storing - information. - - """ - - def __init__(self, project): - self.project = project - self.to_textual = transform.PyObjectToTextual(project) - self.to_pyobject = transform.TextualToPyObject(project) - self.doi_to_pyobject = transform.DOITextualToPyObject(project) - self._init_objectdb() - if project.prefs.get('validate_objectdb', False): - self._init_validation() - - def _init_objectdb(self): - dbtype = self.project.get_prefs().get('objectdb_type', None) - persist = None - if dbtype is not None: - warnings.warn( - '"objectdb_type" project config is deprecated;\n' - 'Use "save_objectdb" instead in your project ' - 'config file.\n(".ropeproject/config.py" by default)\n', - DeprecationWarning) - if dbtype != 'memory' and self.project.ropefolder is not None: - persist = True - self.validation = TextualValidation(self.to_pyobject) - db = memorydb.MemoryDB(self.project, persist=persist) - self.objectdb = objectdb.ObjectDB(db, self.validation) - - def _init_validation(self): - self.objectdb.validate_files() - observer = resourceobserver.ResourceObserver( - changed=self._resource_changed, moved=self._resource_moved, - removed=self._resource_moved) - files = [] - for path in self.objectdb.get_files(): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None and resource.project == self.project: - files.append(resource) - self.observer = resourceobserver.FilteredResourceObserver(observer, - files) - self.objectdb.add_file_list_observer(_FileListObserver(self)) - self.project.add_observer(self.observer) - - def _resource_changed(self, resource): - try: - self.objectdb.validate_file( - self.to_textual.resource_to_path(resource)) - except exceptions.ModuleSyntaxError: - pass - - def _resource_moved(self, resource, new_resource=None): - self.observer.remove_resource(resource) - if new_resource is not None: - old = self.to_textual.resource_to_path(resource) - new = self.to_textual.resource_to_path(new_resource) - self.objectdb.file_moved(old, new) - self.observer.add_resource(new_resource) - - def get_returned(self, pyobject, args): - result = self.get_exact_returned(pyobject, args) - if result is not None: - return result - path, key = self._get_scope(pyobject) - if path is None: - return None - for call_info in 
self.objectdb.get_callinfos(path, key): - returned = call_info.get_returned() - if returned and returned[0] not in ('unknown', 'none'): - result = returned - break - if result is None: - result = returned - if result is not None: - return self.to_pyobject(result) - - def get_exact_returned(self, pyobject, args): - path, key = self._get_scope(pyobject) - if path is not None: - returned = self.objectdb.get_returned( - path, key, self._args_to_textual(pyobject, args)) - if returned is not None: - return self.to_pyobject(returned) - - def _args_to_textual(self, pyfunction, args): - parameters = list(pyfunction.get_param_names(special_args=False)) - arguments = args.get_arguments(parameters)[:len(parameters)] - textual_args = tuple([self.to_textual(arg) - for arg in arguments]) - return textual_args - - def get_parameter_objects(self, pyobject): - path, key = self._get_scope(pyobject) - if path is None: - return None - arg_count = len(pyobject.get_param_names(special_args=False)) - unknowns = arg_count - parameters = [None] * arg_count - for call_info in self.objectdb.get_callinfos(path, key): - args = call_info.get_parameters() - for index, arg in enumerate(args[:arg_count]): - old = parameters[index] - if self.validation.is_more_valid(arg, old): - parameters[index] = arg - if self.validation.is_value_valid(arg): - unknowns -= 1 - if unknowns == 0: - break - if unknowns < arg_count: - return [self.to_pyobject(parameter) - for parameter in parameters] - - def get_passed_objects(self, pyfunction, parameter_index): - path, key = self._get_scope(pyfunction) - if path is None: - return [] - result = [] - for call_info in self.objectdb.get_callinfos(path, key): - args = call_info.get_parameters() - if len(args) > parameter_index: - parameter = self.to_pyobject(args[parameter_index]) - if parameter is not None: - result.append(parameter) - return result - - def doa_data_received(self, data): - def doi_to_normal(textual): - pyobject = self.doi_to_pyobject(textual) - return self.to_textual(pyobject) - function = doi_to_normal(data[0]) - args = tuple([doi_to_normal(textual) for textual in data[1]]) - returned = doi_to_normal(data[2]) - if function[0] == 'defined' and len(function) == 3: - self._save_data(function, args, returned) - - def function_called(self, pyfunction, params, returned=None): - function_text = self.to_textual(pyfunction) - params_text = tuple([self.to_textual(param) - for param in params]) - returned_text = ('unknown',) - if returned is not None: - returned_text = self.to_textual(returned) - self._save_data(function_text, params_text, returned_text) - - def save_per_name(self, scope, name, data): - path, key = self._get_scope(scope.pyobject) - if path is not None: - self.objectdb.add_pername(path, key, name, self.to_textual(data)) - - def get_per_name(self, scope, name): - path, key = self._get_scope(scope.pyobject) - if path is not None: - result = self.objectdb.get_pername(path, key, name) - if result is not None: - return self.to_pyobject(result) - - def _save_data(self, function, args, returned=('unknown',)): - self.objectdb.add_callinfo(function[1], function[2], args, returned) - - def _get_scope(self, pyobject): - resource = pyobject.get_module().get_resource() - if resource is None: - return None, None - textual = self.to_textual(pyobject) - if textual[0] == 'defined': - path = textual[1] - if len(textual) == 3: - key = textual[2] - else: - key = '' - return path, key - return None, None - - def sync(self): - self.objectdb.sync() - - def __str__(self): - return 
str(self.objectdb) - - -class TextualValidation(object): - - def __init__(self, to_pyobject): - self.to_pyobject = to_pyobject - - def is_value_valid(self, value): - # ???: Should none and unknown be considered valid? - if value is None or value[0] in ('none', 'unknown'): - return False - return self.to_pyobject(value) is not None - - def is_more_valid(self, new, old): - if old is None: - return True - return new[0] not in ('unknown', 'none') - - def is_file_valid(self, path): - return self.to_pyobject.path_to_resource(path) is not None - - def is_scope_valid(self, path, key): - if key == '': - textual = ('defined', path) - else: - textual = ('defined', path, key) - return self.to_pyobject(textual) is not None - - -class _FileListObserver(object): - - def __init__(self, object_info): - self.object_info = object_info - self.observer = self.object_info.observer - self.to_pyobject = self.object_info.to_pyobject - - def removed(self, path): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None: - self.observer.remove_resource(resource) - - def added(self, path): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None: - self.observer.add_resource(resource) diff --git a/pymode/libs3/rope/base/oi/runmod.py b/pymode/libs3/rope/base/oi/runmod.py deleted file mode 100644 index 45b33fbc..00000000 --- a/pymode/libs3/rope/base/oi/runmod.py +++ /dev/null @@ -1,215 +0,0 @@ - -def __rope_start_everything(): - import os - import sys - import socket - import pickle - import marshal - import inspect - import types - import threading - - class _MessageSender(object): - - def send_data(self, data): - pass - - class _SocketSender(_MessageSender): - - def __init__(self, port): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect(('127.0.0.1', port)) - self.my_file = s.makefile('wb') - - def send_data(self, data): - if not self.my_file.closed: - pickle.dump(data, self.my_file) - - def close(self): - self.my_file.close() - - class _FileSender(_MessageSender): - - def __init__(self, file_name): - self.my_file = open(file_name, 'wb') - - def send_data(self, data): - if not self.my_file.closed: - marshal.dump(data, self.my_file) - - def close(self): - self.my_file.close() - - - def _cached(func): - cache = {} - def newfunc(self, arg): - if arg in cache: - return cache[arg] - result = func(self, arg) - cache[arg] = result - return result - return newfunc - - class _FunctionCallDataSender(object): - - def __init__(self, send_info, project_root): - self.project_root = project_root - if send_info.isdigit(): - self.sender = _SocketSender(int(send_info)) - else: - self.sender = _FileSender(send_info) - - def global_trace(frame, event, arg): - # HACK: Ignoring out->in calls - # This might lose some information - if self._is_an_interesting_call(frame): - return self.on_function_call - sys.settrace(global_trace) - threading.settrace(global_trace) - - def on_function_call(self, frame, event, arg): - if event != 'return': - return - args = [] - returned = ('unknown',) - code = frame.f_code - for argname in code.co_varnames[:code.co_argcount]: - try: - args.append(self._object_to_persisted_form(frame.f_locals[argname])) - except (TypeError, AttributeError): - args.append(('unknown',)) - try: - returned = self._object_to_persisted_form(arg) - except (TypeError, AttributeError): - pass - try: - data = (self._object_to_persisted_form(frame.f_code), - tuple(args), returned) - self.sender.send_data(data) - except (TypeError): - pass - return self.on_function_call - - def 
_is_an_interesting_call(self, frame): - #if frame.f_code.co_name in ['?', '']: - # return False - #return not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code) - - if not self._is_code_inside_project(frame.f_code) and \ - (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)): - return False - return True - - def _is_code_inside_project(self, code): - source = self._path(code.co_filename) - return source is not None and os.path.exists(source) and \ - _realpath(source).startswith(self.project_root) - - @_cached - def _get_persisted_code(self, object_): - source = self._path(object_.co_filename) - if not os.path.exists(source): - raise TypeError('no source') - return ('defined', _realpath(source), str(object_.co_firstlineno)) - - @_cached - def _get_persisted_class(self, object_): - try: - return ('defined', _realpath(inspect.getsourcefile(object_)), - object_.__name__) - except (TypeError, AttributeError): - return ('unknown',) - - def _get_persisted_builtin(self, object_): - if isinstance(object_, str): - return ('builtin', 'str') - if isinstance(object_, list): - holding = None - if len(object_) > 0: - holding = object_[0] - return ('builtin', 'list', self._object_to_persisted_form(holding)) - if isinstance(object_, dict): - keys = None - values = None - if len(object_) > 0: - keys = list(object_.keys())[0] - values = object_[keys] - if values == object_ and len(object_) > 1: - keys = list(object_.keys())[1] - values = object_[keys] - return ('builtin', 'dict', - self._object_to_persisted_form(keys), - self._object_to_persisted_form(values)) - if isinstance(object_, tuple): - objects = [] - if len(object_) < 3: - for holding in object_: - objects.append(self._object_to_persisted_form(holding)) - else: - objects.append(self._object_to_persisted_form(object_[0])) - return tuple(['builtin', 'tuple'] + objects) - if isinstance(object_, set): - holding = None - if len(object_) > 0: - for o in object_: - holding = o - break - return ('builtin', 'set', self._object_to_persisted_form(holding)) - return ('unknown',) - - def _object_to_persisted_form(self, object_): - if object_ is None: - return ('none',) - if isinstance(object_, types.CodeType): - return self._get_persisted_code(object_) - if isinstance(object_, types.FunctionType): - return self._get_persisted_code(object_.__code__) - if isinstance(object_, types.MethodType): - return self._get_persisted_code(object_.__func__.__code__) - if isinstance(object_, types.ModuleType): - return self._get_persisted_module(object_) - if isinstance(object_, (str, list, dict, tuple, set)): - return self._get_persisted_builtin(object_) - if isinstance(object_, type): - return self._get_persisted_class(object_) - return ('instance', self._get_persisted_class(type(object_))) - - @_cached - def _get_persisted_module(self, object_): - path = self._path(object_.__file__) - if path and os.path.exists(path): - return ('defined', _realpath(path)) - return ('unknown',) - - def _path(self, path): - if path.endswith('.pyc'): - path = path[:-1] - if path.endswith('.py'): - return path - - def close(self): - self.sender.close() - sys.settrace(None) - - def _realpath(path): - return os.path.realpath(os.path.abspath(os.path.expanduser(path))) - - send_info = sys.argv[1] - project_root = sys.argv[2] - file_to_run = sys.argv[3] - run_globals = globals() - run_globals.update({'__name__': '__main__', - 'builtins': __builtins__, - '__file__': file_to_run}) - if send_info != '-': - data_sender = _FunctionCallDataSender(send_info, 
project_root) - del sys.argv[1:4] - with open(file_to_run) as file: - exec(compile(file.read(), file_to_run, 'exec'), run_globals) - if send_info != '-': - data_sender.close() - - -if __name__ == '__main__': - __rope_start_everything() diff --git a/pymode/libs3/rope/base/oi/soa.py b/pymode/libs3/rope/base/oi/soa.py deleted file mode 100644 index 38cd5c9d..00000000 --- a/pymode/libs3/rope/base/oi/soa.py +++ /dev/null @@ -1,136 +0,0 @@ -import rope.base.ast -import rope.base.oi.soi -import rope.base.pynames -from rope.base import pyobjects, evaluate, astutils, arguments - - -def analyze_module(pycore, pymodule, should_analyze, - search_subscopes, followed_calls): - """Analyze `pymodule` for static object inference - - Analyzes scopes for collecting object information. The analysis - starts from inner scopes. - - """ - _analyze_node(pycore, pymodule, should_analyze, - search_subscopes, followed_calls) - - -def _analyze_node(pycore, pydefined, should_analyze, - search_subscopes, followed_calls): - if search_subscopes(pydefined): - for scope in pydefined.get_scope().get_scopes(): - _analyze_node(pycore, scope.pyobject, should_analyze, - search_subscopes, followed_calls) - if should_analyze(pydefined): - new_followed_calls = max(0, followed_calls - 1) - return_true = lambda pydefined: True - return_false = lambda pydefined: False - def _follow(pyfunction): - _analyze_node(pycore, pyfunction, return_true, - return_false, new_followed_calls) - if not followed_calls: - _follow = None - visitor = SOAVisitor(pycore, pydefined, _follow) - for child in rope.base.ast.get_child_nodes(pydefined.get_ast()): - rope.base.ast.walk(child, visitor) - - -class SOAVisitor(object): - - def __init__(self, pycore, pydefined, follow_callback=None): - self.pycore = pycore - self.pymodule = pydefined.get_module() - self.scope = pydefined.get_scope() - self.follow = follow_callback - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def _Call(self, node): - for child in rope.base.ast.get_child_nodes(node): - rope.base.ast.walk(child, self) - primary, pyname = evaluate.eval_node2(self.scope, node.func) - if pyname is None: - return - pyfunction = pyname.get_object() - if isinstance(pyfunction, pyobjects.AbstractFunction): - args = arguments.create_arguments(primary, pyfunction, - node, self.scope) - elif isinstance(pyfunction, pyobjects.PyClass): - pyclass = pyfunction - if '__init__' in pyfunction: - pyfunction = pyfunction['__init__'].get_object() - pyname = rope.base.pynames.UnboundName(pyobjects.PyObject(pyclass)) - args = self._args_with_self(primary, pyname, pyfunction, node) - elif '__call__' in pyfunction: - pyfunction = pyfunction['__call__'].get_object() - args = self._args_with_self(primary, pyname, pyfunction, node) - else: - return - self._call(pyfunction, args) - - def _args_with_self(self, primary, self_pyname, pyfunction, node): - base_args = arguments.create_arguments(primary, pyfunction, - node, self.scope) - return arguments.MixedArguments(self_pyname, base_args, self.scope) - - def _call(self, pyfunction, args): - if isinstance(pyfunction, pyobjects.PyFunction): - if self.follow is not None: - before = self._parameter_objects(pyfunction) - self.pycore.object_info.function_called( - pyfunction, args.get_arguments(pyfunction.get_param_names())) - pyfunction._set_parameter_pyobjects(None) - if self.follow is not None: - after = self._parameter_objects(pyfunction) - if after != before: - self.follow(pyfunction) - # XXX: Maybe we should not call every builtin function - if 
isinstance(pyfunction, rope.base.builtins.BuiltinFunction): - pyfunction.get_returned_object(args) - - def _parameter_objects(self, pyfunction): - result = [] - for i in range(len(pyfunction.get_param_names(False))): - result.append(pyfunction.get_parameter(i)) - return result - - def _Assign(self, node): - for child in rope.base.ast.get_child_nodes(node): - rope.base.ast.walk(child, self) - visitor = _SOAAssignVisitor() - nodes = [] - for child in node.targets: - rope.base.ast.walk(child, visitor) - nodes.extend(visitor.nodes) - for subscript, levels in nodes: - instance = evaluate.eval_node(self.scope, subscript.value) - args_pynames = [] - args_pynames.append(evaluate.eval_node(self.scope, - subscript.slice.value)) - value = rope.base.oi.soi._infer_assignment( - rope.base.pynames.AssignmentValue(node.value, levels), self.pymodule) - args_pynames.append(rope.base.pynames.UnboundName(value)) - if instance is not None and value is not None: - pyobject = instance.get_object() - if '__setitem__' in pyobject: - pyfunction = pyobject['__setitem__'].get_object() - args = arguments.ObjectArguments([instance] + args_pynames) - self._call(pyfunction, args) - # IDEA: handle `__setslice__`, too - - -class _SOAAssignVisitor(astutils._NodeNameCollector): - - def __init__(self): - super(_SOAAssignVisitor, self).__init__() - self.nodes = [] - - def _added(self, node, levels): - if isinstance(node, rope.base.ast.Subscript) and \ - isinstance(node.slice, rope.base.ast.Index): - self.nodes.append((node, levels)) diff --git a/pymode/libs3/rope/base/oi/soi.py b/pymode/libs3/rope/base/oi/soi.py deleted file mode 100644 index bf40af90..00000000 --- a/pymode/libs3/rope/base/oi/soi.py +++ /dev/null @@ -1,186 +0,0 @@ -"""A module for inferring objects - -For more information see the documentation in `rope.base.oi` -package. 
- -""" -import rope.base.builtins -import rope.base.pynames -import rope.base.pyobjects -from rope.base import evaluate, utils, arguments - - -_ignore_inferred = utils.ignore_exception( - rope.base.pyobjects.IsBeingInferredError) - - -@_ignore_inferred -def infer_returned_object(pyfunction, args): - """Infer the `PyObject` this `PyFunction` returns after calling""" - object_info = pyfunction.pycore.object_info - result = object_info.get_exact_returned(pyfunction, args) - if result is not None: - return result - result = _infer_returned(pyfunction, args) - if result is not None: - if args and pyfunction.get_module().get_resource() is not None: - params = args.get_arguments( - pyfunction.get_param_names(special_args=False)) - object_info.function_called(pyfunction, params, result) - return result - return object_info.get_returned(pyfunction, args) - -@_ignore_inferred -def infer_parameter_objects(pyfunction): - """Infer the `PyObject`\s of parameters of this `PyFunction`""" - object_info = pyfunction.pycore.object_info - result = object_info.get_parameter_objects(pyfunction) - if result is None: - result = _parameter_objects(pyfunction) - _handle_first_parameter(pyfunction, result) - return result - -def _handle_first_parameter(pyobject, parameters): - kind = pyobject.get_kind() - if parameters is None or kind not in ['method', 'classmethod']: - pass - if not parameters: - if not pyobject.get_param_names(special_args=False): - return - parameters.append(rope.base.pyobjects.get_unknown()) - if kind == 'method': - parameters[0] = rope.base.pyobjects.PyObject(pyobject.parent) - if kind == 'classmethod': - parameters[0] = pyobject.parent - -@_ignore_inferred -def infer_assigned_object(pyname): - if not pyname.assignments: - return - for assignment in reversed(pyname.assignments): - result = _infer_assignment(assignment, pyname.module) - if result is not None: - return result - -def get_passed_objects(pyfunction, parameter_index): - object_info = pyfunction.pycore.object_info - result = object_info.get_passed_objects(pyfunction, - parameter_index) - if not result: - statically_inferred = _parameter_objects(pyfunction) - if len(statically_inferred) > parameter_index: - result.append(statically_inferred[parameter_index]) - return result - -def _infer_returned(pyobject, args): - if args: - # HACK: Setting parameter objects manually - # This is not thread safe and might cause problems if `args` - # does not come from a good call site - pyobject.get_scope().invalidate_data() - pyobject._set_parameter_pyobjects( - args.get_arguments(pyobject.get_param_names(special_args=False))) - scope = pyobject.get_scope() - if not scope._get_returned_asts(): - return - maxtries = 3 - for returned_node in reversed(scope._get_returned_asts()[-maxtries:]): - try: - resulting_pyname = evaluate.eval_node(scope, returned_node) - if resulting_pyname is None: - continue - pyobject = resulting_pyname.get_object() - if pyobject == rope.base.pyobjects.get_unknown(): - continue - if not scope._is_generator(): - return pyobject - else: - return rope.base.builtins.get_generator(pyobject) - except rope.base.pyobjects.IsBeingInferredError: - pass - -def _parameter_objects(pyobject): - params = pyobject.get_param_names(special_args=False) - return [rope.base.pyobjects.get_unknown()] * len(params) - -# handling `rope.base.pynames.AssignmentValue` - -@_ignore_inferred -def _infer_assignment(assignment, pymodule): - result = _follow_pyname(assignment, pymodule) - if result is None: - return None - pyname, pyobject = result - pyobject 
= _follow_evaluations(assignment, pyname, pyobject) - if pyobject is None: - return None - return _follow_levels(assignment, pyobject) - -def _follow_levels(assignment, pyobject): - for index in assignment.levels: - if isinstance(pyobject.get_type(), rope.base.builtins.Tuple): - holdings = pyobject.get_type().get_holding_objects() - if holdings: - pyobject = holdings[min(len(holdings) - 1, index)] - else: - pyobject = None - elif isinstance(pyobject.get_type(), rope.base.builtins.List): - pyobject = pyobject.get_type().holding - else: - pyobject = None - if pyobject is None: - break - return pyobject - -@_ignore_inferred -def _follow_pyname(assignment, pymodule, lineno=None): - assign_node = assignment.ast_node - if lineno is None: - lineno = _get_lineno_for_node(assign_node) - holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - pyname = evaluate.eval_node(holding_scope, assign_node) - if pyname is not None: - result = pyname.get_object() - if isinstance(result.get_type(), rope.base.builtins.Property) and \ - holding_scope.get_kind() == 'Class': - arg = rope.base.pynames.UnboundName( - rope.base.pyobjects.PyObject(holding_scope.pyobject)) - return pyname, result.get_type().get_property_object( - arguments.ObjectArguments([arg])) - return pyname, result - -@_ignore_inferred -def _follow_evaluations(assignment, pyname, pyobject): - new_pyname = pyname - tokens = assignment.evaluation.split('.') - for token in tokens: - call = token.endswith('()') - if call: - token = token[:-2] - if token: - pyname = new_pyname - new_pyname = _get_attribute(pyobject, token) - if new_pyname is not None: - pyobject = new_pyname.get_object() - if pyobject is not None and call: - if isinstance(pyobject, rope.base.pyobjects.AbstractFunction): - args = arguments.ObjectArguments([pyname]) - pyobject = pyobject.get_returned_object(args) - else: - pyobject = None - if pyobject is None: - break - if pyobject is not None and assignment.assign_type: - return rope.base.pyobjects.PyObject(pyobject) - return pyobject - - -def _get_lineno_for_node(assign_node): - if hasattr(assign_node, 'lineno') and \ - assign_node.lineno is not None: - return assign_node.lineno - return 1 - -def _get_attribute(pyobject, name): - if pyobject is not None and name in pyobject: - return pyobject[name] diff --git a/pymode/libs3/rope/base/oi/transform.py b/pymode/libs3/rope/base/oi/transform.py deleted file mode 100644 index 5a9d600e..00000000 --- a/pymode/libs3/rope/base/oi/transform.py +++ /dev/null @@ -1,285 +0,0 @@ -"""Provides classes for persisting `PyObject`\s""" -import os -import re - -import rope.base.builtins -from rope.base import exceptions - - -class PyObjectToTextual(object): - """For transforming `PyObject` to textual form - - This can be used for storing `PyObjects` in files. Use - `TextualToPyObject` for converting back. 
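Concretely, the `*_to_textual` methods that follow flatten objects into nested tuples, and `TextualToPyObject.transform()` dispatches on the first element to rebuild them. The shapes below are read off those methods; the path and names are invented for illustration.

    # Textual forms as produced by PyObjectToTextual:
    ('none',)                                           # no object at all
    ('unknown',)                                        # nothing could be inferred
    ('builtin', 'str')                                  # a plain string
    ('builtin', 'list', ('builtin', 'str'))             # a list holding strings
    ('builtin', 'dict', ('builtin', 'str'), ('none',))  # keys and values
    ('defined', 'pkg/mod.py', 'MyClass.method')         # defined in the project
    ('instance', ('defined', 'pkg/mod.py', 'MyClass'))  # an instance of that class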
- - """ - - def __init__(self, project): - self.project = project - - def transform(self, pyobject): - """Transform a `PyObject` to textual form""" - if pyobject is None: - return ('none',) - object_type = type(pyobject) - try: - method = getattr(self, object_type.__name__ + '_to_textual') - return method(pyobject) - except AttributeError: - return ('unknown',) - - def __call__(self, pyobject): - return self.transform(pyobject) - - def PyObject_to_textual(self, pyobject): - if isinstance(pyobject.get_type(), rope.base.pyobjects.AbstractClass): - result = self.transform(pyobject.get_type()) - if result[0] == 'defined': - return ('instance', result) - return result - return ('unknown',) - - def PyFunction_to_textual(self, pyobject): - return self._defined_to_textual(pyobject) - - def PyClass_to_textual(self, pyobject): - return self._defined_to_textual(pyobject) - - def _defined_to_textual(self, pyobject): - address = [] - while pyobject.parent is not None: - address.insert(0, pyobject.get_name()) - pyobject = pyobject.parent - return ('defined', self._get_pymodule_path(pyobject.get_module()), - '.'.join(address)) - - def PyModule_to_textual(self, pyobject): - return ('defined', self._get_pymodule_path(pyobject)) - - def PyPackage_to_textual(self, pyobject): - return ('defined', self._get_pymodule_path(pyobject)) - - def List_to_textual(self, pyobject): - return ('builtin', 'list', self.transform(pyobject.holding)) - - def Dict_to_textual(self, pyobject): - return ('builtin', 'dict', self.transform(pyobject.keys), - self.transform(pyobject.values)) - - def Tuple_to_textual(self, pyobject): - objects = [self.transform(holding) - for holding in pyobject.get_holding_objects()] - return tuple(['builtin', 'tuple'] + objects) - - def Set_to_textual(self, pyobject): - return ('builtin', 'set', self.transform(pyobject.holding)) - - def Iterator_to_textual(self, pyobject): - return ('builtin', 'iter', self.transform(pyobject.holding)) - - def Generator_to_textual(self, pyobject): - return ('builtin', 'generator', self.transform(pyobject.holding)) - - def Str_to_textual(self, pyobject): - return ('builtin', 'str') - - def File_to_textual(self, pyobject): - return ('builtin', 'file') - - def BuiltinFunction_to_textual(self, pyobject): - return ('builtin', 'function', pyobject.get_name()) - - def _get_pymodule_path(self, pymodule): - return self.resource_to_path(pymodule.get_resource()) - - def resource_to_path(self, resource): - if resource.project == self.project: - return resource.path - else: - return resource.real_path - - -class TextualToPyObject(object): - """For transforming textual form to `PyObject`""" - - def __init__(self, project, allow_in_project_absolutes=False): - self.project = project - - def __call__(self, textual): - return self.transform(textual) - - def transform(self, textual): - """Transform an object from textual form to `PyObject`""" - if textual is None: - return None - type = textual[0] - try: - method = getattr(self, type + '_to_pyobject') - return method(textual) - except AttributeError: - return None - - def builtin_to_pyobject(self, textual): - name = textual[1] - method = getattr(self, 'builtin_%s_to_pyobject' % textual[1], None) - if method is not None: - return method(textual) - - def builtin_str_to_pyobject(self, textual): - return rope.base.builtins.get_str() - - def builtin_list_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_list(holding) - - def builtin_dict_to_pyobject(self, textual): - keys = 
self.transform(textual[2]) - values = self.transform(textual[3]) - return rope.base.builtins.get_dict(keys, values) - - def builtin_tuple_to_pyobject(self, textual): - objects = [] - for holding in textual[2:]: - objects.append(self.transform(holding)) - return rope.base.builtins.get_tuple(*objects) - - def builtin_set_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_set(holding) - - def builtin_iter_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_iterator(holding) - - def builtin_generator_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_generator(holding) - - def builtin_file_to_pyobject(self, textual): - return rope.base.builtins.get_file() - - def builtin_function_to_pyobject(self, textual): - if textual[2] in rope.base.builtins.builtins: - return rope.base.builtins.builtins[textual[2]].get_object() - - def unknown_to_pyobject(self, textual): - return None - - def none_to_pyobject(self, textual): - return None - - def _module_to_pyobject(self, textual): - path = textual[1] - return self._get_pymodule(path) - - def _hierarchical_defined_to_pyobject(self, textual): - path = textual[1] - names = textual[2].split('.') - pymodule = self._get_pymodule(path) - pyobject = pymodule - for name in names: - if pyobject is None: - return None - if isinstance(pyobject, rope.base.pyobjects.PyDefinedObject): - try: - pyobject = pyobject.get_scope()[name].get_object() - except exceptions.NameNotFoundError: - return None - else: - return None - return pyobject - - def defined_to_pyobject(self, textual): - if len(textual) == 2 or textual[2] == '': - return self._module_to_pyobject(textual) - else: - return self._hierarchical_defined_to_pyobject(textual) - - def instance_to_pyobject(self, textual): - type = self.transform(textual[1]) - if type is not None: - return rope.base.pyobjects.PyObject(type) - - def _get_pymodule(self, path): - resource = self.path_to_resource(path) - if resource is not None: - return self.project.pycore.resource_to_pyobject(resource) - - def path_to_resource(self, path): - try: - root = self.project.address - if not os.path.isabs(path): - return self.project.get_resource(path) - if path == root or path.startswith(root + os.sep): - # INFO: This is a project file; should not be absolute - return None - import rope.base.project - return rope.base.project.get_no_project().get_resource(path) - except exceptions.ResourceNotFoundError: - return None - - -class DOITextualToPyObject(TextualToPyObject): - """For transforming textual form to `PyObject` - - The textual form DOI uses is different from rope's standard - textual form. The reason is that we cannot find the needed - information by analyzing live objects. This class can be - used to transform DOI textual form to `PyObject` and later - we can convert it to standard textual form using - `TextualToPyObject` class. 
- - """ - - def _function_to_pyobject(self, textual): - path = textual[1] - lineno = int(textual[2]) - pymodule = self._get_pymodule(path) - if pymodule is not None: - scope = pymodule.get_scope() - inner_scope = scope.get_inner_scope_for_line(lineno) - return inner_scope.pyobject - - def _class_to_pyobject(self, textual): - path, name = textual[1:] - pymodule = self._get_pymodule(path) - if pymodule is None: - return None - module_scope = pymodule.get_scope() - suspected = None - if name in module_scope.get_names(): - suspected = module_scope[name].get_object() - if suspected is not None and \ - isinstance(suspected, rope.base.pyobjects.PyClass): - return suspected - else: - lineno = self._find_occurrence(name, pymodule.get_resource().read()) - if lineno is not None: - inner_scope = module_scope.get_inner_scope_for_line(lineno) - return inner_scope.pyobject - - def defined_to_pyobject(self, textual): - if len(textual) == 2: - return self._module_to_pyobject(textual) - else: - if textual[2].isdigit(): - result = self._function_to_pyobject(textual) - else: - result = self._class_to_pyobject(textual) - if not isinstance(result, rope.base.pyobjects.PyModule): - return result - - def _find_occurrence(self, name, source): - pattern = re.compile(r'^\s*class\s*' + name + r'\b') - lines = source.split('\n') - for i in range(len(lines)): - if pattern.match(lines[i]): - return i + 1 - - def path_to_resource(self, path): - import rope.base.libutils - root = self.project.address - relpath = rope.base.libutils.relative(root, path) - if relpath is not None: - path = relpath - return super(DOITextualToPyObject, self).path_to_resource(path) diff --git a/pymode/libs3/rope/base/prefs.py b/pymode/libs3/rope/base/prefs.py deleted file mode 100644 index 674a58ec..00000000 --- a/pymode/libs3/rope/base/prefs.py +++ /dev/null @@ -1,41 +0,0 @@ -class Prefs(object): - - def __init__(self): - self.prefs = {} - self.callbacks = {} - - def set(self, key, value): - """Set the value of `key` preference to `value`.""" - if key in self.callbacks: - self.callbacks[key](value) - else: - self.prefs[key] = value - - def add(self, key, value): - """Add an entry to a list preference - - Add `value` to the list of entries for the `key` preference. - - """ - if not key in self.prefs: - self.prefs[key] = [] - self.prefs[key].append(value) - - def get(self, key, default=None): - """Get the value of the key preference""" - return self.prefs.get(key, default) - - def add_callback(self, key, callback): - """Add `key` preference with `callback` function - - Whenever `key` is set the callback is called with the - given `value` as parameter. - - """ - self.callbacks[key] = callback - - def __setitem__(self, key, value): - self.set(key, value) - - def __getitem__(self, key): - return self.get(key) diff --git a/pymode/libs3/rope/base/project.py b/pymode/libs3/rope/base/project.py deleted file mode 100644 index 0c9952ba..00000000 --- a/pymode/libs3/rope/base/project.py +++ /dev/null @@ -1,375 +0,0 @@ -import pickle -import os -import shutil -import sys -import warnings - -import rope.base.fscommands -from rope.base import exceptions, taskhandle, prefs, history, pycore, utils -from rope.base.resourceobserver import * -from rope.base.resources import File, Folder, _ResourceMatcher - - -class _Project(object): - - def __init__(self, fscommands): - self.observers = [] - self.fscommands = fscommands - self.prefs = prefs.Prefs() - self.data_files = _DataFiles(self) - - def get_resource(self, resource_name): - """Get a resource in a project. 
- - `resource_name` is the path of a resource in a project. It is - the path of a resource relative to project root. Project root - folder address is an empty string. If the resource does not - exist a `exceptions.ResourceNotFound` exception would be - raised. Use `get_file()` and `get_folder()` when you need to - get nonexistent `Resource`\s. - - """ - path = self._get_resource_path(resource_name) - if not os.path.exists(path): - raise exceptions.ResourceNotFoundError( - 'Resource <%s> does not exist' % resource_name) - elif os.path.isfile(path): - return File(self, resource_name) - elif os.path.isdir(path): - return Folder(self, resource_name) - else: - raise exceptions.ResourceNotFoundError('Unknown resource ' - + resource_name) - - def validate(self, folder): - """Validate files and folders contained in this folder - - It validates all of the files and folders contained in this - folder if some observers are interested in them. - - """ - for observer in list(self.observers): - observer.validate(folder) - - def add_observer(self, observer): - """Register a `ResourceObserver` - - See `FilteredResourceObserver`. - """ - self.observers.append(observer) - - def remove_observer(self, observer): - """Remove a registered `ResourceObserver`""" - if observer in self.observers: - self.observers.remove(observer) - - def do(self, changes, task_handle=taskhandle.NullTaskHandle()): - """Apply the changes in a `ChangeSet` - - Most of the time you call this function for committing the - changes for a refactoring. - """ - self.history.do(changes, task_handle=task_handle) - - def get_pycore(self): - return self.pycore - - def get_file(self, path): - """Get the file with `path` (it may not exist)""" - return File(self, path) - - def get_folder(self, path): - """Get the folder with `path` (it may not exist)""" - return Folder(self, path) - - def is_ignored(self, resource): - return False - - def get_prefs(self): - return self.prefs - - def _get_resource_path(self, name): - pass - - @property - @utils.saveit - def history(self): - return history.History(self) - - @property - @utils.saveit - def pycore(self): - return pycore.PyCore(self) - - def close(self): - warnings.warn('Cannot close a NoProject', - DeprecationWarning, stacklevel=2) - - ropefolder = None - - -class Project(_Project): - """A Project containing files and folders""" - - def __init__(self, projectroot, fscommands=None, - ropefolder='.ropeproject', **prefs): - """A rope project - - :parameters: - - `projectroot`: The address of the root folder of the project - - `fscommands`: Implements the file system operations used - by rope; have a look at `rope.base.fscommands` - - `ropefolder`: The name of the folder in which rope stores - project configurations and data. Pass `None` for not using - such a folder at all. - - `prefs`: Specify project preferences. These values - overwrite config file preferences. 
- - """ - if projectroot != '/': - projectroot = _realpath(projectroot).rstrip('/\\') - self._address = projectroot - self._ropefolder_name = ropefolder - if not os.path.exists(self._address): - os.mkdir(self._address) - elif not os.path.isdir(self._address): - raise exceptions.RopeError('Project root exists and' - ' is not a directory') - if fscommands is None: - fscommands = rope.base.fscommands.create_fscommands(self._address) - super(Project, self).__init__(fscommands) - self.ignored = _ResourceMatcher() - self.file_list = _FileListCacher(self) - self.prefs.add_callback('ignored_resources', self.ignored.set_patterns) - if ropefolder is not None: - self.prefs['ignored_resources'] = [ropefolder] - self._init_prefs(prefs) - - def get_files(self): - return self.file_list.get_files() - - def _get_resource_path(self, name): - return os.path.join(self._address, *name.split('/')) - - def _init_ropefolder(self): - if self.ropefolder is not None: - if not self.ropefolder.exists(): - self._create_recursively(self.ropefolder) - if not self.ropefolder.has_child('config.py'): - config = self.ropefolder.create_file('config.py') - config.write(self._default_config()) - - def _create_recursively(self, folder): - if folder.parent != self.root and not folder.parent.exists(): - self._create_recursively(folder.parent) - folder.create() - - def _init_prefs(self, prefs): - run_globals = {} - if self.ropefolder is not None: - config = self.get_file(self.ropefolder.path + '/config.py') - run_globals.update({'__name__': '__main__', - '__builtins__': __builtins__, - '__file__': config.real_path}) - if config.exists(): - config = self.ropefolder.get_child('config.py') - exec(config.read(), run_globals) - else: - exec(self._default_config(), run_globals) - if 'set_prefs' in run_globals: - run_globals['set_prefs'](self.prefs) - for key, value in prefs.items(): - self.prefs[key] = value - self._init_other_parts() - self._init_ropefolder() - if 'project_opened' in run_globals: - run_globals['project_opened'](self) - - def _default_config(self): - import rope.base.default_config - import inspect - return inspect.getsource(rope.base.default_config) - - def _init_other_parts(self): - # Forcing the creation of `self.pycore` to register observers - self.pycore - - def is_ignored(self, resource): - return self.ignored.does_match(resource) - - def sync(self): - """Closes project open resources""" - self.close() - - def close(self): - """Closes project open resources""" - self.data_files.write() - - def set(self, key, value): - """Set the `key` preference to `value`""" - self.prefs.set(key, value) - - @property - def ropefolder(self): - if self._ropefolder_name is not None: - return self.get_folder(self._ropefolder_name) - - def validate(self, folder=None): - if folder is None: - folder = self.root - super(Project, self).validate(folder) - - root = property(lambda self: self.get_resource('')) - address = property(lambda self: self._address) - - -class NoProject(_Project): - """A null object for holding out of project files. 
- - This class is singleton use `get_no_project` global function - """ - - def __init__(self): - fscommands = rope.base.fscommands.FileSystemCommands() - super(NoProject, self).__init__(fscommands) - - def _get_resource_path(self, name): - real_name = name.replace('/', os.path.sep) - return _realpath(real_name) - - def get_resource(self, name): - universal_name = _realpath(name).replace(os.path.sep, '/') - return super(NoProject, self).get_resource(universal_name) - - def get_files(self): - return [] - - _no_project = None - - -def get_no_project(): - if NoProject._no_project is None: - NoProject._no_project = NoProject() - return NoProject._no_project - - -class _FileListCacher(object): - - def __init__(self, project): - self.project = project - self.files = None - rawobserver = ResourceObserver( - self._changed, self._invalid, self._invalid, - self._invalid, self._invalid) - self.project.add_observer(rawobserver) - - def get_files(self): - if self.files is None: - self.files = set() - self._add_files(self.project.root) - return self.files - - def _add_files(self, folder): - for child in folder.get_children(): - if child.is_folder(): - self._add_files(child) - elif not self.project.is_ignored(child): - self.files.add(child) - - def _changed(self, resource): - if resource.is_folder(): - self.files = None - - def _invalid(self, resource, new_resource=None): - self.files = None - - -class _DataFiles(object): - - def __init__(self, project): - self.project = project - self.hooks = [] - - def read_data(self, name, compress=False, import_=False): - if self.project.ropefolder is None: - return None - compress = compress and self._can_compress() - opener = self._get_opener(compress) - file = self._get_file(name, compress) - if not compress and import_: - self._import_old_files(name) - if file.exists(): - input = opener(file.real_path, 'rb') - try: - result = [] - try: - while True: - result.append(pickle.load(input)) - except EOFError: - pass - if len(result) == 1: - return result[0] - if len(result) > 1: - return result - finally: - input.close() - - def write_data(self, name, data, compress=False): - if self.project.ropefolder is not None: - compress = compress and self._can_compress() - file = self._get_file(name, compress) - opener = self._get_opener(compress) - output = opener(file.real_path, 'wb') - try: - pickle.dump(data, output, 2) - finally: - output.close() - - def add_write_hook(self, hook): - self.hooks.append(hook) - - def write(self): - for hook in self.hooks: - hook() - - def _can_compress(self): - try: - import gzip - return True - except ImportError: - return False - - def _import_old_files(self, name): - old = self._get_file(name + '.pickle', False) - new = self._get_file(name, False) - if old.exists() and not new.exists(): - shutil.move(old.real_path, new.real_path) - - def _get_opener(self, compress): - if compress: - try: - import gzip - return gzip.open - except ImportError: - pass - return open - - def _get_file(self, name, compress): - path = self.project.ropefolder.path + '/' + name - if compress: - path += '.gz' - return self.project.get_file(path) - - -def _realpath(path): - """Return the real path of `path` - - Is equivalent to ``realpath(abspath(expanduser(path)))``. 
- - """ - # there is a bug in cygwin for os.path.abspath() for abs paths - if sys.platform == 'cygwin': - if path[1:3] == ':\\': - return path - return os.path.abspath(os.path.expanduser(path)) - return os.path.realpath(os.path.abspath(os.path.expanduser(path))) diff --git a/pymode/libs3/rope/base/pycore.py b/pymode/libs3/rope/base/pycore.py deleted file mode 100644 index 700fcde6..00000000 --- a/pymode/libs3/rope/base/pycore.py +++ /dev/null @@ -1,410 +0,0 @@ -import bisect -import difflib -import sys -import warnings - -import rope.base.oi.doa -import rope.base.oi.objectinfo -import rope.base.oi.soa -from rope.base import ast, exceptions, taskhandle, utils, stdmods -from rope.base.exceptions import ModuleNotFoundError -from rope.base.pyobjectsdef import PyModule, PyPackage, PyClass -import rope.base.resources -import rope.base.resourceobserver -from rope.base import builtins - - -class PyCore(object): - - def __init__(self, project): - self.project = project - self._init_resource_observer() - self.cache_observers = [] - self.module_cache = _ModuleCache(self) - self.extension_cache = _ExtensionCache(self) - self.object_info = rope.base.oi.objectinfo.ObjectInfoManager(project) - self._init_python_files() - self._init_automatic_soa() - self._init_source_folders() - - def _init_python_files(self): - self.python_matcher = None - patterns = self.project.prefs.get('python_files', None) - if patterns is not None: - self.python_matcher = rope.base.resources._ResourceMatcher() - self.python_matcher.set_patterns(patterns) - - def _init_resource_observer(self): - callback = self._invalidate_resource_cache - observer = rope.base.resourceobserver.ResourceObserver( - changed=callback, moved=callback, removed=callback) - self.observer = rope.base.resourceobserver.FilteredResourceObserver(observer) - self.project.add_observer(self.observer) - - def _init_source_folders(self): - self._custom_source_folders = [] - for path in self.project.prefs.get('source_folders', []): - folder = self.project.get_resource(path) - self._custom_source_folders.append(folder) - - def _init_automatic_soa(self): - if not self.automatic_soa: - return - callback = self._file_changed_for_soa - observer = rope.base.resourceobserver.ResourceObserver( - changed=callback, moved=callback, removed=callback) - self.project.add_observer(observer) - - @property - def automatic_soa(self): - auto_soa = self.project.prefs.get('automatic_soi', None) - return self.project.prefs.get('automatic_soa', auto_soa) - - def _file_changed_for_soa(self, resource, new_resource=None): - old_contents = self.project.history.\ - contents_before_current_change(resource) - if old_contents is not None: - perform_soa_on_changed_scopes(self.project, resource, old_contents) - - def is_python_file(self, resource): - if resource.is_folder(): - return False - if self.python_matcher is None: - return resource.name.endswith('.py') - return self.python_matcher.does_match(resource) - - def get_module(self, name, folder=None): - """Returns a `PyObject` if the module was found.""" - # check if this is a builtin module - pymod = self._builtin_module(name) - if pymod is not None: - return pymod - module = self.find_module(name, folder) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) - - def _builtin_submodules(self, modname): - result = {} - for extension in self.extension_modules: - if extension.startswith(modname + '.'): - name = extension[len(modname) + 1:] - if '.' 
not in name: - result[name] = self._builtin_module(extension) - return result - - def _builtin_module(self, name): - return self.extension_cache.get_pymodule(name) - - def get_relative_module(self, name, folder, level): - module = self.find_relative_module(name, folder, level) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) - - def get_string_module(self, code, resource=None, force_errors=False): - """Returns a `PyObject` object for the given code - - If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is - raised if module has syntax errors. This overrides - ``ignore_syntax_errors`` project config. - - """ - return PyModule(self, code, resource, force_errors=force_errors) - - def get_string_scope(self, code, resource=None): - """Returns a `Scope` object for the given code""" - return self.get_string_module(code, resource).get_scope() - - def _invalidate_resource_cache(self, resource, new_resource=None): - for observer in self.cache_observers: - observer(resource) - - def _find_module_in_folder(self, folder, modname): - module = folder - packages = modname.split('.') - for pkg in packages[:-1]: - if module.is_folder() and module.has_child(pkg): - module = module.get_child(pkg) - else: - return None - if module.is_folder(): - if module.has_child(packages[-1]) and \ - module.get_child(packages[-1]).is_folder(): - return module.get_child(packages[-1]) - elif module.has_child(packages[-1] + '.py') and \ - not module.get_child(packages[-1] + '.py').is_folder(): - return module.get_child(packages[-1] + '.py') - - def get_python_path_folders(self): - import rope.base.project - result = [] - for src in self.project.prefs.get('python_path', []) + sys.path: - try: - src_folder = rope.base.project.get_no_project().get_resource(src) - result.append(src_folder) - except rope.base.exceptions.ResourceNotFoundError: - pass - return result - - def find_module(self, modname, folder=None): - """Returns a resource corresponding to the given module - - returns None if it can not be found - """ - return self._find_module(modname, folder) - - def find_relative_module(self, modname, folder, level): - for i in range(level - 1): - folder = folder.parent - if modname == '': - return folder - else: - return self._find_module_in_folder(folder, modname) - - def _find_module(self, modname, folder=None): - """Return `modname` module resource""" - for src in self.get_source_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - for src in self.get_python_path_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - if folder is not None: - module = self._find_module_in_folder(folder, modname) - if module is not None: - return module - return None - - # INFO: It was decided not to cache source folders, since: - # - Does not take much time when the root folder contains - # packages, that is most of the time - # - We need a separate resource observer; `self.observer` - # does not get notified about module and folder creations - def get_source_folders(self): - """Returns project source folders""" - if self.project.root is None: - return [] - result = list(self._custom_source_folders) - result.extend(self._find_source_folders(self.project.root)) - return result - - def resource_to_pyobject(self, resource, force_errors=False): - return self.module_cache.get_pymodule(resource, force_errors) - - def get_python_files(self): - """Returns all python 
files available in the project""" - return [resource for resource in self.project.get_files() - if self.is_python_file(resource)] - - def _is_package(self, folder): - if folder.has_child('__init__.py') and \ - not folder.get_child('__init__.py').is_folder(): - return True - else: - return False - - def _find_source_folders(self, folder): - for resource in folder.get_folders(): - if self._is_package(resource): - return [folder] - result = [] - for resource in folder.get_files(): - if resource.name.endswith('.py'): - result.append(folder) - break - for resource in folder.get_folders(): - result.extend(self._find_source_folders(resource)) - return result - - def run_module(self, resource, args=None, stdin=None, stdout=None): - """Run `resource` module - - Returns a `rope.base.oi.doa.PythonFileRunner` object for - controlling the process. - - """ - perform_doa = self.project.prefs.get('perform_doi', True) - perform_doa = self.project.prefs.get('perform_doa', perform_doa) - receiver = self.object_info.doa_data_received - if not perform_doa: - receiver = None - runner = rope.base.oi.doa.PythonFileRunner( - self, resource, args, stdin, stdout, receiver) - runner.add_finishing_observer(self.module_cache.forget_all_data) - runner.run() - return runner - - def analyze_module(self, resource, should_analyze=lambda py: True, - search_subscopes=lambda py: True, followed_calls=None): - """Analyze `resource` module for static object inference - - This function forces rope to analyze this module to collect - information about function calls. `should_analyze` is a - function that is called with a `PyDefinedObject` argument. If - it returns `True` the element is analyzed. If it is `None` or - returns `False` the element is not analyzed. - - `search_subscopes` is like `should_analyze`; The difference is - that if it returns `False` the sub-scopes are all ignored. - That is it is assumed that `should_analyze` returns `False` - for all of its subscopes. - - `followed_calls` override the value of ``soa_followed_calls`` - project config. - """ - if followed_calls is None: - followed_calls = self.project.prefs.get('soa_followed_calls', 0) - pymodule = self.resource_to_pyobject(resource) - self.module_cache.forget_all_data() - rope.base.oi.soa.analyze_module( - self, pymodule, should_analyze, search_subscopes, followed_calls) - - def get_classes(self, task_handle=taskhandle.NullTaskHandle()): - warnings.warn('`PyCore.get_classes()` is deprecated', - DeprecationWarning, stacklevel=2) - return [] - - def __str__(self): - return str(self.module_cache) + str(self.object_info) - - def modname(self, resource): - if resource.is_folder(): - module_name = resource.name - source_folder = resource.parent - elif resource.name == '__init__.py': - module_name = resource.parent.name - source_folder = resource.parent.parent - else: - module_name = resource.name[:-3] - source_folder = resource.parent - - while source_folder != source_folder.parent and \ - source_folder.has_child('__init__.py'): - module_name = source_folder.name + '.' 
+ module_name - source_folder = source_folder.parent - return module_name - - @property - @utils.cacheit - def extension_modules(self): - result = set(self.project.prefs.get('extension_modules', [])) - if self.project.prefs.get('import_dynload_stdmods', False): - result.update(stdmods.dynload_modules()) - return result - - -class _ModuleCache(object): - - def __init__(self, pycore): - self.pycore = pycore - self.module_map = {} - self.pycore.cache_observers.append(self._invalidate_resource) - self.observer = self.pycore.observer - - def _invalidate_resource(self, resource): - if resource in self.module_map: - self.forget_all_data() - self.observer.remove_resource(resource) - del self.module_map[resource] - - def get_pymodule(self, resource, force_errors=False): - if resource in self.module_map: - return self.module_map[resource] - if resource.is_folder(): - result = PyPackage(self.pycore, resource, - force_errors=force_errors) - else: - result = PyModule(self.pycore, resource=resource, - force_errors=force_errors) - if result.has_errors: - return result - self.module_map[resource] = result - self.observer.add_resource(resource) - return result - - def forget_all_data(self): - for pymodule in self.module_map.values(): - pymodule._forget_concluded_data() - - def __str__(self): - return 'PyCore caches %d PyModules\n' % len(self.module_map) - - -class _ExtensionCache(object): - - def __init__(self, pycore): - self.pycore = pycore - self.extensions = {} - - def get_pymodule(self, name): - if name == '__builtin__': - return builtins.builtins - allowed = self.pycore.extension_modules - if name not in self.extensions and name in allowed: - self.extensions[name] = builtins.BuiltinModule(name, self.pycore) - return self.extensions.get(name) - - -def perform_soa_on_changed_scopes(project, resource, old_contents): - pycore = project.pycore - if resource.exists() and pycore.is_python_file(resource): - try: - new_contents = resource.read() - # detecting changes in new_contents relative to old_contents - detector = _TextChangeDetector(new_contents, old_contents) - def search_subscopes(pydefined): - scope = pydefined.get_scope() - return detector.is_changed(scope.get_start(), scope.get_end()) - def should_analyze(pydefined): - scope = pydefined.get_scope() - start = scope.get_start() - end = scope.get_end() - return detector.consume_changes(start, end) - pycore.analyze_module(resource, should_analyze, search_subscopes) - except exceptions.ModuleSyntaxError: - pass - - -class _TextChangeDetector(object): - - def __init__(self, old, new): - self.old = old - self.new = new - self._set_diffs() - - def _set_diffs(self): - differ = difflib.Differ() - self.lines = [] - lineno = 0 - for line in differ.compare(self.old.splitlines(True), - self.new.splitlines(True)): - if line.startswith(' '): - lineno += 1 - elif line.startswith('-'): - lineno += 1 - self.lines.append(lineno) - - def is_changed(self, start, end): - """Tell whether any of start till end lines have changed - - The end points are inclusive and indices start from 1. 
- """ - left, right = self._get_changed(start, end) - if left < right: - return True - return False - - def consume_changes(self, start, end): - """Clear the changed status of lines from start till end""" - left, right = self._get_changed(start, end) - if left < right: - del self.lines[left:right] - return left < right - - def _get_changed(self, start, end): - left = bisect.bisect_left(self.lines, start) - right = bisect.bisect_right(self.lines, end) - return left, right diff --git a/pymode/libs3/rope/base/pynames.py b/pymode/libs3/rope/base/pynames.py deleted file mode 100644 index 79bba156..00000000 --- a/pymode/libs3/rope/base/pynames.py +++ /dev/null @@ -1,199 +0,0 @@ -import rope.base.pyobjects -from rope.base import exceptions, utils - - -class PyName(object): - """References to `PyObject`\s inside python programs""" - - def get_object(self): - """Return the `PyObject` object referenced by this `PyName`""" - - def get_definition_location(self): - """Return a (module, lineno) tuple""" - - -class DefinedName(PyName): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (self.pyobject.get_module(), self.pyobject.get_ast().lineno) - - -class AssignedName(PyName): - """Only a placeholder""" - - -class UnboundName(PyName): - - def __init__(self, pyobject=None): - self.pyobject = pyobject - if self.pyobject is None: - self.pyobject = rope.base.pyobjects.get_unknown() - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (None, None) - - -class AssignmentValue(object): - """An assigned expression""" - - def __init__(self, ast_node, levels=None, evaluation='', - assign_type=False): - """The `level` is `None` for simple assignments and is - a list of numbers for tuple assignments for example in:: - - a, (b, c) = x - - The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for - `c` is ``[1, 1]``. 
- - """ - self.ast_node = ast_node - if levels == None: - self.levels = [] - else: - self.levels = levels - self.evaluation = evaluation - self.assign_type = assign_type - - def get_lineno(self): - return self.ast_node.lineno - - -class EvaluatedName(PyName): - """A name whose object will be evaluated later""" - - def __init__(self, callback, module=None, lineno=None): - self.module = module - self.lineno = lineno - self.callback = callback - self.pyobject = _Inferred(callback, _get_concluded_data(module)) - - def get_object(self): - return self.pyobject.get() - - def get_definition_location(self): - return (self.module, self.lineno) - - def invalidate(self): - """Forget the `PyObject` this `PyName` holds""" - self.pyobject.set(None) - - -class ParameterName(PyName): - """Only a placeholder""" - - -class ImportedModule(PyName): - - def __init__(self, importing_module, module_name=None, - level=0, resource=None): - self.importing_module = importing_module - self.module_name = module_name - self.level = level - self.resource = resource - self.pymodule = _get_concluded_data(self.importing_module) - - def _current_folder(self): - resource = self.importing_module.get_module().get_resource() - if resource is None: - return None - return resource.parent - - def _get_pymodule(self): - if self.pymodule.get() is None: - pycore = self.importing_module.pycore - if self.resource is not None: - self.pymodule.set(pycore.resource_to_pyobject(self.resource)) - elif self.module_name is not None: - try: - if self.level == 0: - pymodule = pycore.get_module(self.module_name, - self._current_folder()) - else: - pymodule = pycore.get_relative_module( - self.module_name, self._current_folder(), self.level) - self.pymodule.set(pymodule) - except exceptions.ModuleNotFoundError: - pass - return self.pymodule.get() - - def get_object(self): - if self._get_pymodule() is None: - return rope.base.pyobjects.get_unknown() - return self._get_pymodule() - - def get_definition_location(self): - pymodule = self._get_pymodule() - if not isinstance(pymodule, rope.base.pyobjects.PyDefinedObject): - return (None, None) - return (pymodule.get_module(), 1) - - -class ImportedName(PyName): - - def __init__(self, imported_module, imported_name): - self.imported_module = imported_module - self.imported_name = imported_name - - def _get_imported_pyname(self): - try: - result = self.imported_module.get_object()[self.imported_name] - if result != self: - return result - except exceptions.AttributeNotFoundError: - pass - return UnboundName() - - @utils.prevent_recursion(rope.base.pyobjects.get_unknown) - def get_object(self): - return self._get_imported_pyname().get_object() - - @utils.prevent_recursion(lambda: (None, None)) - def get_definition_location(self): - return self._get_imported_pyname().get_definition_location() - - -def _get_concluded_data(module): - if module is None: - return rope.base.pyobjects._ConcludedData() - return module._get_concluded_data() - - -def _circular_inference(): - raise rope.base.pyobjects.IsBeingInferredError( - 'Circular Object Inference') - -class _Inferred(object): - - def __init__(self, get_inferred, concluded=None): - self.get_inferred = get_inferred - self.concluded = concluded - if self.concluded is None: - self.temp = None - - @utils.prevent_recursion(_circular_inference) - def get(self, *args, **kwds): - if self.concluded is None or self.concluded.get() is None: - self.set(self.get_inferred(*args, **kwds)) - if self._get() is None: - self.set(rope.base.pyobjects.get_unknown()) - return 
self._get() - - def set(self, pyobject): - if self.concluded is not None: - self.concluded.set(pyobject) - self.temp = pyobject - - def _get(self): - if self.concluded is not None: - return self.concluded.get() - return self.temp diff --git a/pymode/libs3/rope/base/pynamesdef.py b/pymode/libs3/rope/base/pynamesdef.py deleted file mode 100644 index 6dba0a80..00000000 --- a/pymode/libs3/rope/base/pynamesdef.py +++ /dev/null @@ -1,55 +0,0 @@ -import rope.base.oi.soi -from rope.base import pynames -from rope.base.pynames import * - - -class AssignedName(pynames.AssignedName): - - def __init__(self, lineno=None, module=None, pyobject=None): - self.lineno = lineno - self.module = module - self.assignments = [] - self.pyobject = _Inferred(self._get_inferred, - pynames._get_concluded_data(module)) - self.pyobject.set(pyobject) - - @utils.prevent_recursion(lambda: None) - def _get_inferred(self): - if self.module is not None: - return rope.base.oi.soi.infer_assigned_object(self) - - def get_object(self): - return self.pyobject.get() - - def get_definition_location(self): - """Returns a (module, lineno) tuple""" - if self.lineno is None and self.assignments: - self.lineno = self.assignments[0].get_lineno() - return (self.module, self.lineno) - - def invalidate(self): - """Forget the `PyObject` this `PyName` holds""" - self.pyobject.set(None) - - -class ParameterName(pynames.ParameterName): - - def __init__(self, pyfunction, index): - self.pyfunction = pyfunction - self.index = index - - def get_object(self): - result = self.pyfunction.get_parameter(self.index) - if result is None: - result = rope.base.pyobjects.get_unknown() - return result - - def get_objects(self): - """Returns the list of objects passed as this parameter""" - return rope.base.oi.soi.get_passed_objects( - self.pyfunction, self.index) - - def get_definition_location(self): - return (self.pyfunction.get_module(), self.pyfunction.get_ast().lineno) - -_Inferred = pynames._Inferred diff --git a/pymode/libs3/rope/base/pyobjects.py b/pymode/libs3/rope/base/pyobjects.py deleted file mode 100644 index 76be3040..00000000 --- a/pymode/libs3/rope/base/pyobjects.py +++ /dev/null @@ -1,311 +0,0 @@ -from rope.base.fscommands import _decode_data -from rope.base import ast, exceptions, utils - - -class PyObject(object): - - def __init__(self, type_): - if type_ is None: - type_ = self - self.type = type_ - - def get_attributes(self): - if self.type is self: - return {} - return self.type.get_attributes() - - def get_attribute(self, name): - if name not in self.get_attributes(): - raise exceptions.AttributeNotFoundError( - 'Attribute %s not found' % name) - return self.get_attributes()[name] - - def get_type(self): - return self.type - - def __getitem__(self, key): - """The same as ``get_attribute(key)``""" - return self.get_attribute(key) - - def __contains__(self, key): - """The same as ``key in self.get_attributes()``""" - return key in self.get_attributes() - - def __eq__(self, obj): - """Check the equality of two `PyObject`\s - - Currently it is assumed that instances (the direct instances - of `PyObject`, not the instances of its subclasses) are equal - if their types are equal. For every other object like - defineds or builtins rope assumes objects are reference - objects and their identities should match. 
- - """ - if self.__class__ != obj.__class__: - return False - if type(self) == PyObject: - if self is not self.type: - return self.type == obj.type - else: - return self.type is obj.type - return self is obj - - def __ne__(self, obj): - return not self.__eq__(obj) - - def __hash__(self): - """See docs for `__eq__()` method""" - if type(self) == PyObject and self != self.type: - return hash(self.type) + 1 - else: - return super(PyObject, self).__hash__() - - def __iter__(self): - """The same as ``iter(self.get_attributes())``""" - return iter(self.get_attributes()) - - _types = None - _unknown = None - - @staticmethod - def _get_base_type(name): - if PyObject._types is None: - PyObject._types = {} - base_type = PyObject(None) - PyObject._types['Type'] = base_type - PyObject._types['Module'] = PyObject(base_type) - PyObject._types['Function'] = PyObject(base_type) - PyObject._types['Unknown'] = PyObject(base_type) - return PyObject._types[name] - - -def get_base_type(name): - """Return the base type with name `name`. - - The base types are 'Type', 'Function', 'Module' and 'Unknown'. It - was used to check the type of a `PyObject` but currently its use - is discouraged. Use classes defined in this module instead. - For example instead of - ``pyobject.get_type() == get_base_type('Function')`` use - ``isinstance(pyobject, AbstractFunction)``. - - You can use `AbstractClass` for classes, `AbstractFunction` for - functions, and `AbstractModule` for modules. You can also use - `PyFunction` and `PyClass` for testing if an object is - defined somewhere and rope can access its source. These classes - provide more methods. - - """ - return PyObject._get_base_type(name) - - -def get_unknown(): - """Return a pyobject whose type is unknown - - Note that two unknown objects are equal. So for example you can - write:: - - if pyname.get_object() == get_unknown(): - print 'cannot determine what this pyname holds' - - Rope could have used `None` for indicating unknown objects but - we had to check that in many places. So actually this method - returns a null object. 
- - """ - if PyObject._unknown is None: - PyObject._unknown = PyObject(get_base_type('Unknown')) - return PyObject._unknown - - -class AbstractClass(PyObject): - - def __init__(self): - super(AbstractClass, self).__init__(get_base_type('Type')) - - def get_name(self): - pass - - def get_doc(self): - pass - - def get_superclasses(self): - return [] - - -class AbstractFunction(PyObject): - - def __init__(self): - super(AbstractFunction, self).__init__(get_base_type('Function')) - - def get_name(self): - pass - - def get_doc(self): - pass - - def get_param_names(self, special_args=True): - return [] - - def get_returned_object(self, args): - return get_unknown() - - -class AbstractModule(PyObject): - - def __init__(self, doc=None): - super(AbstractModule, self).__init__(get_base_type('Module')) - - def get_doc(self): - pass - - def get_resource(self): - pass - - -class PyDefinedObject(object): - """Python defined names that rope can access their sources""" - - def __init__(self, pycore, ast_node, parent): - self.pycore = pycore - self.ast_node = ast_node - self.scope = None - self.parent = parent - self.structural_attributes = None - self.concluded_attributes = self.get_module()._get_concluded_data() - self.attributes = self.get_module()._get_concluded_data() - self.defineds = None - - visitor_class = None - - @utils.prevent_recursion(lambda: {}) - def _get_structural_attributes(self): - if self.structural_attributes is None: - self.structural_attributes = self._create_structural_attributes() - return self.structural_attributes - - @utils.prevent_recursion(lambda: {}) - def _get_concluded_attributes(self): - if self.concluded_attributes.get() is None: - self._get_structural_attributes() - self.concluded_attributes.set(self._create_concluded_attributes()) - return self.concluded_attributes.get() - - def get_attributes(self): - if self.attributes.get() is None: - result = dict(self._get_concluded_attributes()) - result.update(self._get_structural_attributes()) - self.attributes.set(result) - return self.attributes.get() - - def get_attribute(self, name): - if name in self._get_structural_attributes(): - return self._get_structural_attributes()[name] - if name in self._get_concluded_attributes(): - return self._get_concluded_attributes()[name] - raise exceptions.AttributeNotFoundError('Attribute %s not found' % - name) - - def get_scope(self): - if self.scope is None: - self.scope = self._create_scope() - return self.scope - - def get_module(self): - current_object = self - while current_object.parent is not None: - current_object = current_object.parent - return current_object - - def get_doc(self): - if len(self.get_ast().body) > 0: - expr = self.get_ast().body[0] - if isinstance(expr, ast.Expr) and \ - isinstance(expr.value, ast.Str): - docstring = expr.value.s - coding = self.get_module().coding - return _decode_data(docstring, coding) - - def _get_defined_objects(self): - if self.defineds is None: - self._get_structural_attributes() - return self.defineds - - def _create_structural_attributes(self): - if self.visitor_class is None: - return {} - new_visitor = self.visitor_class(self.pycore, self) - for child in ast.get_child_nodes(self.ast_node): - ast.walk(child, new_visitor) - self.defineds = new_visitor.defineds - return new_visitor.names - - def _create_concluded_attributes(self): - return {} - - def get_ast(self): - return self.ast_node - - def _create_scope(self): - pass - - -class PyFunction(PyDefinedObject, AbstractFunction): - """Only a placeholder""" - - -class 
PyClass(PyDefinedObject, AbstractClass): - """Only a placeholder""" - - -class _ConcludedData(object): - - def __init__(self): - self.data_ = None - - def set(self, data): - self.data_ = data - - def get(self): - return self.data_ - - data = property(get, set) - - def _invalidate(self): - self.data = None - - def __str__(self): - return '<' + str(self.data) + '>' - - -class _PyModule(PyDefinedObject, AbstractModule): - - def __init__(self, pycore, ast_node, resource): - self.resource = resource - self.concluded_data = [] - AbstractModule.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, None) - - def _get_concluded_data(self): - new_data = _ConcludedData() - self.concluded_data.append(new_data) - return new_data - - def _forget_concluded_data(self): - for data in self.concluded_data: - data._invalidate() - - def get_resource(self): - return self.resource - - -class PyModule(_PyModule): - """Only a placeholder""" - - -class PyPackage(_PyModule): - """Only a placeholder""" - - -class IsBeingInferredError(exceptions.RopeError): - pass diff --git a/pymode/libs3/rope/base/pyobjectsdef.py b/pymode/libs3/rope/base/pyobjectsdef.py deleted file mode 100644 index 57e7af58..00000000 --- a/pymode/libs3/rope/base/pyobjectsdef.py +++ /dev/null @@ -1,555 +0,0 @@ -import sys -import rope.base.codeanalyze -import rope.base.evaluate -import rope.base.builtins -import rope.base.oi.soi -import rope.base.pyscopes -from rope.base import (pynamesdef as pynames, exceptions, ast, - astutils, pyobjects, fscommands, arguments, utils) -from rope.base.pyobjects import * - -class PyFunction(pyobjects.PyFunction): - - def __init__(self, pycore, ast_node, parent): - AbstractFunction.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) - self.arguments = self.ast_node.args - self.parameter_pyobjects = pynames._Inferred( - self._infer_parameters, self.get_module()._get_concluded_data()) - self.returned = pynames._Inferred(self._infer_returned) - self.parameter_pynames = None - - def _create_structural_attributes(self): - return {} - - def _create_concluded_attributes(self): - return {} - - def _create_scope(self): - return rope.base.pyscopes.FunctionScope(self.pycore, self, - _FunctionVisitor) - - def _infer_parameters(self): - pyobjects = rope.base.oi.soi.infer_parameter_objects(self) - self._handle_special_args(pyobjects) - return pyobjects - - def _infer_returned(self, args=None): - return rope.base.oi.soi.infer_returned_object(self, args) - - def _handle_special_args(self, pyobjects): - if len(pyobjects) == len(self.arguments.args): - if self.arguments.vararg: - pyobjects.append(rope.base.builtins.get_list()) - if self.arguments.kwarg: - pyobjects.append(rope.base.builtins.get_dict()) - - def _set_parameter_pyobjects(self, pyobjects): - if pyobjects is not None: - self._handle_special_args(pyobjects) - self.parameter_pyobjects.set(pyobjects) - - def get_parameters(self): - if self.parameter_pynames is None: - result = {} - for index, name in enumerate(self.get_param_names()): - # TODO: handle tuple parameters - result[name] = pynames.ParameterName(self, index) - self.parameter_pynames = result - return self.parameter_pynames - - def get_parameter(self, index): - if index < len(self.parameter_pyobjects.get()): - return self.parameter_pyobjects.get()[index] - - def get_returned_object(self, args): - return self.returned.get(args) - - def get_name(self): - return self.get_ast().name - - def get_param_names(self, special_args=True): - # TODO: handle tuple parameters - result = [node.arg 
for node in self.arguments.args - if isinstance(node, ast.arg)] - if special_args: - if self.arguments.vararg: - result.append(self.arguments.vararg) - if self.arguments.kwarg: - result.append(self.arguments.kwarg) - return result - - def get_kind(self): - """Get function type - - It returns one of 'function', 'method', 'staticmethod' or - 'classmethod' strs. - - """ - scope = self.parent.get_scope() - if isinstance(self.parent, PyClass): - for decorator in self.decorators: - pyname = rope.base.evaluate.eval_node(scope, decorator) - if pyname == rope.base.builtins.builtins['staticmethod']: - return 'staticmethod' - if pyname == rope.base.builtins.builtins['classmethod']: - return 'classmethod' - return 'method' - return 'function' - - @property - def decorators(self): - try: - return getattr(self.ast_node, 'decorator_list') - except AttributeError: - return getattr(self.ast_node, 'decorators', None) - - -class PyClass(pyobjects.PyClass): - - def __init__(self, pycore, ast_node, parent): - self.visitor_class = _ClassVisitor - AbstractClass.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) - self.parent = parent - self._superclasses = self.get_module()._get_concluded_data() - - def get_superclasses(self): - if self._superclasses.get() is None: - self._superclasses.set(self._get_bases()) - return self._superclasses.get() - - def get_name(self): - return self.get_ast().name - - def _create_concluded_attributes(self): - result = {} - for base in reversed(self.get_superclasses()): - result.update(base.get_attributes()) - return result - - def _get_bases(self): - result = [] - for base_name in self.ast_node.bases: - base = rope.base.evaluate.eval_node(self.parent.get_scope(), - base_name) - if base is not None and \ - base.get_object().get_type() == get_base_type('Type'): - result.append(base.get_object()) - return result - - def _create_scope(self): - return rope.base.pyscopes.ClassScope(self.pycore, self) - - -class PyModule(pyobjects.PyModule): - - def __init__(self, pycore, source=None, - resource=None, force_errors=False): - ignore = pycore.project.prefs.get('ignore_syntax_errors', False) - syntax_errors = force_errors or not ignore - self.has_errors = False - try: - source, node = self._init_source(pycore, source, resource) - except exceptions.ModuleSyntaxError: - self.has_errors = True - if syntax_errors: - raise - else: - source = '\n' - node = ast.parse('\n') - self.source_code = source - self.star_imports = [] - self.visitor_class = _GlobalVisitor - self.coding = fscommands.read_str_coding(self.source_code) - super(PyModule, self).__init__(pycore, node, resource) - - def _init_source(self, pycore, source_code, resource): - filename = 'string' - if resource: - filename = resource.path - try: - if source_code is None: - source_bytes = resource.read_bytes() - source_code = fscommands.file_data_to_unicode(source_bytes) - else: - if isinstance(source_code, str): - source_bytes = fscommands.unicode_to_file_data(source_code) - else: - source_bytes = source_code - ast_node = ast.parse(source_bytes, filename=filename) - except SyntaxError as e: - raise exceptions.ModuleSyntaxError(filename, e.lineno, e.msg) - except UnicodeDecodeError as e: - raise exceptions.ModuleSyntaxError(filename, 1, '%s' % (e.reason)) - return source_code, ast_node - - @utils.prevent_recursion(lambda: {}) - def _create_concluded_attributes(self): - result = {} - for star_import in self.star_imports: - result.update(star_import.get_names()) - return result - - def _create_scope(self): - return 
rope.base.pyscopes.GlobalScope(self.pycore, self) - - @property - @utils.saveit - def lines(self): - """A `SourceLinesAdapter`""" - return rope.base.codeanalyze.SourceLinesAdapter(self.source_code) - - @property - @utils.saveit - def logical_lines(self): - """A `LogicalLinesFinder`""" - return rope.base.codeanalyze.CachingLogicalLineFinder(self.lines) - - -class PyPackage(pyobjects.PyPackage): - - def __init__(self, pycore, resource=None, force_errors=False): - self.resource = resource - init_dot_py = self._get_init_dot_py() - if init_dot_py is not None: - ast_node = pycore.resource_to_pyobject( - init_dot_py, force_errors=force_errors).get_ast() - else: - ast_node = ast.parse('\n') - super(PyPackage, self).__init__(pycore, ast_node, resource) - - def _create_structural_attributes(self): - result = {} - modname = self.pycore.modname(self.resource) - extension_submodules = self.pycore._builtin_submodules(modname) - for name, module in extension_submodules.items(): - result[name] = rope.base.builtins.BuiltinName(module) - if self.resource is None: - return result - for name, resource in self._get_child_resources().items(): - result[name] = pynames.ImportedModule(self, resource=resource) - return result - - def _create_concluded_attributes(self): - result = {} - init_dot_py = self._get_init_dot_py() - if init_dot_py: - init_object = self.pycore.resource_to_pyobject(init_dot_py) - result.update(init_object.get_attributes()) - return result - - def _get_child_resources(self): - result = {} - for child in self.resource.get_children(): - if child.is_folder(): - result[child.name] = child - elif child.name.endswith('.py') and \ - child.name != '__init__.py': - name = child.name[:-3] - result[name] = child - return result - - def _get_init_dot_py(self): - if self.resource is not None and self.resource.has_child('__init__.py'): - return self.resource.get_child('__init__.py') - else: - return None - - def _create_scope(self): - return self.get_module().get_scope() - - def get_module(self): - init_dot_py = self._get_init_dot_py() - if init_dot_py: - return self.pycore.resource_to_pyobject(init_dot_py) - return self - - -class _AssignVisitor(object): - - def __init__(self, scope_visitor): - self.scope_visitor = scope_visitor - self.assigned_ast = None - - def _Assign(self, node): - self.assigned_ast = node.value - for child_node in node.targets: - ast.walk(child_node, self) - - def _assigned(self, name, assignment=None): - self.scope_visitor._assigned(name, assignment) - - def _Name(self, node): - assignment = None - if self.assigned_ast is not None: - assignment = pynames.AssignmentValue(self.assigned_ast) - self._assigned(node.id, assignment) - - def _Tuple(self, node): - names = astutils.get_name_levels(node) - for name, levels in names: - assignment = None - if self.assigned_ast is not None: - assignment = pynames.AssignmentValue(self.assigned_ast, levels) - self._assigned(name, assignment) - - def _Attribute(self, node): - pass - - def _Subscript(self, node): - pass - - def _Slice(self, node): - pass - - -class _ScopeVisitor(object): - - def __init__(self, pycore, owner_object): - self.pycore = pycore - self.owner_object = owner_object - self.names = {} - self.defineds = [] - - def get_module(self): - if self.owner_object is not None: - return self.owner_object.get_module() - else: - return None - - def _ClassDef(self, node): - pyclass = PyClass(self.pycore, node, self.owner_object) - self.names[node.name] = pynames.DefinedName(pyclass) - self.defineds.append(pyclass) - - def _FunctionDef(self, 
node): - pyfunction = PyFunction(self.pycore, node, self.owner_object) - for decorator in pyfunction.decorators: - if isinstance(decorator, ast.Name) and decorator.id == 'property': - if isinstance(self, _ClassVisitor): - type_ = rope.base.builtins.Property(pyfunction) - arg = pynames.UnboundName(PyObject(self.owner_object)) - def _eval(type_=type_, arg=arg): - return type_.get_property_object( - arguments.ObjectArguments([arg])) - self.names[node.name] = pynames.EvaluatedName( - _eval, module=self.get_module(), lineno=node.lineno) - break - else: - self.names[node.name] = pynames.DefinedName(pyfunction) - self.defineds.append(pyfunction) - - def _Assign(self, node): - ast.walk(node, _AssignVisitor(self)) - - def _AugAssign(self, node): - pass - - def _For(self, node): - names = self._update_evaluated(node.target, node.iter, - '.__iter__().next()') - for child in node.body + node.orelse: - ast.walk(child, self) - - def _assigned(self, name, assignment): - pyname = self.names.get(name, None) - if pyname is None: - pyname = pynames.AssignedName(module=self.get_module()) - if isinstance(pyname, pynames.AssignedName): - if assignment is not None: - pyname.assignments.append(assignment) - self.names[name] = pyname - - def _update_evaluated(self, targets, assigned, - evaluation= '', eval_type=False): - result = {} - if not isinstance(targets, str): - names = astutils.get_name_levels(targets) - for name, levels in names: - assignment = pynames.AssignmentValue(assigned, levels, - evaluation, eval_type) - self._assigned(name, assignment) - else: - assignment = pynames.AssignmentValue(assigned, [], - evaluation, eval_type) - self._assigned(targets, assignment) - return result - - def _With(self, node): - if (sys.version_info[1] < 3): - if node.optional_vars: - self._update_evaluated(node.optional_vars, - node.context_expr, '.__enter__()') - elif len(node.items) > 0: - #TODO Handle all items? - if node.items[0].optional_vars: - self._update_evaluated( - node.items[0].optional_vars, - node.items[0].context_expr, - '.__enter__()' - ) - - for child in node.body: - ast.walk(child, self) - - def _excepthandler(self, node): - if node.name is not None and isinstance(node.name, str): - type_node = node.type - if isinstance(node.type, ast.Tuple) and type_node.elts: - type_node = type_node.elts[0] - self._update_evaluated(node.name, type_node, eval_type=True) - for child in node.body: - ast.walk(child, self) - - def _ExceptHandler(self, node): - self._excepthandler(node) - - def _Import(self, node): - for import_pair in node.names: - module_name = import_pair.name - alias = import_pair.asname - first_package = module_name.split('.')[0] - if alias is not None: - imported = pynames.ImportedModule(self.get_module(), - module_name) - if not self._is_ignored_import(imported): - self.names[alias] = imported - else: - imported = pynames.ImportedModule(self.get_module(), - first_package) - if not self._is_ignored_import(imported): - self.names[first_package] = imported - - def _ImportFrom(self, node): - level = 0 - if node.level: - level = node.level - if node.module is None and len(node.names) > 0: #Relative import "." 
- self._Import(node) - return - imported_module = pynames.ImportedModule(self.get_module(), - node.module, level) - if self._is_ignored_import(imported_module): - return - if len(node.names) == 1 and node.names[0].name == '*': - if isinstance(self.owner_object, PyModule): - self.owner_object.star_imports.append( - StarImport(imported_module)) - else: - for imported_name in node.names: - imported = imported_name.name - alias = imported_name.asname - if alias is not None: - imported = alias - self.names[imported] = pynames.ImportedName(imported_module, - imported_name.name) - - def _is_ignored_import(self, imported_module): - if not self.pycore.project.prefs.get('ignore_bad_imports', False): - return False - return not isinstance(imported_module.get_object(), AbstractModule) - - def _Global(self, node): - module = self.get_module() - for name in node.names: - if module is not None: - try: - pyname = module[name] - except exceptions.AttributeNotFoundError: - pyname = pynames.AssignedName(node.lineno) - self.names[name] = pyname - - -class _GlobalVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_GlobalVisitor, self).__init__(pycore, owner_object) - - -class _ClassVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_ClassVisitor, self).__init__(pycore, owner_object) - - def _FunctionDef(self, node): - _ScopeVisitor._FunctionDef(self, node) - if len(node.args.args) > 0: - first = node.args.args[0] - if isinstance(first, ast.arg): - new_visitor = _ClassInitVisitor(self, first.arg) - for child in ast.get_child_nodes(node): - ast.walk(child, new_visitor) - - -class _FunctionVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_FunctionVisitor, self).__init__(pycore, owner_object) - self.returned_asts = [] - self.generator = False - - def _Return(self, node): - if node.value is not None: - self.returned_asts.append(node.value) - - def _Yield(self, node): - if node.value is not None: - self.returned_asts.append(node.value) - self.generator = True - - -class _ClassInitVisitor(_AssignVisitor): - - def __init__(self, scope_visitor, self_name): - super(_ClassInitVisitor, self).__init__(scope_visitor) - self.self_name = self_name - - def _Attribute(self, node): - if not isinstance(node.ctx, ast.Store): - return - if isinstance(node.value, ast.Name) and \ - node.value.id == self.self_name: - if node.attr not in self.scope_visitor.names: - self.scope_visitor.names[node.attr] = pynames.AssignedName( - lineno=node.lineno, module=self.scope_visitor.get_module()) - if self.assigned_ast is not None: - pyname = self.scope_visitor.names[node.attr] - if isinstance(pyname, pynames.AssignedName): - pyname.assignments.append( - pynames.AssignmentValue(self.assigned_ast)) - - def _Tuple(self, node): - if not isinstance(node.ctx, ast.Store): - return - for child in ast.get_child_nodes(node): - ast.walk(child, self) - - def _Name(self, node): - pass - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def _For(self, node): - pass - - def _With(self, node): - pass - - -class StarImport(object): - - def __init__(self, imported_module): - self.imported_module = imported_module - - def get_names(self): - result = {} - imported = self.imported_module.get_object() - for name in imported: - if not name.startswith('_'): - result[name] = pynames.ImportedName(self.imported_module, name) - return result diff --git a/pymode/libs3/rope/base/pyscopes.py b/pymode/libs3/rope/base/pyscopes.py deleted file mode 100644 index 
a00381b7..00000000 --- a/pymode/libs3/rope/base/pyscopes.py +++ /dev/null @@ -1,313 +0,0 @@ -import rope.base.builtins -import rope.base.codeanalyze -import rope.base.pynames -from rope.base import ast, exceptions, utils - - -class Scope(object): - - def __init__(self, pycore, pyobject, parent_scope): - self.pycore = pycore - self.pyobject = pyobject - self.parent = parent_scope - - def get_names(self): - """Return the names defined or imported in this scope""" - return self.pyobject.get_attributes() - - def get_defined_names(self): - """Return the names defined in this scope""" - return self.pyobject._get_structural_attributes() - - def get_name(self, name): - """Return name `PyName` defined in this scope""" - if name not in self.get_names(): - raise exceptions.NameNotFoundError('name %s not found' % name) - return self.get_names()[name] - - def __getitem__(self, key): - """The same as ``get_name(key)``""" - return self.get_name(key) - - def __contains__(self, key): - """The same as ``key in self.get_names()``""" - return key in self.get_names() - - @utils.saveit - def get_scopes(self): - """Return the subscopes of this scope - - The returned scopes should be sorted by the order they appear. - """ - return self._create_scopes() - - def lookup(self, name): - if name in self.get_names(): - return self.get_names()[name] - if self.parent is not None: - return self.parent._propagated_lookup(name) - return None - - def get_propagated_names(self): - """Return the visible names of this scope - - Return the names defined in this scope that are visible from - scopes containing this scope. This method returns the same - dictionary returned by `get_names()` except for `ClassScope` - which returns an empty dict. - """ - return self.get_names() - - def _propagated_lookup(self, name): - if name in self.get_propagated_names(): - return self.get_propagated_names()[name] - if self.parent is not None: - return self.parent._propagated_lookup(name) - return None - - def _create_scopes(self): - return [pydefined.get_scope() - for pydefined in self.pyobject._get_defined_objects()] - - def _get_global_scope(self): - current = self - while current.parent is not None: - current = current.parent - return current - - def get_start(self): - return self.pyobject.get_ast().lineno - - def get_body_start(self): - body = self.pyobject.get_ast().body - if body: - return body[0].lineno - return self.get_start() - - def get_end(self): - pymodule = self._get_global_scope().pyobject - return pymodule.logical_lines.logical_line_in(self.logical_end)[1] - - @utils.saveit - def get_logical_end(self): - global_scope = self._get_global_scope() - return global_scope._scope_finder.find_scope_end(self) - - start = property(get_start) - end = property(get_end) - logical_end = property(get_logical_end) - - def get_kind(self): - pass - - -class GlobalScope(Scope): - - def __init__(self, pycore, module): - super(GlobalScope, self).__init__(pycore, module, None) - self.names = module._get_concluded_data() - - def get_start(self): - return 1 - - def get_kind(self): - return 'Module' - - def get_name(self, name): - try: - return self.pyobject[name] - except exceptions.AttributeNotFoundError: - if name in self.builtin_names: - return self.builtin_names[name] - raise exceptions.NameNotFoundError('name %s not found' % name) - - def get_names(self): - if self.names.get() is None: - result = dict(self.builtin_names) - result.update(super(GlobalScope, self).get_names()) - self.names.set(result) - return self.names.get() - - def 
get_inner_scope_for_line(self, lineno, indents=None): - return self._scope_finder.get_holding_scope(self, lineno, indents) - - def get_inner_scope_for_offset(self, offset): - return self._scope_finder.get_holding_scope_for_offset(self, offset) - - @property - @utils.saveit - def _scope_finder(self): - return _HoldingScopeFinder(self.pyobject) - - @property - def builtin_names(self): - return rope.base.builtins.builtins.get_attributes() - - -class FunctionScope(Scope): - - def __init__(self, pycore, pyobject, visitor): - super(FunctionScope, self).__init__(pycore, pyobject, - pyobject.parent.get_scope()) - self.names = None - self.returned_asts = None - self.is_generator = None - self.defineds = None - self.visitor = visitor - - def _get_names(self): - if self.names is None: - self._visit_function() - return self.names - - def _visit_function(self): - if self.names is None: - new_visitor = self.visitor(self.pycore, self.pyobject) - for n in ast.get_child_nodes(self.pyobject.get_ast()): - ast.walk(n, new_visitor) - self.names = new_visitor.names - self.names.update(self.pyobject.get_parameters()) - self.returned_asts = new_visitor.returned_asts - self.is_generator = new_visitor.generator - self.defineds = new_visitor.defineds - - def _get_returned_asts(self): - if self.names is None: - self._visit_function() - return self.returned_asts - - def _is_generator(self): - if self.is_generator is None: - self._get_returned_asts() - return self.is_generator - - def get_names(self): - return self._get_names() - - def _create_scopes(self): - if self.defineds is None: - self._visit_function() - return [pydefined.get_scope() for pydefined in self.defineds] - - def get_kind(self): - return 'Function' - - def invalidate_data(self): - for pyname in self.get_names().values(): - if isinstance(pyname, (rope.base.pynames.AssignedName, - rope.base.pynames.EvaluatedName)): - pyname.invalidate() - - -class ClassScope(Scope): - - def __init__(self, pycore, pyobject): - super(ClassScope, self).__init__(pycore, pyobject, - pyobject.parent.get_scope()) - - def get_kind(self): - return 'Class' - - def get_propagated_names(self): - return {} - - -class _HoldingScopeFinder(object): - - def __init__(self, pymodule): - self.pymodule = pymodule - - def get_indents(self, lineno): - return rope.base.codeanalyze.count_line_indents( - self.lines.get_line(lineno)) - - def _get_scope_indents(self, scope): - return self.get_indents(scope.get_start()) - - def get_holding_scope(self, module_scope, lineno, line_indents=None): - if line_indents is None: - line_indents = self.get_indents(lineno) - current_scope = module_scope - new_scope = current_scope - while new_scope is not None and \ - (new_scope.get_kind() == 'Module' or - self._get_scope_indents(new_scope) <= line_indents): - current_scope = new_scope - if current_scope.get_start() == lineno and \ - current_scope.get_kind() != 'Module': - return current_scope - new_scope = None - for scope in current_scope.get_scopes(): - if scope.get_start() <= lineno: - if lineno <= scope.get_end(): - new_scope = scope - break - else: - break - return current_scope - - def _is_empty_line(self, lineno): - line = self.lines.get_line(lineno) - return line.strip() == '' or line.lstrip().startswith('#') - - def _get_body_indents(self, scope): - return self.get_indents(scope.get_body_start()) - - def get_holding_scope_for_offset(self, scope, offset): - return self.get_holding_scope( - scope, self.lines.get_line_number(offset)) - - def find_scope_end(self, scope): - if not scope.parent: - return 
self.lines.length() - end = scope.pyobject.get_ast().body[-1].lineno - scope_start = self.pymodule.logical_lines.logical_line_in(scope.start) - if scope_start[1] >= end: - # handling one-liners - body_indents = self._get_scope_indents(scope) + 4 - else: - body_indents = self._get_body_indents(scope) - for l in self.logical_lines.generate_starts( - min(end + 1, self.lines.length()), self.lines.length() + 1): - if not self._is_empty_line(l): - if self.get_indents(l) < body_indents: - return end - else: - end = l - return end - - @property - def lines(self): - return self.pymodule.lines - - @property - def code(self): - return self.pymodule.source_code - - @property - def logical_lines(self): - return self.pymodule.logical_lines - -class TemporaryScope(Scope): - """Currently used for list comprehensions and generator expressions - - These scopes do not appear in the `get_scopes()` method of their - parent scopes. - """ - - def __init__(self, pycore, parent_scope, names): - super(TemporaryScope, self).__init__( - pycore, parent_scope.pyobject, parent_scope) - self.names = names - - def get_names(self): - return self.names - - def get_defined_names(self): - return self.names - - def _create_scopes(self): - return [] - - def get_kind(self): - return 'Temporary' diff --git a/pymode/libs3/rope/base/resourceobserver.py b/pymode/libs3/rope/base/resourceobserver.py deleted file mode 100644 index 6d1accbc..00000000 --- a/pymode/libs3/rope/base/resourceobserver.py +++ /dev/null @@ -1,271 +0,0 @@ -import os - - -class ResourceObserver(object): - """Provides the interface for observing resources - - `ResourceObserver`\s can be registered using `Project. - add_observer()`. But most of the time `FilteredResourceObserver` - should be used. `ResourceObserver`\s report all changes passed - to them and they don't report changes to all resources. For - example if a folder is removed, it only calls `removed()` for that - folder and not its contents. You can use - `FilteredResourceObserver` if you are interested in changes only - to a list of resources. And you want changes to be reported on - individual resources. - - """ - - def __init__(self, changed=None, moved=None, created=None, - removed=None, validate=None): - self.changed = changed - self.moved = moved - self.created = created - self.removed = removed - self._validate = validate - - def resource_changed(self, resource): - """It is called when the resource changes""" - if self.changed is not None: - self.changed(resource) - - def resource_moved(self, resource, new_resource): - """It is called when a resource is moved""" - if self.moved is not None: - self.moved(resource, new_resource) - - def resource_created(self, resource): - """Is called when a new resource is created""" - if self.created is not None: - self.created(resource) - - def resource_removed(self, resource): - """Is called when a new resource is removed""" - if self.removed is not None: - self.removed(resource) - - def validate(self, resource): - """Validate the existence of this resource and its children. - - This function is called when rope need to update its resource - cache about the files that might have been changed or removed - by other processes. - - """ - if self._validate is not None: - self._validate(resource) - - -class FilteredResourceObserver(object): - """A useful decorator for `ResourceObserver` - - Most resource observers have a list of resources and are - interested only in changes to those files. This class satisfies - this need. 
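
For reference, a minimal sketch of how the callback-based ResourceObserver interface removed above is meant to be wired up; the project root and the on_change callback are illustrative assumptions, not part of the deleted module.

from rope.base.project import Project
from rope.base.resourceobserver import ResourceObserver

def on_change(resource):
    # Invoked whenever an observed resource is reported as changed.
    print('changed:', resource.path)

project = Project('.')                          # assumed project root
observer = ResourceObserver(changed=on_change)  # only the callbacks you care about
project.add_observer(observer)                  # registration path named in the docstring above
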
It dispatches resource changed and removed messages. - It performs these tasks: - - * Changes to files and folders are analyzed to check whether any - of the interesting resources are changed or not. If they are, - it reports these changes to `resource_observer` passed to the - constructor. - * When a resource is removed it checks whether any of the - interesting resources are contained in that folder and reports - them to `resource_observer`. - * When validating a folder it validates all of the interesting - files in that folder. - - Since most resource observers are interested in a list of - resources that change over time, `add_resource` and - `remove_resource` might be useful. - - """ - - def __init__(self, resource_observer, initial_resources=None, - timekeeper=None): - self.observer = resource_observer - self.resources = {} - if timekeeper is not None: - self.timekeeper = timekeeper - else: - self.timekeeper = ChangeIndicator() - if initial_resources is not None: - for resource in initial_resources: - self.add_resource(resource) - - def add_resource(self, resource): - """Add a resource to the list of interesting resources""" - if resource.exists(): - self.resources[resource] = self.timekeeper.get_indicator(resource) - else: - self.resources[resource] = None - - def remove_resource(self, resource): - """Add a resource to the list of interesting resources""" - if resource in self.resources: - del self.resources[resource] - - def clear_resources(self): - """Removes all registered resources""" - self.resources.clear() - - def resource_changed(self, resource): - changes = _Changes() - self._update_changes_caused_by_changed(changes, resource) - self._perform_changes(changes) - - def _update_changes_caused_by_changed(self, changes, changed): - if changed in self.resources: - changes.add_changed(changed) - if self._is_parent_changed(changed): - changes.add_changed(changed.parent) - - def _update_changes_caused_by_moved(self, changes, resource, - new_resource=None): - if resource in self.resources: - changes.add_removed(resource, new_resource) - if new_resource in self.resources: - changes.add_created(new_resource) - if resource.is_folder(): - for file in list(self.resources): - if resource.contains(file): - new_file = self._calculate_new_resource( - resource, new_resource, file) - changes.add_removed(file, new_file) - if self._is_parent_changed(resource): - changes.add_changed(resource.parent) - if new_resource is not None: - if self._is_parent_changed(new_resource): - changes.add_changed(new_resource.parent) - - def _is_parent_changed(self, child): - return child.parent in self.resources - - def resource_moved(self, resource, new_resource): - changes = _Changes() - self._update_changes_caused_by_moved(changes, resource, new_resource) - self._perform_changes(changes) - - def resource_created(self, resource): - changes = _Changes() - self._update_changes_caused_by_created(changes, resource) - self._perform_changes(changes) - - def _update_changes_caused_by_created(self, changes, resource): - if resource in self.resources: - changes.add_created(resource) - if self._is_parent_changed(resource): - changes.add_changed(resource.parent) - - def resource_removed(self, resource): - changes = _Changes() - self._update_changes_caused_by_moved(changes, resource) - self._perform_changes(changes) - - def _perform_changes(self, changes): - for resource in changes.changes: - self.observer.resource_changed(resource) - self.resources[resource] = self.timekeeper.get_indicator(resource) - for resource, 
new_resource in changes.moves.items(): - self.resources[resource] = None - if new_resource is not None: - self.observer.resource_moved(resource, new_resource) - else: - self.observer.resource_removed(resource) - for resource in changes.creations: - self.observer.resource_created(resource) - self.resources[resource] = self.timekeeper.get_indicator(resource) - - def validate(self, resource): - changes = _Changes() - for file in self._search_resource_moves(resource): - if file in self.resources: - self._update_changes_caused_by_moved(changes, file) - for file in self._search_resource_changes(resource): - if file in self.resources: - self._update_changes_caused_by_changed(changes, file) - for file in self._search_resource_creations(resource): - if file in self.resources: - changes.add_created(file) - self._perform_changes(changes) - - def _search_resource_creations(self, resource): - creations = set() - if resource in self.resources and resource.exists() and \ - self.resources[resource] is None: - creations.add(resource) - if resource.is_folder(): - for file in self.resources: - if file.exists() and resource.contains(file) and \ - self.resources[file] is None: - creations.add(file) - return creations - - def _search_resource_moves(self, resource): - all_moved = set() - if resource in self.resources and not resource.exists(): - all_moved.add(resource) - if resource.is_folder(): - for file in self.resources: - if resource.contains(file): - if not file.exists(): - all_moved.add(file) - moved = set(all_moved) - for folder in [file for file in all_moved if file.is_folder()]: - if folder in moved: - for file in list(moved): - if folder.contains(file): - moved.remove(file) - return moved - - def _search_resource_changes(self, resource): - changed = set() - if resource in self.resources and self._is_changed(resource): - changed.add(resource) - if resource.is_folder(): - for file in self.resources: - if file.exists() and resource.contains(file): - if self._is_changed(file): - changed.add(file) - return changed - - def _is_changed(self, resource): - if self.resources[resource] is None: - return False - return self.resources[resource] != self.timekeeper.get_indicator(resource) - - def _calculate_new_resource(self, main, new_main, resource): - if new_main is None: - return None - diff = resource.path[len(main.path):] - return resource.project.get_resource(new_main.path + diff) - - -class ChangeIndicator(object): - - def get_indicator(self, resource): - """Return the modification time and size of a `Resource`.""" - path = resource.real_path - # on dos, mtime does not change for a folder when files are added - if os.name != 'posix' and os.path.isdir(path): - return (os.path.getmtime(path), - len(os.listdir(path)), - os.path.getsize(path)) - return (os.path.getmtime(path), - os.path.getsize(path)) - - -class _Changes(object): - - def __init__(self): - self.changes = set() - self.creations = set() - self.moves = {} - - def add_changed(self, resource): - self.changes.add(resource) - - def add_removed(self, resource, new_resource=None): - self.moves[resource] = new_resource - - def add_created(self, resource): - self.creations.add(resource) diff --git a/pymode/libs3/rope/base/resources.py b/pymode/libs3/rope/base/resources.py deleted file mode 100644 index 871211a5..00000000 --- a/pymode/libs3/rope/base/resources.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -import re - -import rope.base.change -import rope.base.fscommands -from rope.base import exceptions - - -class Resource(object): - """Represents files and 
folders in a project""" - - def __init__(self, project, path): - self.project = project - self._path = path - - def move(self, new_location): - """Move resource to `new_location`""" - self._perform_change(rope.base.change.MoveResource(self, new_location), - 'Moving <%s> to <%s>' % (self.path, new_location)) - - def remove(self): - """Remove resource from the project""" - self._perform_change(rope.base.change.RemoveResource(self), - 'Removing <%s>' % self.path) - - def is_folder(self): - """Return true if the resource is a folder""" - - def create(self): - """Create this resource""" - - def exists(self): - return os.path.exists(self.real_path) - - @property - def parent(self): - parent = '/'.join(self.path.split('/')[0:-1]) - return self.project.get_folder(parent) - - @property - def path(self): - """Return the path of this resource relative to the project root - - The path is the list of parent directories separated by '/' followed - by the resource name. - """ - return self._path - - @property - def name(self): - """Return the name of this resource""" - return self.path.split('/')[-1] - - @property - def real_path(self): - """Return the file system path of this resource""" - return self.project._get_resource_path(self.path) - - def __eq__(self, obj): - return self.__class__ == obj.__class__ and self.path == obj.path - - def __ne__(self, obj): - return not self.__eq__(obj) - - def __hash__(self): - return hash(self.path) - - def _perform_change(self, change_, description): - changes = rope.base.change.ChangeSet(description) - changes.add_change(change_) - self.project.do(changes) - - -class File(Resource): - """Represents a file""" - - def __init__(self, project, name): - super(File, self).__init__(project, name) - - def read(self): - data = self.read_bytes() - try: - return rope.base.fscommands.file_data_to_unicode(data) - except UnicodeDecodeError as e: - raise exceptions.ModuleDecodeError(self.path, e.reason) - - def read_bytes(self): - with open(self.real_path, 'rb') as fi: - return fi.read() - - def write(self, contents): - try: - if contents == self.read(): - return - except IOError: - pass - self._perform_change(rope.base.change.ChangeContents(self, contents), - 'Writing file <%s>' % self.path) - - def is_folder(self): - return False - - def create(self): - self.parent.create_file(self.name) - - -class Folder(Resource): - """Represents a folder""" - - def __init__(self, project, name): - super(Folder, self).__init__(project, name) - - def is_folder(self): - return True - - def get_children(self): - """Return the children of this folder""" - result = [] - for name in os.listdir(self.real_path): - try: - child = self.get_child(name) - except exceptions.ResourceNotFoundError: - continue - if not self.project.is_ignored(child): - result.append(self.get_child(name)) - return result - - def create_file(self, file_name): - self._perform_change( - rope.base.change.CreateFile(self, file_name), - 'Creating file <%s>' % self._get_child_path(file_name)) - return self.get_child(file_name) - - def create_folder(self, folder_name): - self._perform_change( - rope.base.change.CreateFolder(self, folder_name), - 'Creating folder <%s>' % self._get_child_path(folder_name)) - return self.get_child(folder_name) - - def _get_child_path(self, name): - if self.path: - return self.path + '/' + name - else: - return name - - def get_child(self, name): - return self.project.get_resource(self._get_child_path(name)) - - def has_child(self, name): - try: - self.get_child(name) - return True - except 
exceptions.ResourceNotFoundError: - return False - - def get_files(self): - return [resource for resource in self.get_children() - if not resource.is_folder()] - - def get_folders(self): - return [resource for resource in self.get_children() - if resource.is_folder()] - - def contains(self, resource): - if self == resource: - return False - return self.path == '' or resource.path.startswith(self.path + '/') - - def create(self): - self.parent.create_folder(self.name) - - -class _ResourceMatcher(object): - - def __init__(self): - self.patterns = [] - self._compiled_patterns = [] - - def set_patterns(self, patterns): - """Specify which resources to match - - `patterns` is a `list` of `str`\s that can contain ``*`` and - ``?`` signs for matching resource names. - - """ - self._compiled_patterns = None - self.patterns = patterns - - def _add_pattern(self, pattern): - re_pattern = pattern.replace('.', '\\.').\ - replace('*', '[^/]*').replace('?', '[^/]').\ - replace('//', '/(.*/)?') - re_pattern = '^(.*/)?' + re_pattern + '(/.*)?$' - self.compiled_patterns.append(re.compile(re_pattern)) - - def does_match(self, resource): - for pattern in self.compiled_patterns: - if pattern.match(resource.path): - return True - path = os.path.join(resource.project.address, - *resource.path.split('/')) - if os.path.islink(path): - return True - return False - - @property - def compiled_patterns(self): - if self._compiled_patterns is None: - self._compiled_patterns = [] - for pattern in self.patterns: - self._add_pattern(pattern) - return self._compiled_patterns diff --git a/pymode/libs3/rope/base/simplify.py b/pymode/libs3/rope/base/simplify.py deleted file mode 100644 index bc4cade4..00000000 --- a/pymode/libs3/rope/base/simplify.py +++ /dev/null @@ -1,55 +0,0 @@ -"""A module to ease code analysis - -This module is here to help source code analysis. -""" -import re - -from rope.base import codeanalyze, utils - - -@utils.cached(7) -def real_code(source): - """Simplify `source` for analysis - - It replaces: - - * comments with spaces - * strs with a new str filled with spaces - * implicit and explicit continuations with spaces - * tabs and semicolons with spaces - - The resulting code is a lot easier to analyze if we are interested - only in offsets. 
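
A minimal sketch of the source simplification described above, assuming rope.base.simplify is importable as laid out in this tree; the sample source line is made up. Comment and string bodies come back as spaces of the same length, so offsets into the original code stay valid.

from rope.base import simplify

src = 'x = "a;b"  # trailing comment\n'
clean = simplify.real_code(src)
# String contents and the comment are blanked, but lengths are preserved,
# so indexing by offset still lines up with the original source.
assert len(clean) == len(src)
print(repr(clean))
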
- """ - collector = codeanalyze.ChangeCollector(source) - for start, end in ignored_regions(source): - if source[start] == '#': - replacement = ' ' * (end - start) - else: - replacement = '"%s"' % (' ' * (end - start - 2)) - collector.add_change(start, end, replacement) - source = collector.get_changed() or source - collector = codeanalyze.ChangeCollector(source) - parens = 0 - for match in _parens.finditer(source): - i = match.start() - c = match.group() - if c in '({[': - parens += 1 - if c in ')}]': - parens -= 1 - if c == '\n' and parens > 0: - collector.add_change(i, i + 1, ' ') - source = collector.get_changed() or source - return source.replace('\\\n', ' ').replace('\t', ' ').replace(';', '\n') - - -@utils.cached(7) -def ignored_regions(source): - """Return ignored regions like strings and comments in `source` """ - return [(match.start(), match.end()) for match in _str.finditer(source)] - - -_str = re.compile('%s|%s' % (codeanalyze.get_comment_pattern(), - codeanalyze.get_string_pattern())) -_parens = re.compile(r'[\({\[\]}\)\n]') diff --git a/pymode/libs3/rope/base/stdmods.py b/pymode/libs3/rope/base/stdmods.py deleted file mode 100644 index 4a7d9fbe..00000000 --- a/pymode/libs3/rope/base/stdmods.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import sys - -from rope.base import utils - - -def _stdlib_path(): - import inspect - return os.path.dirname(inspect.getsourcefile(inspect)) - -@utils.cached(1) -def standard_modules(): - return python_modules() | dynload_modules() - -@utils.cached(1) -def python_modules(): - result = set() - lib_path = _stdlib_path() - if os.path.exists(lib_path): - for name in os.listdir(lib_path): - path = os.path.join(lib_path, name) - if os.path.isdir(path): - if '-' not in name: - result.add(name) - else: - if name.endswith('.py'): - result.add(name[:-3]) - return result - -@utils.cached(1) -def dynload_modules(): - result = set(sys.builtin_module_names) - dynload_path = os.path.join(_stdlib_path(), 'lib-dynload') - if os.path.exists(dynload_path): - for name in os.listdir(dynload_path): - path = os.path.join(dynload_path, name) - if os.path.isfile(path): - if name.endswith('.so') or name.endswith('.dll'): - if "cpython" in name: - result.add(os.path.splitext(os.path.splitext(name)[0])[0]) - else: - result.add(os.path.splitext(name)[0]) - return result diff --git a/pymode/libs3/rope/base/taskhandle.py b/pymode/libs3/rope/base/taskhandle.py deleted file mode 100644 index 6d4ed856..00000000 --- a/pymode/libs3/rope/base/taskhandle.py +++ /dev/null @@ -1,133 +0,0 @@ -import warnings - -from rope.base import exceptions - - -class TaskHandle(object): - - def __init__(self, name='Task', interrupts=True): - """Construct a TaskHandle - - If `interrupts` is `False` the task won't be interrupted by - calling `TaskHandle.stop()`. - - """ - self.name = name - self.interrupts = interrupts - self.stopped = False - self.job_sets = [] - self.observers = [] - - def stop(self): - """Interrupts the refactoring""" - if self.interrupts: - self.stopped = True - self._inform_observers() - - def current_jobset(self): - """Return the current `JobSet`""" - if self.job_sets: - return self.job_sets[-1] - - def add_observer(self, observer): - """Register an observer for this task handle - - The observer is notified whenever the task is stopped or - a job gets finished. 
- - """ - self.observers.append(observer) - - def is_stopped(self): - return self.stopped - - def get_jobsets(self): - return self.job_sets - - def create_jobset(self, name='JobSet', count=None): - result = JobSet(self, name=name, count=count) - self.job_sets.append(result) - self._inform_observers() - return result - - def _inform_observers(self): - for observer in list(self.observers): - observer() - - -class JobSet(object): - - def __init__(self, handle, name, count): - self.handle = handle - self.name = name - self.count = count - self.done = 0 - self.job_name = None - - def started_job(self, name): - self.check_status() - self.job_name = name - self.handle._inform_observers() - - def finished_job(self): - self.check_status() - self.done += 1 - self.handle._inform_observers() - self.job_name = None - - def check_status(self): - if self.handle.is_stopped(): - raise exceptions.InterruptedTaskError() - - def get_active_job_name(self): - return self.job_name - - def get_percent_done(self): - if self.count is not None and self.count > 0: - percent = self.done * 100 // self.count - return min(percent, 100) - - def get_name(self): - return self.name - - -class NullTaskHandle(object): - - def __init__(self): - pass - - def is_stopped(self): - return False - - def stop(self): - pass - - def create_jobset(self, *args, **kwds): - return NullJobSet() - - def get_jobsets(self): - return [] - - def add_observer(self, observer): - pass - - -class NullJobSet(object): - - def started_job(self, name): - pass - - def finished_job(self): - pass - - def check_status(self): - pass - - def get_active_job_name(self): - pass - - def get_percent_done(self): - pass - - def get_name(self): - pass diff --git a/pymode/libs3/rope/base/utils.py b/pymode/libs3/rope/base/utils.py deleted file mode 100644 index e35ecbf3..00000000 --- a/pymode/libs3/rope/base/utils.py +++ /dev/null @@ -1,78 +0,0 @@ -import warnings - - -def saveit(func): - """A decorator that caches the return value of a function""" - - name = '_' + func.__name__ - def _wrapper(self, *args, **kwds): - if not hasattr(self, name): - setattr(self, name, func(self, *args, **kwds)) - return getattr(self, name) - return _wrapper - -cacheit = saveit - -def prevent_recursion(default): - """A decorator that returns the return value of `default` in recursions""" - def decorator(func): - name = '_calling_%s_' % func.__name__ - def newfunc(self, *args, **kwds): - if getattr(self, name, False): - return default() - setattr(self, name, True) - try: - return func(self, *args, **kwds) - finally: - setattr(self, name, False) - return newfunc - return decorator - - -def ignore_exception(exception_class): - """A decorator that ignores `exception_class` exceptions""" - def _decorator(func): - def newfunc(*args, **kwds): - try: - return func(*args, **kwds) - except exception_class: - pass - return newfunc - return _decorator - - -def deprecated(message=None): - """A decorator for deprecated functions""" - def _decorator(func, message=message): - if message is None: - message = '%s is deprecated' % func.__name__ - def newfunc(*args, **kwds): - warnings.warn(message, DeprecationWarning, stacklevel=2) - return func(*args, **kwds) - return newfunc - return _decorator - - -def cached(count): - """A caching decorator based on parameter objects""" - def decorator(func): - return _Cached(func, count) - return decorator - -class _Cached(object): - - def __init__(self, func, count): - self.func = func - self.cache = [] - self.count = count - - def __call__(self, *args, **kwds): - key 
= (args, kwds) - for cached_key, cached_result in self.cache: - if cached_key == key: - return cached_result - result = self.func(*args, **kwds) - self.cache.append((key, result)) - if len(self.cache) > self.count: - del self.cache[0] - return result diff --git a/pymode/libs3/rope/base/worder.py b/pymode/libs3/rope/base/worder.py deleted file mode 100644 index 08d75f34..00000000 --- a/pymode/libs3/rope/base/worder.py +++ /dev/null @@ -1,524 +0,0 @@ -import bisect -import keyword - -import rope.base.simplify - - -def get_name_at(resource, offset): - source_code = resource.read() - word_finder = Worder(source_code) - return word_finder.get_word_at(offset) - - -class Worder(object): - """A class for finding boundaries of words and expressions - - Note that in these methods, offset should be the index of the - character not the index of the character after it. - """ - - def __init__(self, code, handle_ignores=False): - simplified = rope.base.simplify.real_code(code) - self.code_finder = _RealFinder(simplified, code) - self.handle_ignores = handle_ignores - self.code = code - - def _init_ignores(self): - ignores = rope.base.simplify.ignored_regions(self.code) - self.dumb_finder = _RealFinder(self.code, self.code) - self.starts = [ignored[0] for ignored in ignores] - self.ends = [ignored[1] for ignored in ignores] - - def _context_call(self, name, offset): - if self.handle_ignores: - if not hasattr(self, 'starts'): - self._init_ignores() - start = bisect.bisect(self.starts, offset) - if start > 0 and offset < self.ends[start - 1]: - return getattr(self.dumb_finder, name)(offset) - return getattr(self.code_finder, name)(offset) - - def get_primary_at(self, offset): - return self._context_call('get_primary_at', offset) - - def get_word_at(self, offset): - return self._context_call('get_word_at', offset) - - def get_primary_range(self, offset): - return self._context_call('get_primary_range', offset) - - def get_splitted_primary_before(self, offset): - return self._context_call('get_splitted_primary_before', offset) - - def get_word_range(self, offset): - return self._context_call('get_word_range', offset) - - def is_function_keyword_parameter(self, offset): - return self.code_finder.is_function_keyword_parameter(offset) - - def is_a_class_or_function_name_in_header(self, offset): - return self.code_finder.is_a_class_or_function_name_in_header(offset) - - def is_from_statement_module(self, offset): - return self.code_finder.is_from_statement_module(offset) - - def is_from_aliased(self, offset): - return self.code_finder.is_from_aliased(offset) - - def find_parens_start_from_inside(self, offset): - return self.code_finder.find_parens_start_from_inside(offset) - - def is_a_name_after_from_import(self, offset): - return self.code_finder.is_a_name_after_from_import(offset) - - def is_from_statement(self, offset): - return self.code_finder.is_from_statement(offset) - - def get_from_aliased(self, offset): - return self.code_finder.get_from_aliased(offset) - - def is_import_statement(self, offset): - return self.code_finder.is_import_statement(offset) - - def is_assigned_here(self, offset): - return self.code_finder.is_assigned_here(offset) - - def is_a_function_being_called(self, offset): - return self.code_finder.is_a_function_being_called(offset) - - def get_word_parens_range(self, offset): - return self.code_finder.get_word_parens_range(offset) - - def is_name_assigned_in_class_body(self, offset): - return self.code_finder.is_name_assigned_in_class_body(offset) - - def 
is_on_function_call_keyword(self, offset): - return self.code_finder.is_on_function_call_keyword(offset) - - def _find_parens_start(self, offset): - return self.code_finder._find_parens_start(offset) - - def get_parameters(self, first, last): - return self.code_finder.get_parameters(first, last) - - def get_from_module(self, offset): - return self.code_finder.get_from_module(offset) - - def is_assigned_in_a_tuple_assignment(self, offset): - return self.code_finder.is_assigned_in_a_tuple_assignment(offset) - - def get_assignment_type(self, offset): - return self.code_finder.get_assignment_type(offset) - - def get_function_and_args_in_header(self, offset): - return self.code_finder.get_function_and_args_in_header(offset) - - def get_lambda_and_args(self, offset): - return self.code_finder.get_lambda_and_args(offset) - - def find_function_offset(self, offset): - return self.code_finder.find_function_offset(offset) - - -class _RealFinder(object): - - def __init__(self, code, raw): - self.code = code - self.raw = raw - - def _find_word_start(self, offset): - current_offset = offset - while current_offset >= 0 and self._is_id_char(current_offset): - current_offset -= 1 - return current_offset + 1 - - def _find_word_end(self, offset): - while offset + 1 < len(self.code) and self._is_id_char(offset + 1): - offset += 1 - return offset - - def _find_last_non_space_char(self, offset): - while offset >= 0 and self.code[offset].isspace(): - if self.code[offset] == '\n': - return offset - offset -= 1 - return max(-1, offset) - - def get_word_at(self, offset): - offset = self._get_fixed_offset(offset) - return self.raw[self._find_word_start(offset): - self._find_word_end(offset) + 1] - - def _get_fixed_offset(self, offset): - if offset >= len(self.code): - return offset - 1 - if not self._is_id_char(offset): - if offset > 0 and self._is_id_char(offset - 1): - return offset - 1 - if offset < len(self.code) - 1 and self._is_id_char(offset + 1): - return offset + 1 - return offset - - def _is_id_char(self, offset): - return self.code[offset].isalnum() or self.code[offset] == '_' - - def _find_string_start(self, offset): - kind = self.code[offset] - try: - return self.code.rindex(kind, 0, offset) - except ValueError: - return 0 - - def _find_parens_start(self, offset): - offset = self._find_last_non_space_char(offset - 1) - while offset >= 0 and self.code[offset] not in '[({': - if self.code[offset] not in ':,': - offset = self._find_primary_start(offset) - offset = self._find_last_non_space_char(offset - 1) - return offset - - def _find_atom_start(self, offset): - old_offset = offset - if self.code[offset] == '\n': - return offset + 1 - if self.code[offset].isspace(): - offset = self._find_last_non_space_char(offset) - if self.code[offset] in '\'"': - return self._find_string_start(offset) - if self.code[offset] in ')]}': - return self._find_parens_start(offset) - if self._is_id_char(offset): - return self._find_word_start(offset) - return old_offset - - def _find_primary_without_dot_start(self, offset): - """It tries to find the undotted primary start - - It is different from `self._get_atom_start()` in that it - follows function calls, too; such as in ``f(x)``. 
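
A minimal sketch of the Worder helpers above; the code snippet and the offset are made up for the example.

from rope.base import worder

code = 'result = obj.method(arg)\n'
w = worder.Worder(code)
offset = code.index('method')        # any offset inside the word works
print(w.get_word_at(offset))         # 'method'
print(w.get_primary_at(offset))      # 'obj.method' -- the dotted primary is followed back
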
- - """ - last_atom = offset - offset = self._find_last_non_space_char(last_atom) - while offset > 0 and self.code[offset] in ')]': - last_atom = self._find_parens_start(offset) - offset = self._find_last_non_space_char(last_atom - 1) - if offset >= 0 and (self.code[offset] in '"\'})]' or - self._is_id_char(offset)): - atom_start = self._find_atom_start(offset) - if not keyword.iskeyword(self.code[atom_start:offset + 1]): - return atom_start - return last_atom - - def _find_primary_start(self, offset): - if offset >= len(self.code): - offset = len(self.code) - 1 - if self.code[offset] != '.': - offset = self._find_primary_without_dot_start(offset) - else: - offset = offset + 1 - while offset > 0: - prev = self._find_last_non_space_char(offset - 1) - if offset <= 0 or self.code[prev] != '.': - break - offset = self._find_primary_without_dot_start(prev - 1) - if not self._is_id_char(offset): - break - - return offset - - def get_primary_at(self, offset): - offset = self._get_fixed_offset(offset) - start, end = self.get_primary_range(offset) - return self.raw[start:end].strip() - - def get_splitted_primary_before(self, offset): - """returns expression, starting, starting_offset - - This function is used in `rope.codeassist.assist` function. - """ - if offset == 0: - return ('', '', 0) - end = offset - 1 - word_start = self._find_atom_start(end) - real_start = self._find_primary_start(end) - if self.code[word_start:offset].strip() == '': - word_start = end - if self.code[end].isspace(): - word_start = end - if self.code[real_start:word_start].strip() == '': - real_start = word_start - if real_start == word_start == end and not self._is_id_char(end): - return ('', '', offset) - if real_start == word_start: - return ('', self.raw[word_start:offset], word_start) - else: - if self.code[end] == '.': - return (self.raw[real_start:end], '', offset) - last_dot_position = word_start - if self.code[word_start] != '.': - last_dot_position = self._find_last_non_space_char(word_start - 1) - last_char_position = self._find_last_non_space_char(last_dot_position - 1) - if self.code[word_start].isspace(): - word_start = offset - return (self.raw[real_start:last_char_position + 1], - self.raw[word_start:offset], word_start) - - def _get_line_start(self, offset): - try: - return self.code.rindex('\n', 0, offset + 1) - except ValueError: - return 0 - - def _get_line_end(self, offset): - try: - return self.code.index('\n', offset) - except ValueError: - return len(self.code) - - def is_name_assigned_in_class_body(self, offset): - word_start = self._find_word_start(offset - 1) - word_end = self._find_word_end(offset) + 1 - if '.' 
in self.code[word_start:word_end]: - return False - line_start = self._get_line_start(word_start) - line = self.code[line_start:word_start].strip() - return not line and self.get_assignment_type(offset) == '=' - - def is_a_class_or_function_name_in_header(self, offset): - word_start = self._find_word_start(offset - 1) - line_start = self._get_line_start(word_start) - prev_word = self.code[line_start:word_start].strip() - return prev_word in ['def', 'class'] - - def _find_first_non_space_char(self, offset): - if offset >= len(self.code): - return len(self.code) - while offset < len(self.code) and self.code[offset].isspace(): - if self.code[offset] == '\n': - return offset - offset += 1 - return offset - - def is_a_function_being_called(self, offset): - word_end = self._find_word_end(offset) + 1 - next_char = self._find_first_non_space_char(word_end) - return next_char < len(self.code) and \ - self.code[next_char] == '(' and \ - not self.is_a_class_or_function_name_in_header(offset) - - def _find_import_end(self, start): - return self._get_line_end(start) - - def is_import_statement(self, offset): - try: - last_import = self.code.rindex('import ', 0, offset) - except ValueError: - return False - return self._find_import_end(last_import + 7) >= offset - - def is_from_statement(self, offset): - try: - last_from = self.code.rindex('from ', 0, offset) - from_import = self.code.index(' import ', last_from) - from_names = from_import + 8 - except ValueError: - return False - from_names = self._find_first_non_space_char(from_names) - return self._find_import_end(from_names) >= offset - - def is_from_statement_module(self, offset): - if offset >= len(self.code) - 1: - return False - stmt_start = self._find_primary_start(offset) - line_start = self._get_line_start(stmt_start) - prev_word = self.code[line_start:stmt_start].strip() - return prev_word == 'from' - - def is_a_name_after_from_import(self, offset): - try: - if len(self.code) > offset and self.code[offset] == '\n': - line_start = self._get_line_start(offset - 1) - else: - line_start = self._get_line_start(offset) - last_from = self.code.rindex('from ', line_start, offset) - from_import = self.code.index(' import ', last_from) - from_names = from_import + 8 - except ValueError: - return False - if from_names - 1 > offset: - return False - return self._find_import_end(from_names) >= offset - - def get_from_module(self, offset): - try: - last_from = self.code.rindex('from ', 0, offset) - import_offset = self.code.index(' import ', last_from) - end = self._find_last_non_space_char(import_offset) - return self.get_primary_at(end) - except ValueError: - pass - - def is_from_aliased(self, offset): - if not self.is_a_name_after_from_import(offset): - return False - try: - end = self._find_word_end(offset) - as_end = min(self._find_word_end(end + 1), len(self.code)) - as_start = self._find_word_start(as_end) - if self.code[as_start:as_end + 1] == 'as': - return True - except ValueError: - return False - - def get_from_aliased(self, offset): - try: - end = self._find_word_end(offset) - as_ = self._find_word_end(end + 1) - alias = self._find_word_end(as_ + 1) - start = self._find_word_start(alias) - return self.raw[start:alias + 1] - except ValueError: - pass - - def is_function_keyword_parameter(self, offset): - word_end = self._find_word_end(offset) - if word_end + 1 == len(self.code): - return False - next_char = self._find_first_non_space_char(word_end + 1) - equals = self.code[next_char:next_char + 2] - if equals == '==' or not 
equals.startswith('='): - return False - word_start = self._find_word_start(offset) - prev_char = self._find_last_non_space_char(word_start - 1) - return prev_char - 1 >= 0 and self.code[prev_char] in ',(' - - def is_on_function_call_keyword(self, offset): - stop = self._get_line_start(offset) - if self._is_id_char(offset): - offset = self._find_word_start(offset) - 1 - offset = self._find_last_non_space_char(offset) - if offset <= stop or self.code[offset] not in '(,': - return False - parens_start = self.find_parens_start_from_inside(offset) - return stop < parens_start - - def find_parens_start_from_inside(self, offset): - stop = self._get_line_start(offset) - opens = 1 - while offset > stop: - if self.code[offset] == '(': - break - if self.code[offset] != ',': - offset = self._find_primary_start(offset) - offset -= 1 - return max(stop, offset) - - def is_assigned_here(self, offset): - return self.get_assignment_type(offset) is not None - - def get_assignment_type(self, offset): - # XXX: does not handle tuple assignments - word_end = self._find_word_end(offset) - next_char = self._find_first_non_space_char(word_end + 1) - single = self.code[next_char:next_char + 1] - double = self.code[next_char:next_char + 2] - triple = self.code[next_char:next_char + 3] - if double not in ('==', '<=', '>=', '!='): - for op in [single, double, triple]: - if op.endswith('='): - return op - - def get_primary_range(self, offset): - start = self._find_primary_start(offset) - end = self._find_word_end(offset) + 1 - return (start, end) - - def get_word_range(self, offset): - offset = max(0, offset) - start = self._find_word_start(offset) - end = self._find_word_end(offset) + 1 - return (start, end) - - def get_word_parens_range(self, offset, opening='(', closing=')'): - end = self._find_word_end(offset) - start_parens = self.code.index(opening, end) - index = start_parens - open_count = 0 - while index < len(self.code): - if self.code[index] == opening: - open_count += 1 - if self.code[index] == closing: - open_count -= 1 - if open_count == 0: - return (start_parens, index + 1) - index += 1 - return (start_parens, index) - - def get_parameters(self, first, last): - keywords = [] - args = [] - current = self._find_last_non_space_char(last - 1) - while current > first: - primary_start = current - current = self._find_primary_start(current) - while current != first and self.code[current] not in '=,': - current = self._find_last_non_space_char(current - 1) - primary = self.raw[current + 1:primary_start + 1].strip() - if self.code[current] == '=': - primary_start = current - 1 - current -= 1 - while current != first and self.code[current] not in ',': - current = self._find_last_non_space_char(current - 1) - param_name = self.raw[current + 1:primary_start + 1].strip() - keywords.append((param_name, primary)) - else: - args.append(primary) - current = self._find_last_non_space_char(current - 1) - args.reverse() - keywords.reverse() - return args, keywords - - def is_assigned_in_a_tuple_assignment(self, offset): - start = self._get_line_start(offset) - end = self._get_line_end(offset) - primary_start = self._find_primary_start(offset) - primary_end = self._find_word_end(offset) - - prev_char_offset = self._find_last_non_space_char(primary_start - 1) - next_char_offset = self._find_first_non_space_char(primary_end + 1) - next_char = prev_char = '' - if prev_char_offset >= start: - prev_char = self.code[prev_char_offset] - if next_char_offset < end: - next_char = self.code[next_char_offset] - try: - equals_offset = 
self.code.index('=', start, end) - except ValueError: - return False - if prev_char not in '(,' and next_char not in ',)': - return False - parens_start = self.find_parens_start_from_inside(offset) - # XXX: only handling (x, y) = value - return offset < equals_offset and \ - self.code[start:parens_start].strip() == '' - - def get_function_and_args_in_header(self, offset): - offset = self.find_function_offset(offset) - lparens, rparens = self.get_word_parens_range(offset) - return self.raw[offset:rparens + 1] - - def find_function_offset(self, offset, definition='def '): - while True: - offset = self.code.index(definition, offset) - if offset == 0 or not self._is_id_char(offset - 1): - break - offset += 1 - def_ = offset + 4 - return self._find_first_non_space_char(def_) - - def get_lambda_and_args(self, offset): - offset = self.find_function_offset(offset, definition = 'lambda ') - lparens, rparens = self.get_word_parens_range(offset, opening=' ', closing=':') - return self.raw[offset:rparens + 1] - diff --git a/pymode/libs3/rope/contrib/__init__.py b/pymode/libs3/rope/contrib/__init__.py deleted file mode 100644 index 0d3f837e..00000000 --- a/pymode/libs3/rope/contrib/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -"""rope IDE tools package - -This package contains modules that can be used in IDEs -but do not depend on the UI. So these modules will be used -by `rope.ui` modules. - -""" diff --git a/pymode/libs3/rope/contrib/autoimport.py b/pymode/libs3/rope/contrib/autoimport.py deleted file mode 100644 index 4b7b5b05..00000000 --- a/pymode/libs3/rope/contrib/autoimport.py +++ /dev/null @@ -1,217 +0,0 @@ -import re - -from rope.base import (exceptions, pynames, resourceobserver, - taskhandle, pyobjects, builtins, resources) -from rope.refactor import importutils - - -class AutoImport(object): - """A class for finding the module that provides a name - - This class maintains a cache of global names in python modules. - Note that this cache is not accurate and might be out of date. - - """ - - def __init__(self, project, observe=True, underlined=False): - """Construct an AutoImport object - - If `observe` is `True`, listen for project changes and update - the cache. - - If `underlined` is `True`, underlined names are cached, too. - """ - self.project = project - self.underlined = underlined - self.names = project.data_files.read_data('globalnames') - if self.names is None: - self.names = {} - project.data_files.add_write_hook(self._write) - # XXX: using a filtered observer - observer = resourceobserver.ResourceObserver( - changed=self._changed, moved=self._moved, removed=self._removed) - if observe: - project.add_observer(observer) - - def import_assist(self, starting): - """Return a list of ``(name, module)`` tuples - - This function tries to find modules that have a global name - that starts with `starting`. - """ - # XXX: breaking if gave up! 
use generators - result = [] - for module in self.names: - for global_name in self.names[module]: - if global_name.startswith(starting): - result.append((global_name, module)) - return result - - def get_modules(self, name): - """Return the list of modules that have global `name`""" - result = [] - for module in self.names: - if name in self.names[module]: - result.append(module) - return result - - def get_all_names(self): - """Return the list of all cached global names""" - result = set() - for module in self.names: - result.update(set(self.names[module])) - return result - - def get_name_locations(self, name): - """Return a list of ``(resource, lineno)`` tuples""" - result = [] - pycore = self.project.pycore - for module in self.names: - if name in self.names[module]: - try: - pymodule = pycore.get_module(module) - if name in pymodule: - pyname = pymodule[name] - module, lineno = pyname.get_definition_location() - if module is not None: - resource = module.get_module().get_resource() - if resource is not None and lineno is not None: - result.append((resource, lineno)) - except exceptions.ModuleNotFoundError: - pass - return result - - def generate_cache(self, resources=None, underlined=None, - task_handle=taskhandle.NullTaskHandle()): - """Generate global name cache for project files - - If `resources` is a list of `rope.base.resource.File`\s, only - those files are searched; otherwise all python modules in the - project are cached. - - """ - if resources is None: - resources = self.project.pycore.get_python_files() - job_set = task_handle.create_jobset( - 'Generatig autoimport cache', len(resources)) - for file in resources: - job_set.started_job('Working on <%s>' % file.path) - self.update_resource(file, underlined) - job_set.finished_job() - - def generate_modules_cache(self, modules, underlined=None, - task_handle=taskhandle.NullTaskHandle()): - """Generate global name cache for modules listed in `modules`""" - job_set = task_handle.create_jobset( - 'Generatig autoimport cache for modules', len(modules)) - for modname in modules: - job_set.started_job('Working on <%s>' % modname) - if modname.endswith('.*'): - mod = self.project.pycore.find_module(modname[:-2]) - if mod: - for sub in submodules(mod): - self.update_resource(sub, underlined) - else: - self.update_module(modname, underlined) - job_set.finished_job() - - def clear_cache(self): - """Clear all entries in global-name cache - - It might be a good idea to use this function before - regenerating global names. 
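
A minimal sketch of the AutoImport cache described above; the project root and the queried prefix and name are illustrative assumptions, and generate_cache() walks every Python file in the project before the queries return anything useful.

from rope.base.project import Project
from rope.contrib.autoimport import AutoImport

project = Project('.')                      # assumed project root
ai = AutoImport(project, observe=False)     # skip live observers for a one-off query
ai.generate_cache()                         # index the global names of project modules
print(ai.import_assist('pars'))             # [(global_name, module), ...] for the prefix
print(ai.get_modules('main'))               # modules that define a global 'main'
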
- - """ - self.names.clear() - - def find_insertion_line(self, code): - """Guess at what line the new import should be inserted""" - match = re.search(r'^(def|class)\s+', code) - if match is not None: - code = code[:match.start()] - try: - pymodule = self.project.pycore.get_string_module(code) - except exceptions.ModuleSyntaxError: - return 1 - testmodname = '__rope_testmodule_rope' - importinfo = importutils.NormalImport(((testmodname, None),)) - module_imports = importutils.get_module_imports( - self.project.pycore, pymodule) - module_imports.add_import(importinfo) - code = module_imports.get_changed_source() - offset = code.index(testmodname) - lineno = code.count('\n', 0, offset) + 1 - return lineno - - def update_resource(self, resource, underlined=None): - """Update the cache for global names in `resource`""" - try: - pymodule = self.project.pycore.resource_to_pyobject(resource) - modname = self._module_name(resource) - self._add_names(pymodule, modname, underlined) - except exceptions.ModuleSyntaxError: - pass - - def update_module(self, modname, underlined=None): - """Update the cache for global names in `modname` module - - `modname` is the name of a module. - """ - try: - pymodule = self.project.pycore.get_module(modname) - self._add_names(pymodule, modname, underlined) - except exceptions.ModuleNotFoundError: - pass - - def _module_name(self, resource): - return self.project.pycore.modname(resource) - - def _add_names(self, pymodule, modname, underlined): - if underlined is None: - underlined = self.underlined - globals = [] - if isinstance(pymodule, pyobjects.PyDefinedObject): - attributes = pymodule._get_structural_attributes() - else: - attributes = pymodule.get_attributes() - for name, pyname in attributes.items(): - if not underlined and name.startswith('_'): - continue - if isinstance(pyname, (pynames.AssignedName, pynames.DefinedName)): - globals.append(name) - if isinstance(pymodule, builtins.BuiltinModule): - globals.append(name) - self.names[modname] = globals - - def _write(self): - self.project.data_files.write_data('globalnames', self.names) - - def _changed(self, resource): - if not resource.is_folder(): - self.update_resource(resource) - - def _moved(self, resource, newresource): - if not resource.is_folder(): - modname = self._module_name(resource) - if modname in self.names: - del self.names[modname] - self.update_resource(newresource) - - def _removed(self, resource): - if not resource.is_folder(): - modname = self._module_name(resource) - if modname in self.names: - del self.names[modname] - - -def submodules(mod): - if isinstance(mod, resources.File): - if mod.name.endswith('.py') and mod.name != '__init__.py': - return set([mod]) - return set() - if not mod.has_child('__init__.py'): - return set() - result = set([mod]) - for child in mod.get_children(): - result |= submodules(child) - return result diff --git a/pymode/libs3/rope/contrib/changestack.py b/pymode/libs3/rope/contrib/changestack.py deleted file mode 100644 index 70f2271f..00000000 --- a/pymode/libs3/rope/contrib/changestack.py +++ /dev/null @@ -1,52 +0,0 @@ -"""For performing many refactorings as a single command - -`changestack` module can be used to perform many refactorings on top -of each other as one bigger command. It can be used like:: - - stack = ChangeStack(project, 'my big command') - - #.. - stack.push(refactoring1.get_changes()) - #.. - stack.push(refactoring2.get_changes()) - #.. 
- stack.push(refactoringX.get_changes()) - - stack.pop_all() - changes = stack.merged() - -Now `changes` can be previewed or performed as before. -""" - -from rope.base import change - - -class ChangeStack(object): - - def __init__(self, project, description='merged changes'): - self.project = project - self.description = description - self.stack = [] - - def push(self, changes): - self.stack.append(changes) - self.project.do(changes) - - def pop_all(self): - for i in range(len(self.stack)): - self.project.history.undo(drop=True) - - def merged(self): - result = change.ChangeSet(self.description) - for changes in self.stack: - for c in self._basic_changes(changes): - result.add_change(c) - return result - - def _basic_changes(self, changes): - if isinstance(changes, change.ChangeSet): - for child in changes.changes: - for atom in self._basic_changes(child): - yield atom - else: - yield changes diff --git a/pymode/libs3/rope/contrib/codeassist.py b/pymode/libs3/rope/contrib/codeassist.py deleted file mode 100644 index 994f9fef..00000000 --- a/pymode/libs3/rope/contrib/codeassist.py +++ /dev/null @@ -1,648 +0,0 @@ -import keyword -import sys -import warnings - -import rope.base.codeanalyze -import rope.base.evaluate -from rope.base import pyobjects, pyobjectsdef, pynames, builtins, exceptions, worder -from rope.base.codeanalyze import SourceLinesAdapter -from rope.contrib import fixsyntax -from rope.refactor import functionutils - - -def code_assist(project, source_code, offset, resource=None, - templates=None, maxfixes=1, later_locals=True): - """Return python code completions as a list of `CodeAssistProposal`\s - - `resource` is a `rope.base.resources.Resource` object. If - provided, relative imports are handled. - - `maxfixes` is the maximum number of errors to fix if the code has - errors in it. - - If `later_locals` is `False` names defined in this scope and after - this line is ignored. - - """ - if templates is not None: - warnings.warn('Codeassist no longer supports templates', - DeprecationWarning, stacklevel=2) - assist = _PythonCodeAssist( - project, source_code, offset, resource=resource, - maxfixes=maxfixes, later_locals=later_locals) - return assist() - - -def starting_offset(source_code, offset): - """Return the offset in which the completion should be inserted - - Usually code assist proposals should be inserted like:: - - completion = proposal.name - result = (source_code[:starting_offset] + - completion + source_code[offset:]) - - Where starting_offset is the offset returned by this function. - - """ - word_finder = worder.Worder(source_code, True) - expression, starting, starting_offset = \ - word_finder.get_splitted_primary_before(offset) - return starting_offset - - -def get_doc(project, source_code, offset, resource=None, maxfixes=1): - """Get the pydoc""" - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is None: - return None - pyobject = pyname.get_object() - return PyDocExtractor().get_doc(pyobject) - - -def get_calltip(project, source_code, offset, resource=None, - maxfixes=1, ignore_unknown=False, remove_self=False): - """Get the calltip of a function - - The format of the returned string is - ``module_name.holding_scope_names.function_name(arguments)``. For - classes `__init__()` and for normal objects `__call__()` function - is used. - - Note that the offset is on the function itself *not* after the its - open parenthesis. 
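code_assist() and starting_offset() above are rope's completion entry points; the starting_offset() docstring spells out how a proposal is spliced back into the buffer. A short sketch of that pattern, with an illustrative source string standing in for real buffer contents::

    from rope.base.project import Project
    from rope.contrib import codeassist

    project = Project('.')
    source = 'import os\nos.pa'           # illustrative buffer contents
    offset = len(source)                  # cursor right after 'os.pa'
    proposals = codeassist.sorted_proposals(
        codeassist.code_assist(project, source, offset))
    start = codeassist.starting_offset(source, offset)
    # Splice in the first proposal exactly as the starting_offset() docstring describes.
    if proposals:
        print(source[:start] + proposals[0].name + source[offset:])
    project.close()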
(Actually it used to be the other way but it - was easily confused when string literals were involved. So I - decided it is better for it not to try to be too clever when it - cannot be clever enough). You can use a simple search like:: - - offset = source_code.rindex('(', 0, offset) - 1 - - to handle simple situations. - - If `ignore_unknown` is `True`, `None` is returned for functions - without source-code like builtins and extensions. - - If `remove_self` is `True`, the first parameter whose name is self - will be removed for methods. - """ - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is None: - return None - pyobject = pyname.get_object() - return PyDocExtractor().get_calltip(pyobject, ignore_unknown, remove_self) - - -def get_definition_location(project, source_code, offset, - resource=None, maxfixes=1): - """Return the definition location of the python name at `offset` - - Return a (`rope.base.resources.Resource`, lineno) tuple. If no - `resource` is given and the definition is inside the same module, - the first element of the returned tuple would be `None`. If the - location cannot be determined ``(None, None)`` is returned. - - """ - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is not None: - module, lineno = pyname.get_definition_location() - if module is not None: - return module.get_module().get_resource(), lineno - return (None, None) - - -def find_occurrences(*args, **kwds): - import rope.contrib.findit - warnings.warn('Use `rope.contrib.findit.find_occurrences()` instead', - DeprecationWarning, stacklevel=2) - return rope.contrib.findit.find_occurrences(*args, **kwds) - - -class CompletionProposal(object): - """A completion proposal - - The `scope` instance variable shows where proposed name came from - and can be 'global', 'local', 'builtin', 'attribute', 'keyword', - 'imported', 'parameter_keyword'. - - The `type` instance variable shows the approximate type of the - proposed object and can be 'instance', 'class', 'function', 'module', - and `None`. - - All possible relations between proposal's `scope` and `type` are shown - in the table below (different scopes in rows and types in columns): - - | instance | class | function | module | None - local | + | + | + | + | - global | + | + | + | + | - builtin | + | + | + | | - attribute | + | + | + | + | - imported | + | + | + | + | - keyword | | | | | + - parameter_keyword | | | | | + - - """ - - def __init__(self, name, scope, pyname=None): - self.name = name - self.pyname = pyname - self.scope = self._get_scope(scope) - - def __str__(self): - return '%s (%s, %s)' % (self.name, self.scope, self.type) - - def __repr__(self): - return str(self) - - @property - def parameters(self): - """The names of the parameters the function takes. - - Returns None if this completion is not a function. 
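get_calltip() and get_definition_location() take an offset that points at the function name itself; for simple cases the rindex() trick quoted above is enough. A hedged sketch, where the output noted in the comment is only indicative::

    from rope.base.project import Project
    from rope.contrib import codeassist

    project = Project('.')
    source = 'import os\nos.path.join(os.curdir, "x")\n'
    offset = source.rindex('(') - 1       # land on the function name, per the docstring
    print(codeassist.get_calltip(project, source, offset))
    # prints something like 'posixpath.join(a, *p)', or None if it cannot be resolved
    print(codeassist.get_definition_location(project, source, offset))
    project.close()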
- """ - pyname = self.pyname - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - if isinstance(pyname, pynames.DefinedName): - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return pyobject.get_param_names() - - @property - def type(self): - pyname = self.pyname - if isinstance(pyname, builtins.BuiltinName): - pyobject = pyname.get_object() - if isinstance(pyobject, builtins.BuiltinFunction): - return 'function' - elif isinstance(pyobject, builtins.BuiltinClass): - clsobj = pyobject.builtin - return 'class' - elif isinstance(pyobject, builtins.BuiltinObject) or \ - isinstance(pyobject, builtins.BuiltinName): - return 'instance' - elif isinstance(pyname, pynames.ImportedModule): - return 'module' - elif isinstance(pyname, pynames.ImportedName) or \ - isinstance(pyname, pynames.DefinedName): - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return 'function' - if isinstance(pyobject, pyobjects.AbstractClass): - return 'class' - return 'instance' - - def _get_scope(self, scope): - if isinstance(self.pyname, builtins.BuiltinName): - return 'builtin' - if isinstance(self.pyname, pynames.ImportedModule) or \ - isinstance(self.pyname, pynames.ImportedName): - return 'imported' - return scope - - def get_doc(self): - """Get the proposed object's docstring. - - Returns None if it can not be get. - """ - if not self.pyname: - return None - pyobject = self.pyname.get_object() - if not hasattr(pyobject, 'get_doc'): - return None - return self.pyname.get_object().get_doc() - - @property - def kind(self): - warnings.warn("the proposal's `kind` property is deprecated, " \ - "use `scope` instead") - return self.scope - - -# leaved for backward compatibility -CodeAssistProposal = CompletionProposal - - -class NamedParamProposal(CompletionProposal): - """A parameter keyword completion proposal - - Holds reference to ``_function`` -- the function which - parameter ``name`` belongs to. This allows to determine - default value for this parameter. - """ - def __init__(self, name, function): - self.argname = name - name = '%s=' % name - super(NamedParamProposal, self).__init__(name, 'parameter_keyword') - self._function = function - - def get_default(self): - """Get a string representation of a param's default value. - - Returns None if there is no default value for this param. - """ - definfo = functionutils.DefinitionInfo.read(self._function) - for arg, default in definfo.args_with_defaults: - if self.argname == arg: - return default - return None - - -def sorted_proposals(proposals, scopepref=None, typepref=None): - """Sort a list of proposals - - Return a sorted list of the given `CodeAssistProposal`\s. - - `scopepref` can be a list of proposal scopes. Defaults to - ``['parameter_keyword', 'local', 'global', 'imported', - 'attribute', 'builtin', 'keyword']``. - - `typepref` can be a list of proposal types. Defaults to - ``['class', 'function', 'instance', 'module', None]``. - (`None` stands for completions with no type like keywords.) - """ - sorter = _ProposalSorter(proposals, scopepref, typepref) - return sorter.get_sorted_proposal_list() - - -def starting_expression(source_code, offset): - """Return the expression to complete""" - word_finder = worder.Worder(source_code, True) - expression, starting, starting_offset = \ - word_finder.get_splitted_primary_before(offset) - if expression: - return expression + '.' 
+ starting - return starting - - -def default_templates(): - warnings.warn('default_templates() is deprecated.', - DeprecationWarning, stacklevel=2) - return {} - - -class _PythonCodeAssist(object): - - def __init__(self, project, source_code, offset, resource=None, - maxfixes=1, later_locals=True): - self.project = project - self.pycore = self.project.pycore - self.code = source_code - self.resource = resource - self.maxfixes = maxfixes - self.later_locals = later_locals - self.word_finder = worder.Worder(source_code, True) - self.expression, self.starting, self.offset = \ - self.word_finder.get_splitted_primary_before(offset) - - keywords = keyword.kwlist - - def _find_starting_offset(self, source_code, offset): - current_offset = offset - 1 - while current_offset >= 0 and (source_code[current_offset].isalnum() or - source_code[current_offset] in '_'): - current_offset -= 1; - return current_offset + 1 - - def _matching_keywords(self, starting): - result = [] - for kw in self.keywords: - if kw.startswith(starting): - result.append(CompletionProposal(kw, 'keyword')) - return result - - def __call__(self): - if self.offset > len(self.code): - return [] - completions = list(self._code_completions().values()) - if self.expression.strip() == '' and self.starting.strip() != '': - completions.extend(self._matching_keywords(self.starting)) - return completions - - def _dotted_completions(self, module_scope, holding_scope): - result = {} - found_pyname = rope.base.evaluate.eval_str(holding_scope, - self.expression) - if found_pyname is not None: - element = found_pyname.get_object() - compl_scope = 'attribute' - if isinstance(element, (pyobjectsdef.PyModule, - pyobjectsdef.PyPackage)): - compl_scope = 'imported' - for name, pyname in element.get_attributes().items(): - if name.startswith(self.starting): - result[name] = CompletionProposal(name, compl_scope, pyname) - return result - - def _undotted_completions(self, scope, result, lineno=None): - if scope.parent != None: - self._undotted_completions(scope.parent, result) - if lineno is None: - names = scope.get_propagated_names() - else: - names = scope.get_names() - for name, pyname in names.items(): - if name.startswith(self.starting): - compl_scope = 'local' - if scope.get_kind() == 'Module': - compl_scope = 'global' - if lineno is None or self.later_locals or \ - not self._is_defined_after(scope, pyname, lineno): - result[name] = CompletionProposal(name, compl_scope, - pyname) - - def _from_import_completions(self, pymodule): - module_name = self.word_finder.get_from_module(self.offset) - if module_name is None: - return {} - pymodule = self._find_module(pymodule, module_name) - result = {} - for name in pymodule: - if name.startswith(self.starting): - result[name] = CompletionProposal(name, scope='global', - pyname=pymodule[name]) - return result - - def _find_module(self, pymodule, module_name): - dots = 0 - while module_name[dots] == '.': - dots += 1 - pyname = pynames.ImportedModule(pymodule, - module_name[dots:], dots) - return pyname.get_object() - - def _is_defined_after(self, scope, pyname, lineno): - location = pyname.get_definition_location() - if location is not None and location[1] is not None: - if location[0] == scope.pyobject.get_module() and \ - lineno <= location[1] <= scope.get_end(): - return True - - def _code_completions(self): - lineno = self.code.count('\n', 0, self.offset) + 1 - fixer = fixsyntax.FixSyntax(self.pycore, self.code, - self.resource, self.maxfixes) - pymodule = fixer.get_pymodule() - module_scope = 
pymodule.get_scope() - code = pymodule.source_code - lines = code.split('\n') - result = {} - start = fixsyntax._logical_start(lines, lineno) - indents = fixsyntax._get_line_indents(lines[start - 1]) - inner_scope = module_scope.get_inner_scope_for_line(start, indents) - if self.word_finder.is_a_name_after_from_import(self.offset): - return self._from_import_completions(pymodule) - if self.expression.strip() != '': - result.update(self._dotted_completions(module_scope, inner_scope)) - else: - result.update(self._keyword_parameters(module_scope.pyobject, - inner_scope)) - self._undotted_completions(inner_scope, result, lineno=lineno) - return result - - def _keyword_parameters(self, pymodule, scope): - offset = self.offset - if offset == 0: - return {} - word_finder = worder.Worder(self.code, True) - lines = SourceLinesAdapter(self.code) - lineno = lines.get_line_number(offset) - if word_finder.is_on_function_call_keyword(offset - 1): - name_finder = rope.base.evaluate.ScopeNameFinder(pymodule) - function_parens = word_finder.\ - find_parens_start_from_inside(offset - 1) - primary = word_finder.get_primary_at(function_parens - 1) - try: - function_pyname = rope.base.evaluate.\ - eval_str(scope, primary) - except exceptions.BadIdentifierError as e: - return {} - if function_pyname is not None: - pyobject = function_pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - pass - elif isinstance(pyobject, pyobjects.AbstractClass) and \ - '__init__' in pyobject: - pyobject = pyobject['__init__'].get_object() - elif '__call__' in pyobject: - pyobject = pyobject['__call__'].get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - param_names = [] - param_names.extend( - pyobject.get_param_names(special_args=False)) - result = {} - for name in param_names: - if name.startswith(self.starting): - result[name + '='] = NamedParamProposal( - name, pyobject - ) - return result - return {} - - -class _ProposalSorter(object): - """Sort a list of code assist proposals""" - - def __init__(self, code_assist_proposals, scopepref=None, typepref=None): - self.proposals = code_assist_proposals - if scopepref is None: - scopepref = ['parameter_keyword', 'local', 'global', 'imported', - 'attribute', 'builtin', 'keyword'] - self.scopepref = scopepref - if typepref is None: - typepref = ['class', 'function', 'instance', 'module', None] - self.typerank = dict((type, index) - for index, type in enumerate(typepref)) - - def get_sorted_proposal_list(self): - """Return a list of `CodeAssistProposal`""" - proposals = {} - for proposal in self.proposals: - proposals.setdefault(proposal.scope, []).append(proposal) - result = [] - for scope in self.scopepref: - scope_proposals = proposals.get(scope, []) - scope_proposals = [proposal for proposal in scope_proposals - if proposal.type in self.typerank] - scope_proposals.sort(key = self._proposal_cmp) - result.extend(scope_proposals) - return result - - def _proposal_cmp(self, proposal): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - return (self.typerank.get(proposal.type, 100), underline_count(proposal.name), proposal.name) - - def _compare_underlined_names(self, name1, name2): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - underline_count1 = underline_count(name1) - underline_count2 = underline_count(name2) - if underline_count1 != underline_count2: - return cmp(underline_count1, 
underline_count2) - return cmp(name1, name2) - - -class PyDocExtractor(object): - - def get_doc(self, pyobject): - if isinstance(pyobject, pyobjects.AbstractFunction): - return self._get_function_docstring(pyobject) - elif isinstance(pyobject, pyobjects.AbstractClass): - return self._get_class_docstring(pyobject) - elif isinstance(pyobject, pyobjects.AbstractModule): - return self._trim_docstring(pyobject.get_doc()) - return None - - def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False): - try: - if isinstance(pyobject, pyobjects.AbstractClass): - pyobject = pyobject['__init__'].get_object() - if not isinstance(pyobject, pyobjects.AbstractFunction): - pyobject = pyobject['__call__'].get_object() - except exceptions.AttributeNotFoundError: - return None - if ignore_unknown and not isinstance(pyobject, pyobjects.PyFunction): - return - if isinstance(pyobject, pyobjects.AbstractFunction): - result = self._get_function_signature(pyobject, add_module=True) - if remove_self and self._is_method(pyobject): - return result.replace('(self)', '()').replace('(self, ', '(') - return result - - def _get_class_docstring(self, pyclass): - contents = self._trim_docstring(pyclass.get_doc(), 2) - supers = [super.get_name() for super in pyclass.get_superclasses()] - doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) + contents - - if '__init__' in pyclass: - init = pyclass['__init__'].get_object() - if isinstance(init, pyobjects.AbstractFunction): - doc += '\n\n' + self._get_single_function_docstring(init) - return doc - - def _get_function_docstring(self, pyfunction): - functions = [pyfunction] - if self._is_method(pyfunction): - functions.extend(self._get_super_methods(pyfunction.parent, - pyfunction.get_name())) - return '\n\n'.join([self._get_single_function_docstring(function) - for function in functions]) - - def _is_method(self, pyfunction): - return isinstance(pyfunction, pyobjects.PyFunction) and \ - isinstance(pyfunction.parent, pyobjects.PyClass) - - def _get_single_function_docstring(self, pyfunction): - signature = self._get_function_signature(pyfunction) - docs = self._trim_docstring(pyfunction.get_doc(), indents=2) - return signature + ':\n\n' + docs - - def _get_super_methods(self, pyclass, name): - result = [] - for super_class in pyclass.get_superclasses(): - if name in super_class: - function = super_class[name].get_object() - if isinstance(function, pyobjects.AbstractFunction): - result.append(function) - result.extend(self._get_super_methods(super_class, name)) - return result - - def _get_function_signature(self, pyfunction, add_module=False): - location = self._location(pyfunction, add_module) - if isinstance(pyfunction, pyobjects.PyFunction): - info = functionutils.DefinitionInfo.read(pyfunction) - return location + info.to_string() - else: - return '%s(%s)' % (location + pyfunction.get_name(), - ', '.join(pyfunction.get_param_names())) - - def _location(self, pyobject, add_module=False): - location = [] - parent = pyobject.parent - while parent and not isinstance(parent, pyobjects.AbstractModule): - location.append(parent.get_name()) - location.append('.') - parent = parent.parent - if add_module: - if isinstance(pyobject, pyobjects.PyFunction): - module = pyobject.get_module() - location.insert(0, self._get_module(pyobject)) - if isinstance(parent, builtins.BuiltinModule): - location.insert(0, parent.get_name() + '.') - return ''.join(location) - - def _get_module(self, pyfunction): - module = pyfunction.get_module() - if module is not None: - 
resource = module.get_resource() - if resource is not None: - return pyfunction.pycore.modname(resource) + '.' - return '' - - def _trim_docstring(self, docstring, indents=0): - """The sample code from :PEP:`257`""" - if not docstring: - return '' - # Convert tabs to spaces (following normal Python rules) - # and split into a list of lines: - lines = docstring.expandtabs().splitlines() - # Determine minimum indentation (first line doesn't count): - indent = sys.maxsize - for line in lines[1:]: - stripped = line.lstrip() - if stripped: - indent = min(indent, len(line) - len(stripped)) - # Remove indentation (first line is special): - trimmed = [lines[0].strip()] - if indent < sys.maxsize: - for line in lines[1:]: - trimmed.append(line[indent:].rstrip()) - # Strip off trailing and leading blank lines: - while trimmed and not trimmed[-1]: - trimmed.pop() - while trimmed and not trimmed[0]: - trimmed.pop(0) - # Return a single string: - return '\n'.join((' ' * indents + line for line in trimmed)) - - -# Deprecated classes - -class TemplateProposal(CodeAssistProposal): - def __init__(self, name, template): - warnings.warn('TemplateProposal is deprecated.', - DeprecationWarning, stacklevel=2) - super(TemplateProposal, self).__init__(name, 'template') - self.template = template - - -class Template(object): - - def __init__(self, template): - self.template = template - warnings.warn('Template is deprecated.', - DeprecationWarning, stacklevel=2) - - def variables(self): - return [] - - def substitute(self, mapping): - return self.template - - def get_cursor_location(self, mapping): - return len(self.template) diff --git a/pymode/libs3/rope/contrib/finderrors.py b/pymode/libs3/rope/contrib/finderrors.py deleted file mode 100644 index c8cf7e15..00000000 --- a/pymode/libs3/rope/contrib/finderrors.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Finding bad name and attribute accesses - -`find_errors` function can be used to find possible bad name and -attribute accesses. As an example:: - - errors = find_errors(project, project.get_resource('mod.py')) - for error in errors: - print '%s: %s' % (error.lineno, error.error) - -prints possible errors for ``mod.py`` file. - -TODO: - -* use task handles -* reporting names at most once -* attributes of extension modules that don't appear in - extension_modules project config can be ignored -* not calling `PyScope.get_inner_scope_for_line()` if it is a - bottleneck; needs profiling -* not reporting occurrences where rope cannot infer the object -* rope saves multiple objects for some of the names in its objectdb - use all of them not to give false positives -* ... ;-) - -""" -from rope.base import ast, evaluate, pyobjects - - -def find_errors(project, resource): - """Find possible bad name and attribute accesses - - It returns a list of `Error`\s. 
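find_errors() walks a module and reports suspicious name and attribute accesses as Error objects carrying lineno and error. The module docstring's example, restated for Python 3 against a hypothetical mod.py in the project::

    from rope.base.project import Project
    from rope.contrib.finderrors import find_errors

    project = Project('.')
    resource = project.get_resource('mod.py')   # hypothetical project file
    for error in find_errors(project, resource):
        print('%s: %s' % (error.lineno, error.error))
    project.close()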
- """ - pymodule = project.pycore.resource_to_pyobject(resource) - finder = _BadAccessFinder(pymodule) - ast.walk(pymodule.get_ast(), finder) - return finder.errors - - -class _BadAccessFinder(object): - - def __init__(self, pymodule): - self.pymodule = pymodule - self.scope = pymodule.get_scope() - self.errors = [] - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.Param)): - return - scope = self.scope.get_inner_scope_for_line(node.lineno) - pyname = scope.lookup(node.id) - if pyname is None: - self._add_error(node, 'Unresolved variable') - elif self._is_defined_after(scope, pyname, node.lineno): - self._add_error(node, 'Defined later') - - def _Attribute(self, node): - if not isinstance(node.ctx, ast.Store): - scope = self.scope.get_inner_scope_for_line(node.lineno) - pyname = evaluate.eval_node(scope, node.value) - if pyname is not None and \ - pyname.get_object() != pyobjects.get_unknown(): - if node.attr not in pyname.get_object(): - self._add_error(node, 'Unresolved attribute') - ast.walk(node.value, self) - - def _add_error(self, node, msg): - if isinstance(node, ast.Attribute): - name = node.attr - else: - name = node.id - if name != 'None': - error = Error(node.lineno, msg + ' ' + name) - self.errors.append(error) - - def _is_defined_after(self, scope, pyname, lineno): - location = pyname.get_definition_location() - if location is not None and location[1] is not None: - if location[0] == self.pymodule and \ - lineno <= location[1] <= scope.get_end(): - return True - - -class Error(object): - - def __init__(self, lineno, error): - self.lineno = lineno - self.error = error - - def __str__(self): - return '%s: %s' % (self.lineno, self.error) diff --git a/pymode/libs3/rope/contrib/findit.py b/pymode/libs3/rope/contrib/findit.py deleted file mode 100644 index e8ddd7e5..00000000 --- a/pymode/libs3/rope/contrib/findit.py +++ /dev/null @@ -1,110 +0,0 @@ -import rope.base.codeanalyze -import rope.base.evaluate -import rope.base.pyobjects -from rope.base import taskhandle, exceptions, worder -from rope.contrib import fixsyntax -from rope.refactor import occurrences - - -def find_occurrences(project, resource, offset, unsure=False, resources=None, - in_hierarchy=False, task_handle=taskhandle.NullTaskHandle()): - """Return a list of `Location`\s - - If `unsure` is `True`, possible matches are returned, too. You - can use `Location.unsure` to see which are unsure occurrences. - `resources` can be a list of `rope.base.resource.File`\s that - should be searched for occurrences; if `None` all python files - in the project are searched. - - """ - name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) - primary, pyname = rope.base.evaluate.eval_location2( - this_pymodule, offset) - def is_match(occurrence): - return unsure - finder = occurrences.create_finder( - project.pycore, name, pyname, unsure=is_match, - in_hierarchy=in_hierarchy, instance=primary) - if resources is None: - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Finding Occurrences', - count=len(resources)) - return _find_locations(finder, resources, job_set) - - -def find_implementations(project, resource, offset, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Find the places a given method is overridden. - - Finds the places a method is implemented. Returns a list of - `Location`\s. 
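find_occurrences() and find_implementations() both return Location objects exposing resource, region, offset, lineno and an unsure flag. A minimal occurrence search, with the file name and the searched identifier as placeholders::

    from rope.base.project import Project
    from rope.contrib import findit

    project = Project('.')
    resource = project.get_resource('mod.py')          # hypothetical file
    offset = resource.read().index('my_name')          # offset of the name of interest
    for loc in findit.find_occurrences(project, resource, offset, unsure=True):
        print(loc.resource.path, loc.lineno, loc.unsure)
    project.close()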
- """ - name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = rope.base.evaluate.eval_location(this_pymodule, offset) - if pyname is not None: - pyobject = pyname.get_object() - if not isinstance(pyobject, rope.base.pyobjects.PyFunction) or \ - pyobject.get_kind() != 'method': - raise exceptions.BadIdentifierError('Not a method!') - else: - raise exceptions.BadIdentifierError('Cannot resolve the identifier!') - def is_defined(occurrence): - if not occurrence.is_defined(): - return False - def not_self(occurrence): - if occurrence.get_pyname().get_object() == pyname.get_object(): - return False - filters = [is_defined, not_self, - occurrences.InHierarchyFilter(pyname, True)] - finder = occurrences.Finder(project.pycore, name, filters=filters) - if resources is None: - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Finding Implementations', - count=len(resources)) - return _find_locations(finder, resources, job_set) - - -def find_definition(project, code, offset, resource=None, maxfixes=1): - """Return the definition location of the python name at `offset` - - A `Location` object is returned if the definition location can be - determined, otherwise ``None`` is returned. - """ - fixer = fixsyntax.FixSyntax(project.pycore, code, resource, maxfixes) - main_module = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is not None: - module, lineno = pyname.get_definition_location() - name = rope.base.worder.Worder(code).get_word_at(offset) - if lineno is not None: - start = module.lines.get_line_start(lineno) - def check_offset(occurrence): - if occurrence.offset < start: - return False - pyname_filter = occurrences.PyNameFilter(pyname) - finder = occurrences.Finder(project.pycore, name, - [check_offset, pyname_filter]) - for occurrence in finder.find_occurrences(pymodule=module): - return Location(occurrence) - - -class Location(object): - - def __init__(self, occurrence): - self.resource = occurrence.resource - self.region = occurrence.get_word_range() - self.offset = self.region[0] - self.unsure = occurrence.is_unsure() - self.lineno = occurrence.lineno - - -def _find_locations(finder, resources, job_set): - result = [] - for resource in resources: - job_set.started_job(resource.path) - for occurrence in finder.find_occurrences(resource): - result.append(Location(occurrence)) - job_set.finished_job() - return result diff --git a/pymode/libs3/rope/contrib/fixmodnames.py b/pymode/libs3/rope/contrib/fixmodnames.py deleted file mode 100644 index 7092f131..00000000 --- a/pymode/libs3/rope/contrib/fixmodnames.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Fix the name of modules - -This module is useful when you want to rename many of the modules in -your project. That can happen specially when you want to change their -naming style. - -For instance:: - - fixer = FixModuleNames(project) - changes = fixer.get_changes(fixer=str.lower) - project.do(changes) - -Here it renames all modules and packages to use lower-cased chars. -You can tell it to use any other style by using the ``fixer`` -argument. - -""" -from rope.base import change, taskhandle -from rope.contrib import changestack -from rope.refactor import rename - - -class FixModuleNames(object): - - def __init__(self, project): - self.project = project - - def get_changes(self, fixer=str.lower, - task_handle=taskhandle.NullTaskHandle()): - """Fix module names - - `fixer` is a function that takes and returns a `str`. 
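get_changes() accepts any str-to-str callable as the fixer, not only the str.lower shown in the module docstring. A sketch with a hypothetical CamelCase-to-snake_case fixer::

    import re
    from rope.base.project import Project
    from rope.contrib.fixmodnames import FixModuleNames

    def to_snake_case(name):
        # 'MyModule' -> 'my_module'; any function taking and returning str is accepted
        return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

    project = Project('.')
    changes = FixModuleNames(project).get_changes(fixer=to_snake_case)
    project.do(changes)
    project.close()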
Given - the name of a module, it should return the fixed name. - - """ - stack = changestack.ChangeStack(self.project, 'Fixing module names') - jobset = task_handle.create_jobset('Fixing module names', - self._count_fixes(fixer) + 1) - try: - while True: - for resource in self._tobe_fixed(fixer): - jobset.started_job(resource.path) - renamer = rename.Rename(self.project, resource) - changes = renamer.get_changes(fixer(self._name(resource))) - stack.push(changes) - jobset.finished_job() - break - else: - break - finally: - jobset.started_job('Reverting to original state') - stack.pop_all() - jobset.finished_job() - return stack.merged() - - def _count_fixes(self, fixer): - return len(list(self._tobe_fixed(fixer))) - - def _tobe_fixed(self, fixer): - for resource in self.project.pycore.get_python_files(): - modname = self._name(resource) - if modname != fixer(modname): - yield resource - - def _name(self, resource): - modname = resource.name.rsplit('.', 1)[0] - if modname == '__init__': - modname = resource.parent.name - return modname diff --git a/pymode/libs3/rope/contrib/fixsyntax.py b/pymode/libs3/rope/contrib/fixsyntax.py deleted file mode 100644 index f7667e92..00000000 --- a/pymode/libs3/rope/contrib/fixsyntax.py +++ /dev/null @@ -1,178 +0,0 @@ -import rope.base.codeanalyze -import rope.base.evaluate -from rope.base import worder, exceptions, utils -from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder - - -class FixSyntax(object): - - def __init__(self, pycore, code, resource, maxfixes=1): - self.pycore = pycore - self.code = code - self.resource = resource - self.maxfixes = maxfixes - - @utils.saveit - def get_pymodule(self): - """Get a `PyModule`""" - errors = [] - code = self.code - tries = 0 - while True: - try: - if tries == 0 and self.resource is not None and \ - self.resource.read() == code: - return self.pycore.resource_to_pyobject(self.resource, - force_errors=True) - return self.pycore.get_string_module( - code, resource=self.resource, force_errors=True) - except exceptions.ModuleSyntaxError as e: - if tries < self.maxfixes: - tries += 1 - self.commenter.comment(e.lineno) - code = '\n'.join(self.commenter.lines) - errors.append(' * line %s: %s ... fixed' % (e.lineno, - e.message_)) - else: - errors.append(' * line %s: %s ... raised!' 
% (e.lineno, - e.message_)) - new_message = ('\nSyntax errors in file %s:\n' % e.filename) \ - + '\n'.join(errors) - raise exceptions.ModuleSyntaxError(e.filename, e.lineno, - new_message) - - @property - @utils.saveit - def commenter(self): - return _Commenter(self.code) - - def pyname_at(self, offset): - pymodule = self.get_pymodule() - def old_pyname(): - word_finder = worder.Worder(self.code, True) - expression = word_finder.get_primary_at(offset) - expression = expression.replace('\\\n', ' ').replace('\n', ' ') - lineno = self.code.count('\n', 0, offset) - scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - return rope.base.evaluate.eval_str(scope, expression) - new_code = pymodule.source_code - def new_pyname(): - newoffset = self.commenter.transfered_offset(offset) - return rope.base.evaluate.eval_location(pymodule, newoffset) - if new_code.startswith(self.code[:offset + 1]): - return new_pyname() - result = old_pyname() - if result is None: - return new_pyname() - return result - - -class _Commenter(object): - - def __init__(self, code): - self.code = code - self.lines = self.code.split('\n') - self.lines.append('\n') - self.origs = list(range(len(self.lines) + 1)) - self.diffs = [0] * (len(self.lines) + 1) - - def comment(self, lineno): - start = _logical_start(self.lines, lineno, check_prev=True) - 1 - # using self._get_stmt_end() instead of self._get_block_end() - # to lower commented lines - end = self._get_stmt_end(start) - indents = _get_line_indents(self.lines[start]) - if 0 < start: - last_lineno = self._last_non_blank(start - 1) - last_line = self.lines[last_lineno] - if last_line.rstrip().endswith(':'): - indents = _get_line_indents(last_line) + 4 - self._set(start, ' ' * indents + 'pass') - for line in range(start + 1, end + 1): - self._set(line, self.lines[start]) - self._fix_incomplete_try_blocks(lineno, indents) - - def transfered_offset(self, offset): - lineno = self.code.count('\n', 0, offset) - diff = sum(self.diffs[:lineno]) - return offset + diff - - def _last_non_blank(self, start): - while start > 0 and self.lines[start].strip() == '': - start -= 1 - return start - - def _get_block_end(self, lineno): - end_line = lineno - base_indents = _get_line_indents(self.lines[lineno]) - for i in range(lineno + 1, len(self.lines)): - if _get_line_indents(self.lines[i]) >= base_indents: - end_line = i - else: - break - return end_line - - def _get_stmt_end(self, lineno): - end_line = lineno - base_indents = _get_line_indents(self.lines[lineno]) - for i in range(lineno + 1, len(self.lines)): - if _get_line_indents(self.lines[i]) <= base_indents: - return i - 1 - return lineno - - def _fix_incomplete_try_blocks(self, lineno, indents): - block_start = lineno - last_indents = current_indents = indents - while block_start > 0: - block_start = rope.base.codeanalyze.get_block_start( - ArrayLinesAdapter(self.lines), block_start) - 1 - if self.lines[block_start].strip().startswith('try:'): - indents = _get_line_indents(self.lines[block_start]) - if indents > last_indents: - continue - last_indents = indents - block_end = self._find_matching_deindent(block_start) - line = self.lines[block_end].strip() - if not (line.startswith('finally:') or - line.startswith('except ') or - line.startswith('except:')): - self._insert(block_end, ' ' * indents + 'finally:') - self._insert(block_end + 1, ' ' * indents + ' pass') - - def _find_matching_deindent(self, line_number): - indents = _get_line_indents(self.lines[line_number]) - current_line = line_number + 1 - while current_line < 
len(self.lines): - line = self.lines[current_line] - if not line.strip().startswith('#') and not line.strip() == '': - # HACK: We should have used logical lines here - if _get_line_indents(self.lines[current_line]) <= indents: - return current_line - current_line += 1 - return len(self.lines) - 1 - - def _set(self, lineno, line): - self.diffs[self.origs[lineno]] += len(line) - len(self.lines[lineno]) - self.lines[lineno] = line - - def _insert(self, lineno, line): - self.diffs[self.origs[lineno]] += len(line) + 1 - self.origs.insert(lineno, self.origs[lineno]) - self.lines.insert(lineno, line) - -def _logical_start(lines, lineno, check_prev=False): - logical_finder = LogicalLineFinder(ArrayLinesAdapter(lines)) - if check_prev: - prev = lineno - 1 - while prev > 0: - start, end = logical_finder.logical_line_in(prev) - if end is None or start <= lineno < end: - return start - if start <= prev: - break - prev -= 1 - return logical_finder.logical_line_in(lineno)[0] - - -def _get_line_indents(line): - return rope.base.codeanalyze.count_line_indents(line) diff --git a/pymode/libs3/rope/contrib/generate.py b/pymode/libs3/rope/contrib/generate.py deleted file mode 100644 index 4d850da0..00000000 --- a/pymode/libs3/rope/contrib/generate.py +++ /dev/null @@ -1,355 +0,0 @@ -import rope.base.evaluate -from rope.base import change, pyobjects, exceptions, pynames, worder, codeanalyze -from rope.refactor import sourceutils, importutils, functionutils, suites - - -def create_generate(kind, project, resource, offset): - """A factory for creating `Generate` objects - - `kind` can be 'variable', 'function', 'class', 'module' or - 'package'. - - """ - generate = eval('Generate' + kind.title()) - return generate(project, resource, offset) - - -def create_module(project, name, sourcefolder=None): - """Creates a module and returns a `rope.base.resources.File`""" - if sourcefolder is None: - sourcefolder = project.root - packages = name.split('.') - parent = sourcefolder - for package in packages[:-1]: - parent = parent.get_child(package) - return parent.create_file(packages[-1] + '.py') - -def create_package(project, name, sourcefolder=None): - """Creates a package and returns a `rope.base.resources.Folder`""" - if sourcefolder is None: - sourcefolder = project.root - packages = name.split('.') - parent = sourcefolder - for package in packages[:-1]: - parent = parent.get_child(package) - made_packages = parent.create_folder(packages[-1]) - made_packages.create_file('__init__.py') - return made_packages - - -class _Generate(object): - - def __init__(self, project, resource, offset): - self.project = project - self.resource = resource - self.info = self._generate_info(project, resource, offset) - self.name = self.info.get_name() - self._check_exceptional_conditions() - - def _generate_info(self, project, resource, offset): - return _GenerationInfo(project.pycore, resource, offset) - - def _check_exceptional_conditions(self): - if self.info.element_already_exists(): - raise exceptions.RefactoringError( - 'Element <%s> already exists.' % self.name) - if not self.info.primary_is_found(): - raise exceptions.RefactoringError( - 'Cannot determine the scope <%s> should be defined in.' 
% self.name) - - def get_changes(self): - changes = change.ChangeSet('Generate %s <%s>' % - (self._get_element_kind(), self.name)) - indents = self.info.get_scope_indents() - blanks = self.info.get_blank_lines() - base_definition = sourceutils.fix_indentation(self._get_element(), indents) - definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1] - - resource = self.info.get_insertion_resource() - start, end = self.info.get_insertion_offsets() - - collector = codeanalyze.ChangeCollector(resource.read()) - collector.add_change(start, end, definition) - changes.add_change(change.ChangeContents( - resource, collector.get_changed())) - return changes - - def get_location(self): - return (self.info.get_insertion_resource(), - self.info.get_insertion_lineno()) - - def _get_element_kind(self): - raise NotImplementedError() - - def _get_element(self): - raise NotImplementedError() - - -class GenerateFunction(_Generate): - - def _generate_info(self, project, resource, offset): - return _FunctionGenerationInfo(project.pycore, resource, offset) - - def _get_element(self): - decorator = '' - args = [] - if self.info.is_static_method(): - decorator = '@staticmethod\n' - if self.info.is_method() or self.info.is_constructor() or \ - self.info.is_instance(): - args.append('self') - args.extend(self.info.get_passed_args()) - definition = '%sdef %s(%s):\n pass\n' % (decorator, self.name, - ', '.join(args)) - return definition - - def _get_element_kind(self): - return 'Function' - - -class GenerateVariable(_Generate): - - def _get_element(self): - return '%s = None\n' % self.name - - def _get_element_kind(self): - return 'Variable' - - -class GenerateClass(_Generate): - - def _get_element(self): - return 'class %s(object):\n pass\n' % self.name - - def _get_element_kind(self): - return 'Class' - - -class GenerateModule(_Generate): - - def get_changes(self): - package = self.info.get_package() - changes = change.ChangeSet('Generate Module <%s>' % self.name) - new_resource = self.project.get_file('%s/%s.py' % (package.path, self.name)) - if new_resource.exists(): - raise exceptions.RefactoringError( - 'Module <%s> already exists' % new_resource.path) - changes.add_change(change.CreateResource(new_resource)) - changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) - return changes - - def get_location(self): - package = self.info.get_package() - return (package.get_child('%s.py' % self.name) , 1) - - -class GeneratePackage(_Generate): - - def get_changes(self): - package = self.info.get_package() - changes = change.ChangeSet('Generate Package <%s>' % self.name) - new_resource = self.project.get_folder('%s/%s' % (package.path, self.name)) - if new_resource.exists(): - raise exceptions.RefactoringError( - 'Package <%s> already exists' % new_resource.path) - changes.add_change(change.CreateResource(new_resource)) - changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) - child = self.project.get_folder(package.path + '/' + self.name) - changes.add_change(change.CreateFile(child, '__init__.py')) - return changes - - def get_location(self): - package = self.info.get_package() - child = package.get_child(self.name) - return (child.get_child('__init__.py') , 1) - - -def _add_import_to_module(pycore, resource, imported): - pymodule = pycore.resource_to_pyobject(resource) - import_tools = importutils.ImportTools(pycore) - module_imports = import_tools.module_imports(pymodule) - module_name = pycore.modname(imported) - new_import = 
importutils.NormalImport(((module_name, None), )) - module_imports.add_import(new_import) - return change.ChangeContents(resource, module_imports.get_changed_source()) - - -class _GenerationInfo(object): - - def __init__(self, pycore, resource, offset): - self.pycore = pycore - self.resource = resource - self.offset = offset - self.source_pymodule = self.pycore.resource_to_pyobject(resource) - finder = rope.base.evaluate.ScopeNameFinder(self.source_pymodule) - self.primary, self.pyname = finder.get_primary_and_pyname_at(offset) - self._init_fields() - - def _init_fields(self): - self.source_scope = self._get_source_scope() - self.goal_scope = self._get_goal_scope() - self.goal_pymodule = self._get_goal_module(self.goal_scope) - - def _get_goal_scope(self): - if self.primary is None: - return self._get_source_scope() - pyobject = self.primary.get_object() - if isinstance(pyobject, pyobjects.PyDefinedObject): - return pyobject.get_scope() - elif isinstance(pyobject.get_type(), pyobjects.PyClass): - return pyobject.get_type().get_scope() - - def _get_goal_module(self, scope): - if scope is None: - return - while scope.parent is not None: - scope = scope.parent - return scope.pyobject - - def _get_source_scope(self): - module_scope = self.source_pymodule.get_scope() - lineno = self.source_pymodule.lines.get_line_number(self.offset) - return module_scope.get_inner_scope_for_line(lineno) - - def get_insertion_lineno(self): - lines = self.goal_pymodule.lines - if self.goal_scope == self.source_scope: - line_finder = self.goal_pymodule.logical_lines - lineno = lines.get_line_number(self.offset) - lineno = line_finder.logical_line_in(lineno)[0] - root = suites.ast_suite_tree(self.goal_scope.pyobject.get_ast()) - suite = root.find_suite(lineno) - indents = sourceutils.get_indents(lines, lineno) - while self.get_scope_indents() < indents: - lineno = suite.get_start() - indents = sourceutils.get_indents(lines, lineno) - suite = suite.parent - return lineno - else: - return min(self.goal_scope.get_end() + 1, lines.length()) - - def get_insertion_resource(self): - return self.goal_pymodule.get_resource() - - def get_insertion_offsets(self): - if self.goal_scope.get_kind() == 'Class': - start, end = sourceutils.get_body_region(self.goal_scope.pyobject) - if self.goal_pymodule.source_code[start:end].strip() == 'pass': - return start, end - lines = self.goal_pymodule.lines - start = lines.get_line_start(self.get_insertion_lineno()) - return (start, start) - - def get_scope_indents(self): - if self.goal_scope.get_kind() == 'Module': - return 0 - return sourceutils.get_indents(self.goal_pymodule.lines, - self.goal_scope.get_start()) + 4 - - def get_blank_lines(self): - if self.goal_scope.get_kind() == 'Module': - base_blanks = 2 - if self.goal_pymodule.source_code.strip() == '': - base_blanks = 0 - if self.goal_scope.get_kind() == 'Class': - base_blanks = 1 - if self.goal_scope.get_kind() == 'Function': - base_blanks = 0 - if self.goal_scope == self.source_scope: - return (0, base_blanks) - return (base_blanks, 0) - - def get_package(self): - primary = self.primary - if self.primary is None: - return self.pycore.get_source_folders()[0] - if isinstance(primary.get_object(), pyobjects.PyPackage): - return primary.get_object().get_resource() - raise exceptions.RefactoringError( - 'A module/package can be only created in a package.') - - def primary_is_found(self): - return self.goal_scope is not None - - def element_already_exists(self): - if self.pyname is None or isinstance(self.pyname, pynames.UnboundName): 
- return False - return self.get_name() in self.goal_scope.get_defined_names() - - def get_name(self): - return worder.get_name_at(self.resource, self.offset) - - -class _FunctionGenerationInfo(_GenerationInfo): - - def _get_goal_scope(self): - if self.is_constructor(): - return self.pyname.get_object().get_scope() - if self.is_instance(): - return self.pyname.get_object().get_type().get_scope() - if self.primary is None: - return self._get_source_scope() - pyobject = self.primary.get_object() - if isinstance(pyobject, pyobjects.PyDefinedObject): - return pyobject.get_scope() - elif isinstance(pyobject.get_type(), pyobjects.PyClass): - return pyobject.get_type().get_scope() - - def element_already_exists(self): - if self.pyname is None or isinstance(self.pyname, pynames.UnboundName): - return False - return self.get_name() in self.goal_scope.get_defined_names() - - def is_static_method(self): - return self.primary is not None and \ - isinstance(self.primary.get_object(), pyobjects.PyClass) - - def is_method(self): - return self.primary is not None and \ - isinstance(self.primary.get_object().get_type(), pyobjects.PyClass) - - def is_constructor(self): - return self.pyname is not None and \ - isinstance(self.pyname.get_object(), pyobjects.PyClass) - - def is_instance(self): - if self.pyname is None: - return False - pyobject = self.pyname.get_object() - return isinstance(pyobject.get_type(), pyobjects.PyClass) - - def get_name(self): - if self.is_constructor(): - return '__init__' - if self.is_instance(): - return '__call__' - return worder.get_name_at(self.resource, self.offset) - - def get_passed_args(self): - result = [] - source = self.source_pymodule.source_code - finder = worder.Worder(source) - if finder.is_a_function_being_called(self.offset): - start, end = finder.get_primary_range(self.offset) - parens_start, parens_end = finder.get_word_parens_range(end - 1) - call = source[start:parens_end] - parser = functionutils._FunctionParser(call, False) - args, keywords = parser.get_parameters() - for arg in args: - if self._is_id(arg): - result.append(arg) - else: - result.append('arg%d' % len(result)) - for name, value in keywords: - result.append(name) - return result - - def _is_id(self, arg): - def id_or_underline(c): - return c.isalpha() or c == '_' - for c in arg: - if not id_or_underline(c) and not c.isdigit(): - return False - return id_or_underline(arg[0]) diff --git a/pymode/libs3/rope/refactor/__init__.py b/pymode/libs3/rope/refactor/__init__.py deleted file mode 100644 index 10d734c3..00000000 --- a/pymode/libs3/rope/refactor/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -"""rope refactor package - -This package contains modules that perform python refactorings. -Refactoring classes perform refactorings in 4 steps: - -1. Collect some data for performing the refactoring and use them - to construct a refactoring class. Like:: - - renamer = Rename(project, resource, offset) - -2. Some refactorings give you useful information about the - refactoring after their construction. Like:: - - print(renamer.get_old_name()) - -3. Give the refactoring class more information about how to - perform the refactoring and get the changes this refactoring is - going to make. This is done by calling `get_changes` method of the - refactoring class. Like:: - - changes = renamer.get_changes(new_name) - -4. You can commit the changes. Like:: - - project.do(changes) - -These steps are like the steps IDEs usually do for performing a -refactoring. These are the things an IDE does in each step: - -1. 
Construct a refactoring object by giving it information like - resource, offset and ... . Some of the refactoring problems (like - performing rename refactoring on language keywords) can be reported - here. -2. Print some information about the refactoring and ask the user - about the information that are necessary for completing the - refactoring (like new name). -3. Call the `get_changes` by passing it information asked from - the user (if necessary) and get and preview the changes returned by - it. -4. perform the refactoring. - -From ``0.5m5`` release the `get_changes()` method of some time- -consuming refactorings take an optional `rope.base.taskhandle. -TaskHandle` parameter. You can use this object for stopping or -monitoring the progress of refactorings. - -""" -from rope.refactor.importutils import ImportOrganizer -from rope.refactor.topackage import ModuleToPackage - - -__all__ = ['rename', 'move', 'inline', 'extract', 'restructure', 'topackage', - 'importutils', 'usefunction', 'change_signature', - 'encapsulate_field', 'introduce_factory', 'introduce_parameter', - 'localtofield', 'method_object', 'multiproject'] diff --git a/pymode/libs3/rope/refactor/change_signature.py b/pymode/libs3/rope/refactor/change_signature.py deleted file mode 100644 index e7ab25a9..00000000 --- a/pymode/libs3/rope/refactor/change_signature.py +++ /dev/null @@ -1,340 +0,0 @@ -import copy - -import rope.base.exceptions -from rope.base import pyobjects, taskhandle, evaluate, worder, codeanalyze, utils -from rope.base.change import ChangeContents, ChangeSet -from rope.refactor import occurrences, functionutils - - -class ChangeSignature(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self._set_name_and_pyname() - if self.pyname is None or self.pyname.get_object() is None or \ - not isinstance(self.pyname.get_object(), pyobjects.PyFunction): - raise rope.base.exceptions.RefactoringError( - 'Change method signature should be performed on functions') - - def _set_name_and_pyname(self): - self.name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - self.primary, self.pyname = evaluate.eval_location2( - this_pymodule, self.offset) - if self.pyname is None: - return - pyobject = self.pyname.get_object() - if isinstance(pyobject, pyobjects.PyClass) and \ - '__init__' in pyobject: - self.pyname = pyobject['__init__'] - self.name = '__init__' - pyobject = self.pyname.get_object() - self.others = None - if self.name == '__init__' and \ - isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - pyclass = pyobject.parent - self.others = (pyclass.get_name(), - pyclass.parent[pyclass.get_name()]) - - def _change_calls(self, call_changer, in_hierarchy=None, resources=None, - handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Changing signature of <%s>' % self.name) - job_set = handle.create_jobset('Collecting Changes', len(resources)) - finder = occurrences.create_finder( - self.pycore, self.name, self.pyname, instance=self.primary, - in_hierarchy=in_hierarchy and self.is_method()) - if self.others: - name, pyname = self.others - constructor_finder = occurrences.create_finder( - self.pycore, name, pyname, only_calls=True) - finder = _MultipleFinders([finder, constructor_finder]) - for file in resources: - job_set.started_job(file.path) - 
change_calls = _ChangeCallsInModule( - self.pycore, finder, file, call_changer) - changed_file = change_calls.get_changed_module() - if changed_file is not None: - changes.add_change(ChangeContents(file, changed_file)) - job_set.finished_job() - return changes - - def get_args(self): - """Get function arguments. - - Return a list of ``(name, default)`` tuples for all but star - and double star arguments. For arguments that don't have a - default, `None` will be used. - """ - return self._definfo().args_with_defaults - - def is_method(self): - pyfunction = self.pyname.get_object() - return isinstance(pyfunction.parent, pyobjects.PyClass) - - @utils.deprecated('Use `ChangeSignature.get_args()` instead') - def get_definition_info(self): - return self._definfo() - - def _definfo(self): - return functionutils.DefinitionInfo.read(self.pyname.get_object()) - - @utils.deprecated() - def normalize(self): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentNormalizer()]) - return self._change_calls(changer) - - @utils.deprecated() - def remove(self, index): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentRemover(index)]) - return self._change_calls(changer) - - @utils.deprecated() - def add(self, index, name, default=None, value=None): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentAdder(index, name, default, value)]) - return self._change_calls(changer) - - @utils.deprecated() - def inline_default(self, index): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentDefaultInliner(index)]) - return self._change_calls(changer) - - @utils.deprecated() - def reorder(self, new_ordering): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentReorderer(new_ordering)]) - return self._change_calls(changer) - - def get_changes(self, changers, in_hierarchy=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get changes caused by this refactoring - - `changers` is a list of `_ArgumentChanger`\s. If `in_hierarchy` - is `True` the changers are applyed to all matching methods in - the class hierarchy. - `resources` can be a list of `rope.base.resource.File`\s that - should be searched for occurrences; if `None` all python files - in the project are searched. 
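get_changes() takes a list of argument changers; the concrete changer classes (ArgumentAdder, ArgumentRemover, ArgumentReorderer, ...) are defined further down in this same file. A hedged sketch against a hypothetical my_func defined in mod.py::

    from rope.base.project import Project
    from rope.refactor.change_signature import (ArgumentAdder, ArgumentRemover,
                                                ChangeSignature)

    project = Project('.')
    resource = project.get_resource('mod.py')      # hypothetical module
    offset = resource.read().index('my_func')      # point at the function name
    signature = ChangeSignature(project, resource, offset)
    changers = [ArgumentRemover(0),                          # drop the first parameter
                ArgumentAdder(0, 'timeout', default='None')] # then add one in its place
    project.do(signature.get_changes(changers))
    project.close()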
- - """ - function_changer = _FunctionChangers(self.pyname.get_object(), - self._definfo(), changers) - return self._change_calls(function_changer, in_hierarchy, - resources, task_handle) - - -class _FunctionChangers(object): - - def __init__(self, pyfunction, definition_info, changers=None): - self.pyfunction = pyfunction - self.definition_info = definition_info - self.changers = changers - self.changed_definition_infos = self._get_changed_definition_infos() - - def _get_changed_definition_infos(self): - result = [] - definition_info = self.definition_info - result.append(definition_info) - for changer in self.changers: - definition_info = copy.deepcopy(definition_info) - changer.change_definition_info(definition_info) - result.append(definition_info) - return result - - def change_definition(self, call): - return self.changed_definition_infos[-1].to_string() - - def change_call(self, primary, pyname, call): - call_info = functionutils.CallInfo.read( - primary, pyname, self.definition_info, call) - mapping = functionutils.ArgumentMapping(self.definition_info, call_info) - - for definition_info, changer in zip(self.changed_definition_infos, self.changers): - changer.change_argument_mapping(definition_info, mapping) - - return mapping.to_call_info(self.changed_definition_infos[-1]).to_string() - - -class _ArgumentChanger(object): - - def change_definition_info(self, definition_info): - pass - - def change_argument_mapping(self, definition_info, argument_mapping): - pass - - -class ArgumentNormalizer(_ArgumentChanger): - pass - - -class ArgumentRemover(_ArgumentChanger): - - def __init__(self, index): - self.index = index - - def change_definition_info(self, call_info): - if self.index < len(call_info.args_with_defaults): - del call_info.args_with_defaults[self.index] - elif self.index == len(call_info.args_with_defaults) and \ - call_info.args_arg is not None: - call_info.args_arg = None - elif (self.index == len(call_info.args_with_defaults) and - call_info.args_arg is None and call_info.keywords_arg is not None) or \ - (self.index == len(call_info.args_with_defaults) + 1 and - call_info.args_arg is not None and call_info.keywords_arg is not None): - call_info.keywords_arg = None - - def change_argument_mapping(self, definition_info, mapping): - if self.index < len(definition_info.args_with_defaults): - name = definition_info.args_with_defaults[0] - if name in mapping.param_dict: - del mapping.param_dict[name] - - -class ArgumentAdder(_ArgumentChanger): - - def __init__(self, index, name, default=None, value=None): - self.index = index - self.name = name - self.default = default - self.value = value - - def change_definition_info(self, definition_info): - for pair in definition_info.args_with_defaults: - if pair[0] == self.name: - raise rope.base.exceptions.RefactoringError( - 'Adding duplicate parameter: <%s>.' 
% self.name) - definition_info.args_with_defaults.insert(self.index, - (self.name, self.default)) - - def change_argument_mapping(self, definition_info, mapping): - if self.value is not None: - mapping.param_dict[self.name] = self.value - - -class ArgumentDefaultInliner(_ArgumentChanger): - - def __init__(self, index): - self.index = index - self.remove = False - - def change_definition_info(self, definition_info): - if self.remove: - definition_info.args_with_defaults[self.index] = \ - (definition_info.args_with_defaults[self.index][0], None) - - def change_argument_mapping(self, definition_info, mapping): - default = definition_info.args_with_defaults[self.index][1] - name = definition_info.args_with_defaults[self.index][0] - if default is not None and name not in mapping.param_dict: - mapping.param_dict[name] = default - - -class ArgumentReorderer(_ArgumentChanger): - - def __init__(self, new_order, autodef=None): - """Construct an `ArgumentReorderer` - - Note that the `new_order` is a list containing the new - position of parameters; not the position each parameter - is going to be moved to. (changed in ``0.5m4``) - - For example changing ``f(a, b, c)`` to ``f(c, a, b)`` - requires passing ``[2, 0, 1]`` and *not* ``[1, 2, 0]``. - - The `autodef` (automatic default) argument, forces rope to use - it as a default if a default is needed after the change. That - happens when an argument without default is moved after - another that has a default value. Note that `autodef` should - be a string or `None`; the latter disables adding automatic - default. - - """ - self.new_order = new_order - self.autodef = autodef - - def change_definition_info(self, definition_info): - new_args = list(definition_info.args_with_defaults) - for new_index, index in enumerate(self.new_order): - new_args[new_index] = definition_info.args_with_defaults[index] - seen_default = False - for index, (arg, default) in enumerate(list(new_args)): - if default is not None: - seen_default = True - if seen_default and default is None and self.autodef is not None: - new_args[index] = (arg, self.autodef) - definition_info.args_with_defaults = new_args - - -class _ChangeCallsInModule(object): - - def __init__(self, pycore, occurrence_finder, resource, call_changer): - self.pycore = pycore - self.occurrence_finder = occurrence_finder - self.resource = resource - self.call_changer = call_changer - - def get_changed_module(self): - word_finder = worder.Worder(self.source) - change_collector = codeanalyze.ChangeCollector(self.source) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): - if not occurrence.is_called() and not occurrence.is_defined(): - continue - start, end = occurrence.get_primary_range() - begin_parens, end_parens = word_finder.get_word_parens_range(end - 1) - if occurrence.is_called(): - primary, pyname = occurrence.get_primary_and_pyname() - changed_call = self.call_changer.change_call( - primary, pyname, self.source[start:end_parens]) - else: - changed_call = self.call_changer.change_definition( - self.source[start:end_parens]) - if changed_call is not None: - change_collector.add_change(start, end_parens, changed_call) - return change_collector.get_changed() - - @property - @utils.saveit - def pymodule(self): - return self.pycore.resource_to_pyobject(self.resource) - - @property - @utils.saveit - def source(self): - if self.resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def lines(self): - return 
self.pymodule.lines - - -class _MultipleFinders(object): - - def __init__(self, finders): - self.finders = finders - - def find_occurrences(self, resource=None, pymodule=None): - all_occurrences = [] - for finder in self.finders: - all_occurrences.extend(finder.find_occurrences(resource, pymodule)) - all_occurrences.sort(key = lambda o: o.get_primary_range()) - return all_occurrences - diff --git a/pymode/libs3/rope/refactor/encapsulate_field.py b/pymode/libs3/rope/refactor/encapsulate_field.py deleted file mode 100644 index 0e6fea22..00000000 --- a/pymode/libs3/rope/refactor/encapsulate_field.py +++ /dev/null @@ -1,202 +0,0 @@ -from rope.base import pynames, taskhandle, evaluate, exceptions, worder, utils -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import sourceutils, occurrences - - -class EncapsulateField(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.name = worder.get_name_at(resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.pyname = evaluate.eval_location(this_pymodule, offset) - if not self._is_an_attribute(self.pyname): - raise exceptions.RefactoringError( - 'Encapsulate field should be performed on class attributes.') - self.resource = self.pyname.get_definition_location()[0].get_resource() - - def get_changes(self, getter=None, setter=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - If `getter` is not `None`, that will be the name of the - getter, otherwise ``get_${field_name}`` will be used. The - same is true for `setter` and if it is None set_${field_name} is - used. - - `resources` can be a list of `rope.base.resource.File`\s that - the refactoring should be applied on; if `None` all python - files in the project are searched. 
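        A short usage sketch; ``project``, ``resource`` and ``offset`` are
        placeholders for a real project, the file defining the class and an
        offset on the attribute name, and the accessor names are only
        examples:

            from rope.refactor.encapsulate_field import EncapsulateField

            encapsulator = EncapsulateField(project, resource, offset)
            # Rewrites reads/writes of the field as get_name()/set_name(...)
            # calls and adds both accessor methods to the defining class.
            changes = encapsulator.get_changes(getter='get_name', setter='set_name')
            project.do(changes)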
- - """ - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Encapsulate field <%s>' % self.name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - if getter is None: - getter = 'get_' + self.name - if setter is None: - setter = 'set_' + self.name - renamer = GetterSetterRenameInModule( - self.pycore, self.name, self.pyname, getter, setter) - for file in resources: - job_set.started_job(file.path) - if file == self.resource: - result = self._change_holding_module(changes, renamer, - getter, setter) - changes.add_change(ChangeContents(self.resource, result)) - else: - result = renamer.get_changed_module(file) - if result is not None: - changes.add_change(ChangeContents(file, result)) - job_set.finished_job() - return changes - - def get_field_name(self): - """Get the name of the field to be encapsulated""" - return self.name - - def _is_an_attribute(self, pyname): - if pyname is not None and isinstance(pyname, pynames.AssignedName): - pymodule, lineno = self.pyname.get_definition_location() - scope = pymodule.get_scope().\ - get_inner_scope_for_line(lineno) - if scope.get_kind() == 'Class': - return pyname in list(scope.get_names().values()) - parent = scope.parent - if parent is not None and parent.get_kind() == 'Class': - return pyname in list(parent.get_names().values()) - return False - - def _get_defining_class_scope(self): - defining_scope = self._get_defining_scope() - if defining_scope.get_kind() == 'Function': - defining_scope = defining_scope.parent - return defining_scope - - def _get_defining_scope(self): - pymodule, line = self.pyname.get_definition_location() - return pymodule.get_scope().get_inner_scope_for_line(line) - - def _change_holding_module(self, changes, renamer, getter, setter): - pymodule = self.pycore.resource_to_pyobject(self.resource) - class_scope = self._get_defining_class_scope() - defining_object = self._get_defining_scope().pyobject - start, end = sourceutils.get_body_region(defining_object) - - new_source = renamer.get_changed_module(pymodule=pymodule, - skip_start=start, skip_end=end) - if new_source is not None: - pymodule = self.pycore.get_string_module(new_source, self.resource) - class_scope = pymodule.get_scope().\ - get_inner_scope_for_line(class_scope.get_start()) - indents = sourceutils.get_indent(self.pycore) * ' ' - getter = 'def %s(self):\n%sreturn self.%s' % \ - (getter, indents, self.name) - setter = 'def %s(self, value):\n%sself.%s = value' % \ - (setter, indents, self.name) - new_source = sourceutils.add_methods(pymodule, class_scope, - [getter, setter]) - return new_source - - -class GetterSetterRenameInModule(object): - - def __init__(self, pycore, name, pyname, getter, setter): - self.pycore = pycore - self.name = name - self.finder = occurrences.create_finder(pycore, name, pyname) - self.getter = getter - self.setter = setter - - def get_changed_module(self, resource=None, pymodule=None, - skip_start=0, skip_end=0): - change_finder = _FindChangesForModule(self, resource, pymodule, - skip_start, skip_end) - return change_finder.get_changed_module() - - -class _FindChangesForModule(object): - - def __init__(self, finder, resource, pymodule, skip_start, skip_end): - self.pycore = finder.pycore - self.finder = finder.finder - self.getter = finder.getter - self.setter = finder.setter - self.resource = resource - self.pymodule = pymodule - self.last_modified = 0 - self.last_set = None - self.set_index = None - self.skip_start = skip_start - self.skip_end = skip_end - - def 
get_changed_module(self): - result = [] - for occurrence in self.finder.find_occurrences(self.resource, - self.pymodule): - start, end = occurrence.get_word_range() - if self.skip_start <= start < self.skip_end: - continue - self._manage_writes(start, result) - result.append(self.source[self.last_modified:start]) - if self._is_assigned_in_a_tuple_assignment(occurrence): - raise exceptions.RefactoringError( - 'Cannot handle tuple assignments in encapsulate field.') - if occurrence.is_written(): - assignment_type = self.worder.get_assignment_type(start) - if assignment_type == '=': - result.append(self.setter + '(') - else: - var_name = self.source[occurrence.get_primary_range()[0]: - start] + self.getter + '()' - result.append(self.setter + '(' + var_name - + ' %s ' % assignment_type[:-1]) - current_line = self.lines.get_line_number(start) - start_line, end_line = self.pymodule.logical_lines.\ - logical_line_in(current_line) - self.last_set = self.lines.get_line_end(end_line) - end = self.source.index('=', end) + 1 - self.set_index = len(result) - else: - result.append(self.getter + '()') - self.last_modified = end - if self.last_modified != 0: - self._manage_writes(len(self.source), result) - result.append(self.source[self.last_modified:]) - return ''.join(result) - return None - - def _manage_writes(self, offset, result): - if self.last_set is not None and self.last_set <= offset: - result.append(self.source[self.last_modified:self.last_set]) - set_value = ''.join(result[self.set_index:]).strip() - del result[self.set_index:] - result.append(set_value + ')') - self.last_modified = self.last_set - self.last_set = None - - def _is_assigned_in_a_tuple_assignment(self, occurance): - offset = occurance.get_word_range()[0] - return self.worder.is_assigned_in_a_tuple_assignment(offset) - - @property - @utils.saveit - def source(self): - if self.resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def lines(self): - if self.pymodule is None: - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - return self.pymodule.lines - - @property - @utils.saveit - def worder(self): - return worder.Worder(self.source) diff --git a/pymode/libs3/rope/refactor/extract.py b/pymode/libs3/rope/refactor/extract.py deleted file mode 100644 index bb672322..00000000 --- a/pymode/libs3/rope/refactor/extract.py +++ /dev/null @@ -1,789 +0,0 @@ -import re - -from rope.base import ast, codeanalyze -from rope.base.change import ChangeSet, ChangeContents -from rope.base.exceptions import RefactoringError -from rope.refactor import (sourceutils, similarfinder, - patchedast, suites, usefunction) - - -# Extract refactoring has lots of special cases. I tried to split it -# to smaller parts to make it more manageable: -# -# _ExtractInfo: holds information about the refactoring; it is passed -# to the parts that need to have information about the refactoring -# -# _ExtractCollector: merely saves all of the information necessary for -# performing the refactoring. -# -# _DefinitionLocationFinder: finds where to insert the definition. -# -# _ExceptionalConditionChecker: checks for exceptional conditions in -# which the refactoring cannot be applied. -# -# _ExtractMethodParts: generates the pieces of code (like definition) -# needed for performing extract method. -# -# _ExtractVariableParts: like _ExtractMethodParts for variables. -# -# _ExtractPerformer: Uses above classes to collect refactoring -# changes. 
-# -# There are a few more helper functions and classes used by above -# classes. -class _ExtractRefactoring(object): - - def __init__(self, project, resource, start_offset, end_offset, - variable=False): - self.project = project - self.pycore = project.pycore - self.resource = resource - self.start_offset = self._fix_start(resource.read(), start_offset) - self.end_offset = self._fix_end(resource.read(), end_offset) - - def _fix_start(self, source, offset): - while offset < len(source) and source[offset].isspace(): - offset += 1 - return offset - - def _fix_end(self, source, offset): - while offset > 0 and source[offset - 1].isspace(): - offset -= 1 - return offset - - def get_changes(self, extracted_name, similar=False, global_=False): - """Get the changes this refactoring makes - - :parameters: - - `similar`: if `True`, similar expressions/statements are also - replaced. - - `global_`: if `True`, the extracted method/variable will - be global. - - """ - info = _ExtractInfo( - self.project, self.resource, self.start_offset, self.end_offset, - extracted_name, variable=self.kind == 'variable', - similar=similar, make_global=global_) - new_contents = _ExtractPerformer(info).extract() - changes = ChangeSet('Extract %s <%s>' % (self.kind, - extracted_name)) - changes.add_change(ChangeContents(self.resource, new_contents)) - return changes - - -class ExtractMethod(_ExtractRefactoring): - - def __init__(self, *args, **kwds): - super(ExtractMethod, self).__init__(*args, **kwds) - - kind = 'method' - - -class ExtractVariable(_ExtractRefactoring): - - def __init__(self, *args, **kwds): - kwds = dict(kwds) - kwds['variable'] = True - super(ExtractVariable, self).__init__(*args, **kwds) - - kind = 'variable' - - -class _ExtractInfo(object): - """Holds information about the extract to be performed""" - - def __init__(self, project, resource, start, end, new_name, - variable, similar, make_global): - self.pycore = project.pycore - self.resource = resource - self.pymodule = self.pycore.resource_to_pyobject(resource) - self.global_scope = self.pymodule.get_scope() - self.source = self.pymodule.source_code - self.lines = self.pymodule.lines - self.new_name = new_name - self.variable = variable - self.similar = similar - self._init_parts(start, end) - self._init_scope() - self.make_global = make_global - - def _init_parts(self, start, end): - self.region = (self._choose_closest_line_end(start), - self._choose_closest_line_end(end, end=True)) - - start = self.logical_lines.logical_line_in( - self.lines.get_line_number(self.region[0]))[0] - end = self.logical_lines.logical_line_in( - self.lines.get_line_number(self.region[1]))[1] - self.region_lines = (start, end) - - self.lines_region = (self.lines.get_line_start(self.region_lines[0]), - self.lines.get_line_end(self.region_lines[1])) - - @property - def logical_lines(self): - return self.pymodule.logical_lines - - def _init_scope(self): - start_line = self.region_lines[0] - scope = self.global_scope.get_inner_scope_for_line(start_line) - if scope.get_kind() != 'Module' and scope.get_start() == start_line: - scope = scope.parent - self.scope = scope - self.scope_region = self._get_scope_region(self.scope) - - def _get_scope_region(self, scope): - return (self.lines.get_line_start(scope.get_start()), - self.lines.get_line_end(scope.get_end()) + 1) - - def _choose_closest_line_end(self, offset, end=False): - lineno = self.lines.get_line_number(offset) - line_start = self.lines.get_line_start(lineno) - line_end = self.lines.get_line_end(lineno) - if 
self.source[line_start:offset].strip() == '': - if end: - return line_start - 1 - else: - return line_start - elif self.source[offset:line_end].strip() == '': - return min(line_end, len(self.source)) - return offset - - @property - def one_line(self): - return self.region != self.lines_region and \ - (self.logical_lines.logical_line_in(self.region_lines[0]) == - self.logical_lines.logical_line_in(self.region_lines[1])) - - @property - def global_(self): - return self.scope.parent is None - - @property - def method(self): - return self.scope.parent is not None and \ - self.scope.parent.get_kind() == 'Class' - - @property - def indents(self): - return sourceutils.get_indents(self.pymodule.lines, - self.region_lines[0]) - - @property - def scope_indents(self): - if self.global_: - return 0 - return sourceutils.get_indents(self.pymodule.lines, - self.scope.get_start()) - - @property - def extracted(self): - return self.source[self.region[0]:self.region[1]] - - _returned = None - @property - def returned(self): - """Does the extracted piece contain return statement""" - if self._returned is None: - node = _parse_text(self.extracted) - self._returned = usefunction._returns_last(node) - return self._returned - - -class _ExtractCollector(object): - """Collects information needed for performing the extract""" - - def __init__(self, info): - self.definition = None - self.body_pattern = None - self.checks = {} - self.replacement_pattern = None - self.matches = None - self.replacements = None - self.definition_location = None - - -class _ExtractPerformer(object): - - def __init__(self, info): - self.info = info - _ExceptionalConditionChecker()(self.info) - - def extract(self): - extract_info = self._collect_info() - content = codeanalyze.ChangeCollector(self.info.source) - definition = extract_info.definition - lineno, indents = extract_info.definition_location - offset = self.info.lines.get_line_start(lineno) - indented = sourceutils.fix_indentation(definition, indents) - content.add_change(offset, offset, indented) - self._replace_occurrences(content, extract_info) - return content.get_changed() - - def _replace_occurrences(self, content, extract_info): - for match in extract_info.matches: - replacement = similarfinder.CodeTemplate( - extract_info.replacement_pattern) - mapping = {} - for name in replacement.get_names(): - node = match.get_ast(name) - if node: - start, end = patchedast.node_region(match.get_ast(name)) - mapping[name] = self.info.source[start:end] - else: - mapping[name] = name - region = match.get_region() - content.add_change(region[0], region[1], - replacement.substitute(mapping)) - - def _collect_info(self): - extract_collector = _ExtractCollector(self.info) - self._find_definition(extract_collector) - self._find_matches(extract_collector) - self._find_definition_location(extract_collector) - return extract_collector - - def _find_matches(self, collector): - regions = self._where_to_search() - finder = similarfinder.SimilarFinder(self.info.pymodule) - matches = [] - for start, end in regions: - matches.extend((finder.get_matches(collector.body_pattern, - collector.checks, start, end))) - collector.matches = matches - - def _where_to_search(self): - if self.info.similar: - if self.info.make_global or self.info.global_: - return [(0, len(self.info.pymodule.source_code))] - if self.info.method and not self.info.variable: - class_scope = self.info.scope.parent - regions = [] - method_kind = _get_function_kind(self.info.scope) - for scope in class_scope.get_scopes(): - if method_kind 
== 'method' and \ - _get_function_kind(scope) != 'method': - continue - start = self.info.lines.get_line_start(scope.get_start()) - end = self.info.lines.get_line_end(scope.get_end()) - regions.append((start, end)) - return regions - else: - if self.info.variable: - return [self.info.scope_region] - else: - return [self.info._get_scope_region(self.info.scope.parent)] - else: - return [self.info.region] - - def _find_definition_location(self, collector): - matched_lines = [] - for match in collector.matches: - start = self.info.lines.get_line_number(match.get_region()[0]) - start_line = self.info.logical_lines.logical_line_in(start)[0] - matched_lines.append(start_line) - location_finder = _DefinitionLocationFinder(self.info, matched_lines) - collector.definition_location = (location_finder.find_lineno(), - location_finder.find_indents()) - - def _find_definition(self, collector): - if self.info.variable: - parts = _ExtractVariableParts(self.info) - else: - parts = _ExtractMethodParts(self.info) - collector.definition = parts.get_definition() - collector.body_pattern = parts.get_body_pattern() - collector.replacement_pattern = parts.get_replacement_pattern() - collector.checks = parts.get_checks() - - -class _DefinitionLocationFinder(object): - - def __init__(self, info, matched_lines): - self.info = info - self.matched_lines = matched_lines - # This only happens when subexpressions cannot be matched - if not matched_lines: - self.matched_lines.append(self.info.region_lines[0]) - - def find_lineno(self): - if self.info.variable and not self.info.make_global: - return self._get_before_line() - if self.info.make_global or self.info.global_: - toplevel = self._find_toplevel(self.info.scope) - ast = self.info.pymodule.get_ast() - newlines = sorted(self.matched_lines + [toplevel.get_end() + 1]) - return suites.find_visible(ast, newlines) - return self._get_after_scope() - - def _find_toplevel(self, scope): - toplevel = scope - if toplevel.parent is not None: - while toplevel.parent.parent is not None: - toplevel = toplevel.parent - return toplevel - - def find_indents(self): - if self.info.variable and not self.info.make_global: - return sourceutils.get_indents(self.info.lines, - self._get_before_line()) - else: - if self.info.global_ or self.info.make_global: - return 0 - return self.info.scope_indents - - def _get_before_line(self): - ast = self.info.scope.pyobject.get_ast() - return suites.find_visible(ast, self.matched_lines) - - def _get_after_scope(self): - return self.info.scope.get_end() + 1 - - -class _ExceptionalConditionChecker(object): - - def __call__(self, info): - self.base_conditions(info) - if info.one_line: - self.one_line_conditions(info) - else: - self.multi_line_conditions(info) - - def base_conditions(self, info): - if info.region[1] > info.scope_region[1]: - raise RefactoringError('Bad region selected for extract method') - end_line = info.region_lines[1] - end_scope = info.global_scope.get_inner_scope_for_line(end_line) - if end_scope != info.scope and end_scope.get_end() != end_line: - raise RefactoringError('Bad region selected for extract method') - try: - extracted = info.source[info.region[0]:info.region[1]] - if info.one_line: - extracted = '(%s)' % extracted - if _UnmatchedBreakOrContinueFinder.has_errors(extracted): - raise RefactoringError('A break/continue without having a ' - 'matching for/while loop.') - except SyntaxError: - raise RefactoringError('Extracted piece should ' - 'contain complete statements.') - - def one_line_conditions(self, info): - if 
self._is_region_on_a_word(info): - raise RefactoringError('Should extract complete statements.') - if info.variable and not info.one_line: - raise RefactoringError('Extract variable should not ' - 'span multiple lines.') - - def multi_line_conditions(self, info): - node = _parse_text(info.source[info.region[0]:info.region[1]]) - count = usefunction._return_count(node) - if count > 1: - raise RefactoringError('Extracted piece can have only one ' - 'return statement.') - if usefunction._yield_count(node): - raise RefactoringError('Extracted piece cannot ' - 'have yield statements.') - if count == 1 and not usefunction._returns_last(node): - raise RefactoringError('Return should be the last statement.') - if info.region != info.lines_region: - raise RefactoringError('Extracted piece should ' - 'contain complete statements.') - - def _is_region_on_a_word(self, info): - if info.region[0] > 0 and self._is_on_a_word(info, info.region[0] - 1) or \ - self._is_on_a_word(info, info.region[1] - 1): - return True - - def _is_on_a_word(self, info, offset): - prev = info.source[offset] - if not (prev.isalnum() or prev == '_') or \ - offset + 1 == len(info.source): - return False - next = info.source[offset + 1] - return next.isalnum() or next == '_' - - -class _ExtractMethodParts(object): - - def __init__(self, info): - self.info = info - self.info_collector = self._create_info_collector() - - def get_definition(self): - if self.info.global_: - return '\n%s\n' % self._get_function_definition() - else: - return '\n%s' % self._get_function_definition() - - def get_replacement_pattern(self): - variables = [] - variables.extend(self._find_function_arguments()) - variables.extend(self._find_function_returns()) - return similarfinder.make_pattern(self._get_call(), variables) - - def get_body_pattern(self): - variables = [] - variables.extend(self._find_function_arguments()) - variables.extend(self._find_function_returns()) - variables.extend(self._find_temps()) - return similarfinder.make_pattern(self._get_body(), variables) - - def _get_body(self): - result = sourceutils.fix_indentation(self.info.extracted, 0) - if self.info.one_line: - result = '(%s)' % result - return result - - def _find_temps(self): - return usefunction.find_temps(self.info.pycore.project, - self._get_body()) - - def get_checks(self): - if self.info.method and not self.info.make_global: - if _get_function_kind(self.info.scope) == 'method': - class_name = similarfinder._pydefined_to_str( - self.info.scope.parent.pyobject) - return {self._get_self_name(): 'type=' + class_name} - return {} - - def _create_info_collector(self): - zero = self.info.scope.get_start() - 1 - start_line = self.info.region_lines[0] - zero - end_line = self.info.region_lines[1] - zero - info_collector = _FunctionInformationCollector(start_line, end_line, - self.info.global_) - body = self.info.source[self.info.scope_region[0]: - self.info.scope_region[1]] - node = _parse_text(body) - ast.walk(node, info_collector) - return info_collector - - def _get_function_definition(self): - args = self._find_function_arguments() - returns = self._find_function_returns() - result = [] - if self.info.method and not self.info.make_global and \ - _get_function_kind(self.info.scope) != 'method': - result.append('@staticmethod\n') - result.append('def %s:\n' % self._get_function_signature(args)) - unindented_body = self._get_unindented_function_body(returns) - indents = sourceutils.get_indent(self.info.pycore) - function_body = sourceutils.indent_lines(unindented_body, indents) - 
result.append(function_body) - definition = ''.join(result) - - return definition + '\n' - - def _get_function_signature(self, args): - args = list(args) - prefix = '' - if self._extracting_method(): - self_name = self._get_self_name() - if self_name is None: - raise RefactoringError('Extracting a method from a function ' - 'with no self argument.') - if self_name in args: - args.remove(self_name) - args.insert(0, self_name) - return prefix + self.info.new_name + \ - '(%s)' % self._get_comma_form(args) - - def _extracting_method(self): - return self.info.method and not self.info.make_global and \ - _get_function_kind(self.info.scope) == 'method' - - def _get_self_name(self): - param_names = self.info.scope.pyobject.get_param_names() - if param_names: - return param_names[0] - - def _get_function_call(self, args): - prefix = '' - if self.info.method and not self.info.make_global: - if _get_function_kind(self.info.scope) == 'method': - self_name = self._get_self_name() - if self_name in args: - args.remove(self_name) - prefix = self_name + '.' - else: - prefix = self.info.scope.parent.pyobject.get_name() + '.' - return prefix + '%s(%s)' % (self.info.new_name, - self._get_comma_form(args)) - - def _get_comma_form(self, names): - result = '' - if names: - result += names[0] - for name in names[1:]: - result += ', ' + name - return result - - def _get_call(self): - if self.info.one_line: - args = self._find_function_arguments() - return self._get_function_call(args) - args = self._find_function_arguments() - returns = self._find_function_returns() - call_prefix = '' - if returns: - call_prefix = self._get_comma_form(returns) + ' = ' - if self.info.returned: - call_prefix = 'return ' - return call_prefix + self._get_function_call(args) - - def _find_function_arguments(self): - # if not make_global, do not pass any global names; they are - # all visible. 
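        # (Names read by a module-level extraction stay visible as module
        # globals after the extraction, so the new function can be called
        # with no arguments at all.)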
- if self.info.global_ and not self.info.make_global: - return () - if not self.info.one_line: - result = (self.info_collector.prewritten & - self.info_collector.read) - result |= (self.info_collector.prewritten & - self.info_collector.postread & - (self.info_collector.maybe_written - - self.info_collector.written)) - return list(result) - start = self.info.region[0] - if start == self.info.lines_region[0]: - start = start + re.search('\S', self.info.extracted).start() - function_definition = self.info.source[start:self.info.region[1]] - read = _VariableReadsAndWritesFinder.find_reads_for_one_liners( - function_definition) - return list(self.info_collector.prewritten.intersection(read)) - - def _find_function_returns(self): - if self.info.one_line or self.info.returned: - return [] - written = self.info_collector.written | \ - self.info_collector.maybe_written - return list(written & self.info_collector.postread) - - def _get_unindented_function_body(self, returns): - if self.info.one_line: - return 'return ' + _join_lines(self.info.extracted) - extracted_body = self.info.extracted - unindented_body = sourceutils.fix_indentation(extracted_body, 0) - if returns: - unindented_body += '\nreturn %s' % self._get_comma_form(returns) - return unindented_body - - -class _ExtractVariableParts(object): - - def __init__(self, info): - self.info = info - - def get_definition(self): - result = self.info.new_name + ' = ' + \ - _join_lines(self.info.extracted) + '\n' - return result - - def get_body_pattern(self): - return '(%s)' % self.info.extracted.strip() - - def get_replacement_pattern(self): - return self.info.new_name - - def get_checks(self): - return {} - - -class _FunctionInformationCollector(object): - - def __init__(self, start, end, is_global): - self.start = start - self.end = end - self.is_global = is_global - self.prewritten = set() - self.maybe_written = set() - self.written = set() - self.read = set() - self.postread = set() - self.postwritten = set() - self.host_function = True - self.conditional = False - - def _read_variable(self, name, lineno): - if self.start <= lineno <= self.end: - if name not in self.written: - self.read.add(name) - if self.end < lineno: - if name not in self.postwritten: - self.postread.add(name) - - def _written_variable(self, name, lineno): - if self.start <= lineno <= self.end: - if self.conditional: - self.maybe_written.add(name) - else: - self.written.add(name) - if self.start > lineno: - self.prewritten.add(name) - if self.end < lineno: - self.postwritten.add(name) - - def _FunctionDef(self, node): - if not self.is_global and self.host_function: - self.host_function = False - for name in _get_argnames(node.args): - self._written_variable(name, node.lineno) - for child in node.body: - ast.walk(child, self) - else: - self._written_variable(node.name, node.lineno) - visitor = _VariableReadsAndWritesFinder() - for child in node.body: - ast.walk(child, visitor) - for name in visitor.read - visitor.written: - self._read_variable(name, node.lineno) - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.AugStore)): - self._written_variable(node.id, node.lineno) - if not isinstance(node.ctx, ast.Store): - self._read_variable(node.id, node.lineno) - - def _Assign(self, node): - ast.walk(node.value, self) - for child in node.targets: - ast.walk(child, self) - - def _ClassDef(self, node): - self._written_variable(node.name, node.lineno) - - def _handle_conditional_node(self, node): - self.conditional = True - try: - for child in 
ast.get_child_nodes(node): - ast.walk(child, self) - finally: - self.conditional = False - - def _If(self, node): - self._handle_conditional_node(node) - - def _While(self, node): - self._handle_conditional_node(node) - - def _For(self, node): - self._handle_conditional_node(node) - - - -def _get_argnames(arguments): - result = [node.arg for node in arguments.args - if isinstance(node, ast.arg)] - if arguments.vararg: - result.append(arguments.vararg) - if arguments.kwarg: - result.append(arguments.kwarg) - return result - - -class _VariableReadsAndWritesFinder(object): - - def __init__(self): - self.written = set() - self.read = set() - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.AugStore)): - self.written.add(node.id) - if not isinstance(node, ast.Store): - self.read.add(node.id) - - def _FunctionDef(self, node): - self.written.add(node.name) - visitor = _VariableReadsAndWritesFinder() - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - self.read.update(visitor.read - visitor.written) - - def _Class(self, node): - self.written.add(node.name) - - @staticmethod - def find_reads_and_writes(code): - if code.strip() == '': - return set(), set() - if isinstance(code, str): - code = code.encode('utf-8') - node = _parse_text(code) - visitor = _VariableReadsAndWritesFinder() - ast.walk(node, visitor) - return visitor.read, visitor.written - - @staticmethod - def find_reads_for_one_liners(code): - if code.strip() == '': - return set(), set() - node = _parse_text(code) - visitor = _VariableReadsAndWritesFinder() - ast.walk(node, visitor) - return visitor.read - - -class _UnmatchedBreakOrContinueFinder(object): - - def __init__(self): - self.error = False - self.loop_count = 0 - - def _For(self, node): - self.loop_encountered(node) - - def _While(self, node): - self.loop_encountered(node) - - def loop_encountered(self, node): - self.loop_count += 1 - for child in node.body: - ast.walk(child, self) - self.loop_count -= 1 - if node.orelse: - ast.walk(node.orelse, self) - - def _Break(self, node): - self.check_loop() - - def _Continue(self, node): - self.check_loop() - - def check_loop(self): - if self.loop_count < 1: - self.error = True - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - @staticmethod - def has_errors(code): - if code.strip() == '': - return False - node = _parse_text(code) - visitor = _UnmatchedBreakOrContinueFinder() - ast.walk(node, visitor) - return visitor.error - -def _get_function_kind(scope): - return scope.pyobject.get_kind() - - -def _parse_text(body): - body = sourceutils.fix_indentation(body, 0) - node = ast.parse(body) - return node - -def _join_lines(code): - lines = [] - for line in code.splitlines(): - if line.endswith('\\'): - lines.append(line[:-1].strip()) - else: - lines.append(line.strip()) - return ' '.join(lines) diff --git a/pymode/libs3/rope/refactor/functionutils.py b/pymode/libs3/rope/refactor/functionutils.py deleted file mode 100644 index a653b9db..00000000 --- a/pymode/libs3/rope/refactor/functionutils.py +++ /dev/null @@ -1,222 +0,0 @@ -import rope.base.exceptions -import rope.base.pyobjects -from rope.base.builtins import Lambda -from rope.base import worder - - -class DefinitionInfo(object): - - def __init__(self, function_name, is_method, args_with_defaults, - args_arg, keywords_arg): - self.function_name = function_name - self.is_method = is_method - self.args_with_defaults = args_with_defaults - self.args_arg = args_arg - self.keywords_arg = keywords_arg - - def 
to_string(self): - return '%s(%s)' % (self.function_name, self.arguments_to_string()) - - def arguments_to_string(self, from_index=0): - params = [] - for arg, default in self.args_with_defaults: - if default is not None: - params.append('%s=%s' % (arg, default)) - else: - params.append(arg) - if self.args_arg is not None: - params.append('*' + self.args_arg) - if self.keywords_arg: - params.append('**' + self.keywords_arg) - return ', '.join(params[from_index:]) - - @staticmethod - def _read(pyfunction, code): - scope = pyfunction.get_scope() - parent = scope.parent - parameter_names = pyfunction.get_param_names() - kind = pyfunction.get_kind() - is_method = kind == 'method' - is_lambda = kind == 'lambda' - info = _FunctionParser(code, is_method, is_lambda) - args, keywords = info.get_parameters() - args_arg = None - keywords_arg = None - if args and args[-1].startswith('**'): - keywords_arg = args[-1][2:] - del args[-1] - if args and args[-1].startswith('*'): - args_arg = args[-1][1:] - del args[-1] - args_with_defaults = [(name, None) for name in args] - args_with_defaults.extend(keywords) - return DefinitionInfo(info.get_function_name(), is_method, - args_with_defaults, args_arg, keywords_arg) - - @staticmethod - def read(pyfunction): - pymodule = pyfunction.get_module() - word_finder = worder.Worder(pymodule.source_code) - lineno = pyfunction.get_ast().lineno - start = pymodule.lines.get_line_start(lineno) - if isinstance(pyfunction, Lambda): - call = word_finder.get_lambda_and_args(start) - else: - call = word_finder.get_function_and_args_in_header(start) - return DefinitionInfo._read(pyfunction, call) - - -class CallInfo(object): - - def __init__(self, function_name, args, keywords, args_arg, - keywords_arg, implicit_arg, constructor): - self.function_name = function_name - self.args = args - self.keywords = keywords - self.args_arg = args_arg - self.keywords_arg = keywords_arg - self.implicit_arg = implicit_arg - self.constructor = constructor - - def to_string(self): - function = self.function_name - if self.implicit_arg: - function = self.args[0] + '.' 
+ self.function_name - params = [] - start = 0 - if self.implicit_arg or self.constructor: - start = 1 - if self.args[start:]: - params.extend(self.args[start:]) - if self.keywords: - params.extend(['%s=%s' % (name, value) for name, value in self.keywords]) - if self.args_arg is not None: - params.append('*' + self.args_arg) - if self.keywords_arg: - params.append('**' + self.keywords_arg) - return '%s(%s)' % (function, ', '.join(params)) - - @staticmethod - def read(primary, pyname, definition_info, code): - is_method_call = CallInfo._is_method_call(primary, pyname) - is_constructor = CallInfo._is_class(pyname) - is_classmethod = CallInfo._is_classmethod(pyname) - info = _FunctionParser(code, is_method_call or is_classmethod) - args, keywords = info.get_parameters() - args_arg = None - keywords_arg = None - if args and args[-1].startswith('**'): - keywords_arg = args[-1][2:] - del args[-1] - if args and args[-1].startswith('*'): - args_arg = args[-1][1:] - del args[-1] - if is_constructor: - args.insert(0, definition_info.args_with_defaults[0][0]) - return CallInfo(info.get_function_name(), args, keywords, args_arg, - keywords_arg, is_method_call or is_classmethod, - is_constructor) - - @staticmethod - def _is_method_call(primary, pyname): - return primary is not None and \ - isinstance(primary.get_object().get_type(), - rope.base.pyobjects.PyClass) and \ - CallInfo._is_method(pyname) - - @staticmethod - def _is_class(pyname): - return pyname is not None and \ - isinstance(pyname.get_object(), - rope.base.pyobjects.PyClass) - - @staticmethod - def _is_method(pyname): - if pyname is not None and \ - isinstance(pyname.get_object(), rope.base.pyobjects.PyFunction): - return pyname.get_object().get_kind() == 'method' - return False - - @staticmethod - def _is_classmethod(pyname): - if pyname is not None and \ - isinstance(pyname.get_object(), rope.base.pyobjects.PyFunction): - return pyname.get_object().get_kind() == 'classmethod' - return False - - -class ArgumentMapping(object): - - def __init__(self, definition_info, call_info): - self.call_info = call_info - self.param_dict = {} - self.keyword_args = [] - self.args_arg = [] - for index, value in enumerate(call_info.args): - if index < len(definition_info.args_with_defaults): - name = definition_info.args_with_defaults[index][0] - self.param_dict[name] = value - else: - self.args_arg.append(value) - for name, value in call_info.keywords: - index = -1 - for pair in definition_info.args_with_defaults: - if pair[0] == name: - self.param_dict[name] = value - break - else: - self.keyword_args.append((name, value)) - - def to_call_info(self, definition_info): - args = [] - keywords = [] - for index in range(len(definition_info.args_with_defaults)): - name = definition_info.args_with_defaults[index][0] - if name in self.param_dict: - args.append(self.param_dict[name]) - else: - for i in range(index, len(definition_info.args_with_defaults)): - name = definition_info.args_with_defaults[i][0] - if name in self.param_dict: - keywords.append((name, self.param_dict[name])) - break - args.extend(self.args_arg) - keywords.extend(self.keyword_args) - return CallInfo(self.call_info.function_name, args, keywords, - self.call_info.args_arg, self.call_info.keywords_arg, - self.call_info.implicit_arg, self.call_info.constructor) - - -class _FunctionParser(object): - - def __init__(self, call, implicit_arg, is_lambda=False): - self.call = call - self.implicit_arg = implicit_arg - self.word_finder = worder.Worder(self.call) - if is_lambda: - self.last_parens = 
self.call.rindex(':') - else: - self.last_parens = self.call.rindex(')') - self.first_parens = self.word_finder._find_parens_start(self.last_parens) - - def get_parameters(self): - args, keywords = self.word_finder.get_parameters(self.first_parens, - self.last_parens) - if self.is_called_as_a_method(): - instance = self.call[:self.call.rindex('.', 0, self.first_parens)] - args.insert(0, instance.strip()) - return args, keywords - - def get_instance(self): - if self.is_called_as_a_method(): - return self.word_finder.get_primary_at( - self.call.rindex('.', 0, self.first_parens) - 1) - - def get_function_name(self): - if self.is_called_as_a_method(): - return self.word_finder.get_word_at(self.first_parens - 1) - else: - return self.word_finder.get_primary_at(self.first_parens - 1) - - def is_called_as_a_method(self): - return self.implicit_arg and '.' in self.call[:self.first_parens] diff --git a/pymode/libs3/rope/refactor/importutils/__init__.py b/pymode/libs3/rope/refactor/importutils/__init__.py deleted file mode 100644 index 2a86edb0..00000000 --- a/pymode/libs3/rope/refactor/importutils/__init__.py +++ /dev/null @@ -1,299 +0,0 @@ -"""A package for handling imports - -This package provides tools for modifying module imports after -refactorings or as a separate task. - -""" -import rope.base.evaluate -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import occurrences, rename -from rope.refactor.importutils import module_imports, actions -from rope.refactor.importutils.importinfo import NormalImport, FromImport -import rope.base.codeanalyze - - -class ImportOrganizer(object): - """Perform some import-related commands - - Each method returns a `rope.base.change.Change` object. - - """ - - def __init__(self, project): - self.project = project - self.pycore = project.pycore - self.import_tools = ImportTools(self.pycore) - - def organize_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.organize_imports, resource, offset) - - def expand_star_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.expand_stars, resource, offset) - - def froms_to_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.froms_to_imports, resource, offset) - - def relatives_to_absolutes(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.relatives_to_absolutes, resource, offset) - - def handle_long_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.handle_long_imports, resource, offset) - - def _perform_command_on_import_tools(self, method, resource, offset): - pymodule = self.pycore.resource_to_pyobject(resource) - before_performing = pymodule.source_code - import_filter = None - if offset is not None: - import_filter = self._line_filter( - pymodule.lines.get_line_number(offset)) - result = method(pymodule, import_filter=import_filter) - if result is not None and result != before_performing: - changes = ChangeSet(method.__name__.replace('_', ' ') + - ' in <%s>' % resource.path) - changes.add_change(ChangeContents(resource, result)) - return changes - - def _line_filter(self, lineno): - def import_filter(import_stmt): - return import_stmt.start_line <= lineno < import_stmt.end_line - return import_filter - - -class ImportTools(object): - - def __init__(self, pycore): - self.pycore = pycore - - def get_import(self, resource): - """The 
import statement for `resource`""" - module_name = self.pycore.modname(resource) - return NormalImport(((module_name, None), )) - - def get_from_import(self, resource, name): - """The from import statement for `name` in `resource`""" - module_name = self.pycore.modname(resource) - names = [] - if isinstance(name, list): - names = [(imported, None) for imported in name] - else: - names = [(name, None),] - return FromImport(module_name, 0, tuple(names)) - - def module_imports(self, module, imports_filter=None): - return module_imports.ModuleImports(self.pycore, module, - imports_filter) - - def froms_to_imports(self, pymodule, import_filter=None): - pymodule = self._clean_up_imports(pymodule, import_filter) - module_imports = self.module_imports(pymodule, import_filter) - for import_stmt in module_imports.imports: - if import_stmt.readonly or \ - not self._is_transformable_to_normal(import_stmt.import_info): - continue - pymodule = self._from_to_normal(pymodule, import_stmt) - - # Adding normal imports in place of froms - module_imports = self.module_imports(pymodule, import_filter) - for import_stmt in module_imports.imports: - if not import_stmt.readonly and \ - self._is_transformable_to_normal(import_stmt.import_info): - import_stmt.import_info = \ - NormalImport(((import_stmt.import_info.module_name, None),)) - module_imports.remove_duplicates() - return module_imports.get_changed_source() - - def expand_stars(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - module_imports.expand_stars() - return module_imports.get_changed_source() - - def _from_to_normal(self, pymodule, import_stmt): - resource = pymodule.get_resource() - from_import = import_stmt.import_info - module_name = from_import.module_name - for name, alias in from_import.names_and_aliases: - imported = name - if alias is not None: - imported = alias - occurrence_finder = occurrences.create_finder( - self.pycore, imported, pymodule[imported], imports=False) - source = rename.rename_in_module( - occurrence_finder, module_name + '.' 
+ name, - pymodule=pymodule, replace_primary=True) - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - return pymodule - - def _clean_up_imports(self, pymodule, import_filter): - resource = pymodule.get_resource() - module_with_imports = self.module_imports(pymodule, import_filter) - module_with_imports.expand_stars() - source = module_with_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - source = self.relatives_to_absolutes(pymodule) - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - - module_with_imports = self.module_imports(pymodule, import_filter) - module_with_imports.remove_duplicates() - module_with_imports.remove_unused_imports() - source = module_with_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - return pymodule - - def relatives_to_absolutes(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - to_be_absolute_list = module_imports.get_relative_to_absolute_list() - for name, absolute_name in to_be_absolute_list: - pymodule = self._rename_in_module(pymodule, name, absolute_name) - module_imports = self.module_imports(pymodule, import_filter) - module_imports.get_relative_to_absolute_list() - source = module_imports.get_changed_source() - if source is None: - source = pymodule.source_code - return source - - def _is_transformable_to_normal(self, import_info): - if not isinstance(import_info, FromImport): - return False - return True - - def organize_imports(self, pymodule, - unused=True, duplicates=True, - selfs=True, sort=True, import_filter=None): - if unused or duplicates: - module_imports = self.module_imports(pymodule, import_filter) - if unused: - module_imports.remove_unused_imports() - if duplicates: - module_imports.remove_duplicates() - source = module_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module( - source, pymodule.get_resource()) - if selfs: - pymodule = self._remove_self_imports(pymodule, import_filter) - if sort: - return self.sort_imports(pymodule, import_filter) - else: - return pymodule.source_code - - def _remove_self_imports(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - to_be_fixed, to_be_renamed = module_imports.get_self_import_fix_and_rename_list() - for name in to_be_fixed: - try: - pymodule = self._rename_in_module(pymodule, name, '', till_dot=True) - except ValueError: - # There is a self import with direct access to it - return pymodule - for name, new_name in to_be_renamed: - pymodule = self._rename_in_module(pymodule, name, new_name) - module_imports = self.module_imports(pymodule, import_filter) - module_imports.get_self_import_fix_and_rename_list() - source = module_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) - return pymodule - - def _rename_in_module(self, pymodule, name, new_name, till_dot=False): - old_name = name.split('.')[-1] - old_pyname = rope.base.evaluate.eval_str(pymodule.get_scope(), name) - occurrence_finder = occurrences.create_finder( - self.pycore, old_name, old_pyname, imports=False) - changes = rope.base.codeanalyze.ChangeCollector(pymodule.source_code) - for occurrence in occurrence_finder.find_occurrences(pymodule=pymodule): - start, end = occurrence.get_primary_range() - if till_dot: 
- new_end = pymodule.source_code.index('.', end) + 1 - space = pymodule.source_code[end:new_end - 1].strip() - if not space == '': - for c in space: - if not c.isspace() and c not in '\\': - raise ValueError() - end = new_end - changes.add_change(start, end, new_name) - source = changes.get_changed() - if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) - return pymodule - - def sort_imports(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - module_imports.sort_imports() - return module_imports.get_changed_source() - - def handle_long_imports(self, pymodule, maxdots=2, maxlength=27, - import_filter=None): - # IDEA: `maxdots` and `maxlength` can be specified in project config - # adding new from imports - module_imports = self.module_imports(pymodule, import_filter) - to_be_fixed = module_imports.handle_long_imports(maxdots, maxlength) - # performing the renaming - pymodule = self.pycore.get_string_module( - module_imports.get_changed_source(), - resource=pymodule.get_resource()) - for name in to_be_fixed: - pymodule = self._rename_in_module(pymodule, name, - name.split('.')[-1]) - # organizing imports - return self.organize_imports(pymodule, selfs=False, sort=False, - import_filter=import_filter) - - -def get_imports(pycore, pydefined): - """A shortcut for getting the `ImportInfo`\s used in a scope""" - pymodule = pydefined.get_module() - module = module_imports.ModuleImports(pycore, pymodule) - if pymodule == pydefined: - return [stmt.import_info for stmt in module.imports] - return module.get_used_imports(pydefined) - - -def get_module_imports(pycore, pymodule): - """A shortcut for creating a `module_imports.ModuleImports` object""" - return module_imports.ModuleImports(pycore, pymodule) - - -def add_import(pycore, pymodule, module_name, name=None): - imports = get_module_imports(pycore, pymodule) - candidates = [] - names = [] - # from mod import name - if name is not None: - from_import = FromImport(module_name, 0, [(name, None)]) - names.append(name) - candidates.append(from_import) - # from pkg import mod - if '.' in module_name: - pkg, mod = module_name.rsplit('.', 1) - candidates.append(FromImport(pkg, 0, [(mod, None)])) - if name: - names.append(mod + '.' + name) - else: - names.append(mod) - # import mod - normal_import = NormalImport([(module_name, None)]) - if name: - names.append(module_name + '.' 
+ name) - else: - names.append(module_name) - - candidates.append(normal_import) - - visitor = actions.AddingVisitor(pycore, candidates) - selected_import = normal_import - for import_statement in imports.imports: - if import_statement.accept(visitor): - selected_import = visitor.import_info - break - imports.add_import(selected_import) - imported_name = names[candidates.index(selected_import)] - return imports.get_changed_source(), imported_name diff --git a/pymode/libs3/rope/refactor/importutils/actions.py b/pymode/libs3/rope/refactor/importutils/actions.py deleted file mode 100644 index 4851d02f..00000000 --- a/pymode/libs3/rope/refactor/importutils/actions.py +++ /dev/null @@ -1,359 +0,0 @@ -import os -import sys - -from rope.base import pyobjects, exceptions, stdmods -from rope.refactor import occurrences -from rope.refactor.importutils import importinfo - - -class ImportInfoVisitor(object): - - def dispatch(self, import_): - try: - method_name = 'visit' + import_.import_info.__class__.__name__ - method = getattr(self, method_name) - return method(import_, import_.import_info) - except exceptions.ModuleNotFoundError: - pass - - def visitEmptyImport(self, import_stmt, import_info): - pass - - def visitNormalImport(self, import_stmt, import_info): - pass - - def visitFromImport(self, import_stmt, import_info): - pass - - -class RelativeToAbsoluteVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder): - self.to_be_absolute = [] - self.pycore = pycore - self.folder = current_folder - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - self.to_be_absolute.extend(self._get_relative_to_absolute_list(import_info)) - new_pairs = [] - for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) - if resource is None: - new_pairs.append((name, alias)) - continue - absolute_name = self.pycore.modname(resource) - new_pairs.append((absolute_name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.NormalImport(new_pairs) - - def _get_relative_to_absolute_list(self, import_info): - result = [] - for name, alias in import_info.names_and_aliases: - if alias is not None: - continue - resource = self.pycore.find_module(name, folder=self.folder) - if resource is None: - continue - absolute_name = self.pycore.modname(resource) - if absolute_name != name: - result.append((name, absolute_name)) - return result - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - if resource is None: - return None - absolute_name = self.pycore.modname(resource) - if import_info.module_name != absolute_name: - import_stmt.import_info = importinfo.FromImport( - absolute_name, 0, import_info.names_and_aliases) - - -class FilteringVisitor(ImportInfoVisitor): - - def __init__(self, pycore, folder, can_select): - self.to_be_absolute = [] - self.pycore = pycore - self.can_select = self._transform_can_select(can_select) - self.context = importinfo.ImportContext(pycore, folder) - - def _transform_can_select(self, can_select): - def can_select_name_and_alias(name, alias): - imported = name - if alias is not None: - imported = alias - return can_select(imported) - return can_select_name_and_alias - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - if 
self.can_select(name, alias): - new_pairs.append((name, alias)) - return importinfo.NormalImport(new_pairs) - - def visitFromImport(self, import_stmt, import_info): - if _is_future(import_info): - return import_info - new_pairs = [] - if import_info.is_star_import(): - for name in import_info.get_imported_names(self.context): - if self.can_select(name, None): - new_pairs.append(import_info.names_and_aliases[0]) - break - else: - for name, alias in import_info.names_and_aliases: - if self.can_select(name, alias): - new_pairs.append((name, alias)) - return importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - - -class RemovingVisitor(ImportInfoVisitor): - - def __init__(self, pycore, folder, can_select): - self.to_be_absolute = [] - self.pycore = pycore - self.filtering = FilteringVisitor(pycore, folder, can_select) - - def dispatch(self, import_): - result = self.filtering.dispatch(import_) - if result is not None: - import_.import_info = result - - -class AddingVisitor(ImportInfoVisitor): - """A class for adding imports - - Given a list of `ImportInfo`\s, it tries to add each import to the - module and returns `True` and gives up when an import can be added - to older ones. - - """ - - def __init__(self, pycore, import_list): - self.pycore = pycore - self.import_list = import_list - self.import_info = None - - def dispatch(self, import_): - for import_info in self.import_list: - self.import_info = import_info - if ImportInfoVisitor.dispatch(self, import_): - return True - - # TODO: Handle adding relative and absolute imports - def visitNormalImport(self, import_stmt, import_info): - if not isinstance(self.import_info, import_info.__class__): - return False - # Adding ``import x`` and ``import x.y`` that results ``import x.y`` - if len(import_info.names_and_aliases) == \ - len(self.import_info.names_and_aliases) == 1: - imported1 = import_info.names_and_aliases[0] - imported2 = self.import_info.names_and_aliases[0] - if imported1[1] == imported2[1] is None: - if imported1[0].startswith(imported2[0] + '.'): - return True - if imported2[0].startswith(imported1[0] + '.'): - import_stmt.import_info = self.import_info - return True - # Multiple imports using a single import statement is discouraged - # so we won't bother adding them. 
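        # If an identical import statement is already present, treat the
        # candidate as added and leave the module untouched.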
- if self.import_info._are_name_and_alias_lists_equal( - import_info.names_and_aliases, self.import_info.names_and_aliases): - return True - - def visitFromImport(self, import_stmt, import_info): - if isinstance(self.import_info, import_info.__class__) and \ - import_info.module_name == self.import_info.module_name and \ - import_info.level == self.import_info.level: - if import_info.is_star_import(): - return True - if self.import_info.is_star_import(): - import_stmt.import_info = self.import_info - return True - new_pairs = list(import_info.names_and_aliases) - for pair in self.import_info.names_and_aliases: - if pair not in new_pairs: - new_pairs.append(pair) - import_stmt.import_info = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - return True - - -class ExpandStarsVisitor(ImportInfoVisitor): - - def __init__(self, pycore, folder, can_select): - self.pycore = pycore - self.filtering = FilteringVisitor(pycore, folder, can_select) - self.context = importinfo.ImportContext(pycore, folder) - - def visitNormalImport(self, import_stmt, import_info): - self.filtering.dispatch(import_stmt) - - def visitFromImport(self, import_stmt, import_info): - if import_info.is_star_import(): - new_pairs = [] - for name in import_info.get_imported_names(self.context): - new_pairs.append((name, None)) - new_import = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - import_stmt.import_info = \ - self.filtering.visitFromImport(None, new_import) - else: - self.filtering.dispatch(import_stmt) - - -class SelfImportVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder, resource): - self.pycore = pycore - self.folder = current_folder - self.resource = resource - self.to_be_fixed = set() - self.to_be_renamed = set() - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) - if resource is not None and resource == self.resource: - imported = name - if alias is not None: - imported = alias - self.to_be_fixed.add(imported) - else: - new_pairs.append((name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.NormalImport(new_pairs) - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - if resource is None: - return - if resource == self.resource: - self._importing_names_from_self(import_info, import_stmt) - return - pymodule = self.pycore.resource_to_pyobject(resource) - new_pairs = [] - for name, alias in import_info.names_and_aliases: - try: - result = pymodule[name].get_object() - if isinstance(result, pyobjects.PyModule) and \ - result.get_resource() == self.resource: - imported = name - if alias is not None: - imported = alias - self.to_be_fixed.add(imported) - else: - new_pairs.append((name, alias)) - except exceptions.AttributeNotFoundError: - new_pairs.append((name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - - def _importing_names_from_self(self, import_info, import_stmt): - if not import_info.is_star_import(): - for name, alias in import_info.names_and_aliases: - if alias is not None: - 
self.to_be_renamed.add((alias, name)) - import_stmt.empty_import() - - -class SortingVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder): - self.pycore = pycore - self.folder = current_folder - self.standard = set() - self.third_party = set() - self.in_project = set() - self.future = set() - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - if import_info.names_and_aliases: - name, alias = import_info.names_and_aliases[0] - resource = self.pycore.find_module( - name, folder=self.folder) - self._check_imported_resource(import_stmt, resource, name) - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - self._check_imported_resource(import_stmt, resource, - import_info.module_name) - - def _check_imported_resource(self, import_stmt, resource, imported_name): - info = import_stmt.import_info - if resource is not None and resource.project == self.pycore.project: - self.in_project.add(import_stmt) - elif _is_future(info): - self.future.add(import_stmt) - elif imported_name.split('.')[0] in stdmods.standard_modules(): - self.standard.add(import_stmt) - else: - self.third_party.add(import_stmt) - - -class LongImportVisitor(ImportInfoVisitor): - - def __init__(self, current_folder, pycore, maxdots, maxlength): - self.maxdots = maxdots - self.maxlength = maxlength - self.to_be_renamed = set() - self.current_folder = current_folder - self.pycore = pycore - self.new_imports = [] - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - if alias is None and self._is_long(name): - self.to_be_renamed.add(name) - last_dot = name.rindex('.') - from_ = name[:last_dot] - imported = name[last_dot + 1:] - self.new_imports.append( - importinfo.FromImport(from_, 0, ((imported, None), ))) - - def _is_long(self, name): - return name.count('.') > self.maxdots or \ - ('.' in name and len(name) > self.maxlength) - - -class RemovePyNameVisitor(ImportInfoVisitor): - - def __init__(self, pycore, pymodule, pyname, folder): - self.pymodule = pymodule - self.pyname = pyname - self.context = importinfo.ImportContext(pycore, folder) - - def visitFromImport(self, import_stmt, import_info): - new_pairs = [] - if not import_info.is_star_import(): - for name, alias in import_info.names_and_aliases: - try: - pyname = self.pymodule[alias or name] - if occurrences.same_pyname(self.pyname, pyname): - continue - except exceptions.AttributeNotFoundError: - pass - new_pairs.append((name, alias)) - return importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - - def dispatch(self, import_): - result = ImportInfoVisitor.dispatch(self, import_) - if result is not None: - import_.import_info = result - - -def _is_future(info): - return isinstance(info, importinfo.FromImport) and \ - info.module_name == '__future__' diff --git a/pymode/libs3/rope/refactor/importutils/importinfo.py b/pymode/libs3/rope/refactor/importutils/importinfo.py deleted file mode 100644 index cbf49d48..00000000 --- a/pymode/libs3/rope/refactor/importutils/importinfo.py +++ /dev/null @@ -1,201 +0,0 @@ -class ImportStatement(object): - """Represent an import in a module - - `readonly` attribute controls whether this import can be changed - by import actions or not. 
- - """ - - def __init__(self, import_info, start_line, end_line, - main_statement=None, blank_lines=0): - self.start_line = start_line - self.end_line = end_line - self.readonly = False - self.main_statement = main_statement - self._import_info = None - self.import_info = import_info - self._is_changed = False - self.new_start = None - self.blank_lines = blank_lines - - def _get_import_info(self): - return self._import_info - - def _set_import_info(self, new_import): - if not self.readonly and \ - new_import is not None and not new_import == self._import_info: - self._is_changed = True - self._import_info = new_import - - import_info = property(_get_import_info, _set_import_info) - - def get_import_statement(self): - if self._is_changed or self.main_statement is None: - return self.import_info.get_import_statement() - else: - return self.main_statement - - def empty_import(self): - self.import_info = ImportInfo.get_empty_import() - - def move(self, lineno, blank_lines=0): - self.new_start = lineno - self.blank_lines = blank_lines - - def get_old_location(self): - return self.start_line, self.end_line - - def get_new_start(self): - return self.new_start - - def is_changed(self): - return self._is_changed or (self.new_start is not None or - self.new_start != self.start_line) - - def accept(self, visitor): - return visitor.dispatch(self) - - -class ImportInfo(object): - - def get_imported_primaries(self, context): - pass - - def get_imported_names(self, context): - return [primary.split('.')[0] - for primary in self.get_imported_primaries(context)] - - def get_import_statement(self): - pass - - def is_empty(self): - pass - - def __hash__(self): - return hash(self.get_import_statement()) - - def _are_name_and_alias_lists_equal(self, list1, list2): - if len(list1) != len(list2): - return False - for pair1, pair2 in list(zip(list1, list2)): - if pair1 != pair2: - return False - return True - - def __eq__(self, obj): - return isinstance(obj, self.__class__) and \ - self.get_import_statement() == obj.get_import_statement() - - def __ne__(self, obj): - return not self.__eq__(obj) - - @staticmethod - def get_empty_import(): - return EmptyImport() - - -class NormalImport(ImportInfo): - - def __init__(self, names_and_aliases): - self.names_and_aliases = names_and_aliases - - def get_imported_primaries(self, context): - result = [] - for name, alias in self.names_and_aliases: - if alias: - result.append(alias) - else: - result.append(name) - return result - - def get_import_statement(self): - result = 'import ' - for name, alias in self.names_and_aliases: - result += name - if alias: - result += ' as ' + alias - result += ', ' - return result[:-2] - - def is_empty(self): - return len(self.names_and_aliases) == 0 - - -class FromImport(ImportInfo): - - def __init__(self, module_name, level, names_and_aliases): - self.module_name = module_name - self.level = level - self.names_and_aliases = names_and_aliases - - def get_imported_primaries(self, context): - if self.names_and_aliases[0][0] == '*': - module = self.get_imported_module(context) - return [name for name in module - if not name.startswith('_')] - result = [] - for name, alias in self.names_and_aliases: - if alias: - result.append(alias) - else: - result.append(name) - return result - - def get_imported_resource(self, context): - """Get the imported resource - - Returns `None` if module was not found. 
- """ - if self.level == 0: - return context.pycore.find_module( - self.module_name, folder=context.folder) - else: - return context.pycore.find_relative_module( - self.module_name, context.folder, self.level) - - def get_imported_module(self, context): - """Get the imported `PyModule` - - Raises `rope.base.exceptions.ModuleNotFoundError` if module - could not be found. - """ - if self.level == 0: - return context.pycore.get_module( - self.module_name, context.folder) - else: - return context.pycore.get_relative_module( - self.module_name, context.folder, self.level) - - def get_import_statement(self): - result = 'from ' + '.' * self.level + self.module_name + ' import ' - for name, alias in self.names_and_aliases: - result += name - if alias: - result += ' as ' + alias - result += ', ' - return result[:-2] - - def is_empty(self): - return len(self.names_and_aliases) == 0 - - def is_star_import(self): - return len(self.names_and_aliases) > 0 and \ - self.names_and_aliases[0][0] == '*' - - -class EmptyImport(ImportInfo): - - names_and_aliases = [] - - def is_empty(self): - return True - - def get_imported_primaries(self, context): - return [] - - -class ImportContext(object): - - def __init__(self, pycore, folder): - self.pycore = pycore - self.folder = folder diff --git a/pymode/libs3/rope/refactor/importutils/module_imports.py b/pymode/libs3/rope/refactor/importutils/module_imports.py deleted file mode 100644 index cf9004f8..00000000 --- a/pymode/libs3/rope/refactor/importutils/module_imports.py +++ /dev/null @@ -1,451 +0,0 @@ -import functools -import rope.base.pynames -from rope.base import ast, utils -from rope.refactor.importutils import importinfo -from rope.refactor.importutils import actions - - -class ModuleImports(object): - - def __init__(self, pycore, pymodule, import_filter=None): - self.pycore = pycore - self.pymodule = pymodule - self.separating_lines = 0 - self.filter = import_filter - - @property - @utils.saveit - def imports(self): - finder = _GlobalImportFinder(self.pymodule, self.pycore) - result = finder.find_import_statements() - self.separating_lines = finder.get_separating_line_count() - if self.filter is not None: - for import_stmt in result: - if not self.filter(import_stmt): - import_stmt.readonly = True - return result - - def _get_unbound_names(self, defined_pyobject): - visitor = _GlobalUnboundNameFinder(self.pymodule, defined_pyobject) - ast.walk(self.pymodule.get_ast(), visitor) - return visitor.unbound - - def remove_unused_imports(self): - can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) - visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def get_used_imports(self, defined_pyobject): - result = [] - can_select = _OneTimeSelector(self._get_unbound_names(defined_pyobject)) - visitor = actions.FilteringVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - new_import = import_statement.accept(visitor) - if new_import is not None and not new_import.is_empty(): - result.append(new_import) - return result - - def get_changed_source(self): - imports = self.imports - after_removing = self._remove_imports(imports) - imports = [stmt for stmt in imports - if not stmt.import_info.is_empty()] - - first_non_blank = self._first_non_blank_line(after_removing, 0) - first_import = self._first_import_line() - 1 - result = [] - # Writing module docs - 
result.extend(after_removing[first_non_blank:first_import]) - # Writing imports - sorted_imports = sorted(imports, key = functools.cmp_to_key(self._compare_import_locations)) - for stmt in sorted_imports: - start = self._get_import_location(stmt) - if stmt != sorted_imports[0]: - result.append('\n' * stmt.blank_lines) - result.append(stmt.get_import_statement() + '\n') - if sorted_imports and first_non_blank < len(after_removing): - result.append('\n' * self.separating_lines) - - # Writing the body - first_after_imports = self._first_non_blank_line(after_removing, - first_import) - result.extend(after_removing[first_after_imports:]) - return ''.join(result) - - def _get_import_location(self, stmt): - start = stmt.get_new_start() - if start is None: - start = stmt.get_old_location()[0] - return start - - def _compare_import_locations(self, stmt1, stmt2): - def get_location(stmt): - if stmt.get_new_start() is not None: - return stmt.get_new_start() - else: - return stmt.get_old_location()[0] - return get_location(stmt1) - get_location(stmt2) - - def _remove_imports(self, imports): - lines = self.pymodule.source_code.splitlines(True) - after_removing = [] - last_index = 0 - for stmt in imports: - start, end = stmt.get_old_location() - after_removing.extend(lines[last_index:start - 1]) - last_index = end - 1 - for i in range(start, end): - after_removing.append('') - after_removing.extend(lines[last_index:]) - return after_removing - - def _first_non_blank_line(self, lines, lineno): - result = lineno - for line in lines[lineno:]: - if line.strip() == '': - result += 1 - else: - break - return result - - def add_import(self, import_info): - visitor = actions.AddingVisitor(self.pycore, [import_info]) - for import_statement in self.imports: - if import_statement.accept(visitor): - break - else: - lineno = self._get_new_import_lineno() - blanks = self._get_new_import_blanks() - self.imports.append(importinfo.ImportStatement( - import_info, lineno, lineno, - blank_lines=blanks)) - - def _get_new_import_blanks(self): - return 0 - - def _get_new_import_lineno(self): - if self.imports: - return self.imports[-1].end_line - return 1 - - def filter_names(self, can_select): - visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def expand_stars(self): - can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) - visitor = actions.ExpandStarsVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def remove_duplicates(self): - added_imports = [] - for import_stmt in self.imports: - visitor = actions.AddingVisitor(self.pycore, - [import_stmt.import_info]) - for added_import in added_imports: - if added_import.accept(visitor): - import_stmt.empty_import() - else: - added_imports.append(import_stmt) - - def get_relative_to_absolute_list(self): - visitor = rope.refactor.importutils.actions.RelativeToAbsoluteVisitor( - self.pycore, self._current_folder()) - for import_stmt in self.imports: - if not import_stmt.readonly: - import_stmt.accept(visitor) - return visitor.to_be_absolute - - def get_self_import_fix_and_rename_list(self): - visitor = rope.refactor.importutils.actions.SelfImportVisitor( - self.pycore, self._current_folder(), self.pymodule.get_resource()) - for import_stmt in self.imports: - if not import_stmt.readonly: - import_stmt.accept(visitor) - return visitor.to_be_fixed, visitor.to_be_renamed 
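# Minimal usage sketch for this ModuleImports class (assumes a rope ``pycore``
# and ``pymodule`` as passed in by its callers; illustrative only, not part of
# the original module):
#
#     module_imports = ModuleImports(pycore, pymodule)
#     module_imports.add_import(importinfo.NormalImport([('os', None)]))
#     module_imports.remove_unused_imports()
#     module_imports.sort_imports()
#     new_source = module_imports.get_changed_source()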
- - def _current_folder(self): - return self.pymodule.get_resource().parent - - def sort_imports(self): - # IDEA: Sort from import list - visitor = actions.SortingVisitor(self.pycore, self._current_folder()) - for import_statement in self.imports: - import_statement.accept(visitor) - in_projects = sorted(visitor.in_project, key = self._compare_imports) - third_party = sorted(visitor.third_party, key = self._compare_imports) - standards = sorted(visitor.standard, key = self._compare_imports) - future = sorted(visitor.future, key = self._compare_imports) - blank_lines = 0 - last_index = self._first_import_line() - last_index = self._move_imports(future, last_index, 0) - last_index = self._move_imports(standards, last_index, 1) - last_index = self._move_imports(third_party, last_index, 1) - last_index = self._move_imports(in_projects, last_index, 1) - self.separating_lines = 2 - - def _first_import_line(self): - nodes = self.pymodule.get_ast().body - lineno = 0 - if self.pymodule.get_doc() is not None: - lineno = 1 - if len(nodes) > lineno: - lineno = self.pymodule.logical_lines.logical_line_in( - nodes[lineno].lineno)[0] - else: - lineno = self.pymodule.lines.length() - while lineno > 1: - line = self.pymodule.lines.get_line(lineno - 1) - if line.strip() == '': - lineno -= 1 - else: - break - return lineno - - def _compare_imports(self, stmt): - str = stmt.get_import_statement() - return (str.startswith('from '), str) - - def _move_imports(self, imports, index, blank_lines): - if imports: - imports[0].move(index, blank_lines) - index += 1 - if len(imports) > 1: - for stmt in imports[1:]: - stmt.move(index) - index += 1 - return index - - def handle_long_imports(self, maxdots, maxlength): - visitor = actions.LongImportVisitor( - self._current_folder(), self.pycore, maxdots, maxlength) - for import_statement in self.imports: - if not import_statement.readonly: - import_statement.accept(visitor) - for import_info in visitor.new_imports: - self.add_import(import_info) - return visitor.to_be_renamed - - def remove_pyname(self, pyname): - """Removes pyname when imported in ``from mod import x``""" - visitor = actions.RemovePyNameVisitor(self.pycore, self.pymodule, - pyname, self._current_folder()) - for import_stmt in self.imports: - import_stmt.accept(visitor) - - -class _OneTimeSelector(object): - - def __init__(self, names): - self.names = names - self.selected_names = set() - - def __call__(self, imported_primary): - if self._can_name_be_added(imported_primary): - for name in self._get_dotted_tokens(imported_primary): - self.selected_names.add(name) - return True - return False - - def _get_dotted_tokens(self, imported_primary): - tokens = imported_primary.split('.') - for i in range(len(tokens)): - yield '.'.join(tokens[:i + 1]) - - def _can_name_be_added(self, imported_primary): - for name in self._get_dotted_tokens(imported_primary): - if name in self.names and name not in self.selected_names: - return True - return False - - -class _UnboundNameFinder(object): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def _visit_child_scope(self, node): - pyobject = self.pyobject.get_module().get_scope().\ - get_inner_scope_for_line(node.lineno).pyobject - visitor = _LocalUnboundNameFinder(pyobject, self) - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - - def _FunctionDef(self, node): - self._visit_child_scope(node) - - def _ClassDef(self, node): - self._visit_child_scope(node) - - def _Name(self, node): - if self._get_root()._is_node_interesting(node) and \ - 
not self.is_bound(node.id): - self.add_unbound(node.id) - - def _Attribute(self, node): - result = [] - while isinstance(node, ast.Attribute): - result.append(node.attr) - node = node.value - if isinstance(node, ast.Name): - result.append(node.id) - primary = '.'.join(reversed(result)) - if self._get_root()._is_node_interesting(node) and \ - not self.is_bound(primary): - self.add_unbound(primary) - else: - ast.walk(node, self) - - def _get_root(self): - pass - - def is_bound(self, name, propagated=False): - pass - - def add_unbound(self, name): - pass - - -class _GlobalUnboundNameFinder(_UnboundNameFinder): - - def __init__(self, pymodule, wanted_pyobject): - super(_GlobalUnboundNameFinder, self).__init__(pymodule) - self.unbound = set() - self.names = set() - for name, pyname in pymodule._get_structural_attributes().items(): - if not isinstance(pyname, (rope.base.pynames.ImportedName, - rope.base.pynames.ImportedModule)): - self.names.add(name) - wanted_scope = wanted_pyobject.get_scope() - self.start = wanted_scope.get_start() - self.end = wanted_scope.get_end() + 1 - - def _get_root(self): - return self - - def is_bound(self, primary, propagated=False): - name = primary.split('.')[0] - if name in self.names: - return True - return False - - def add_unbound(self, name): - names = name.split('.') - for i in range(len(names)): - self.unbound.add('.'.join(names[:i + 1])) - - def _is_node_interesting(self, node): - return self.start <= node.lineno < self.end - - -class _LocalUnboundNameFinder(_UnboundNameFinder): - - def __init__(self, pyobject, parent): - super(_LocalUnboundNameFinder, self).__init__(pyobject) - self.parent = parent - - def _get_root(self): - return self.parent._get_root() - - def is_bound(self, primary, propagated=False): - name = primary.split('.')[0] - if propagated: - names = self.pyobject.get_scope().get_propagated_names() - else: - names = self.pyobject.get_scope().get_names() - if name in names or self.parent.is_bound(name, propagated=True): - return True - return False - - def add_unbound(self, name): - self.parent.add_unbound(name) - - -class _GlobalImportFinder(object): - - def __init__(self, pymodule, pycore): - self.current_folder = None - if pymodule.get_resource(): - self.current_folder = pymodule.get_resource().parent - self.pymodule = pymodule - self.pycore = pycore - self.imports = [] - self.pymodule = pymodule - self.lines = self.pymodule.lines - - def visit_import(self, node, end_line): - start_line = node.lineno - import_statement = importinfo.ImportStatement( - importinfo.NormalImport(self._get_names(node.names)), - start_line, end_line, self._get_text(start_line, end_line), - blank_lines=self._count_empty_lines_before(start_line)) - self.imports.append(import_statement) - - def _count_empty_lines_before(self, lineno): - result = 0 - for current in range(lineno - 1, 0, -1): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result - - def _count_empty_lines_after(self, lineno): - result = 0 - for current in range(lineno + 1, self.lines.length()): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result - - def get_separating_line_count(self): - if not self.imports: - return 0 - return self._count_empty_lines_after(self.imports[-1].end_line - 1) - - def _get_text(self, start_line, end_line): - result = [] - for index in range(start_line, end_line): - result.append(self.lines.get_line(index)) - return '\n'.join(result) - - def visit_from(self, node, 
end_line): - level = 0 - if node.level: - level = node.level - import_info = importinfo.FromImport( - node.module or '', # see comment at rope.base.ast.walk - level, self._get_names(node.names)) - start_line = node.lineno - self.imports.append(importinfo.ImportStatement( - import_info, node.lineno, end_line, - self._get_text(start_line, end_line), - blank_lines=self._count_empty_lines_before(start_line))) - - def _get_names(self, alias_names): - result = [] - for alias in alias_names: - result.append((alias.name, alias.asname)) - return result - - def find_import_statements(self): - nodes = self.pymodule.get_ast().body - for index, node in enumerate(nodes): - if isinstance(node, (ast.Import, ast.ImportFrom)): - lines = self.pymodule.logical_lines - end_line = lines.logical_line_in(node.lineno)[1] + 1 - if isinstance(node, ast.Import): - self.visit_import(node, end_line) - if isinstance(node, ast.ImportFrom): - self.visit_from(node, end_line) - return self.imports diff --git a/pymode/libs3/rope/refactor/inline.py b/pymode/libs3/rope/refactor/inline.py deleted file mode 100644 index cfd64a7e..00000000 --- a/pymode/libs3/rope/refactor/inline.py +++ /dev/null @@ -1,615 +0,0 @@ -# Known Bugs when inlining a function/method -# The values passed to function are inlined using _inlined_variable. -# This may cause two problems, illustrated in the examples below -# -# def foo(var1): -# var1 = var1*10 -# return var1 -# -# If a call to foo(20) is inlined, the result of inlined function is 20, -# but it should be 200. -# -# def foo(var1): -# var2 = var1*10 -# return var2 -# -# 2- If a call to foo(10+10) is inlined the result of inlined function is 110 -# but it should be 200. - -import re - -import rope.base.exceptions -import rope.refactor.functionutils -from rope.base import (pynames, pyobjects, codeanalyze, - taskhandle, evaluate, worder, utils) -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import (occurrences, rename, sourceutils, - importutils, move, change_signature) - -def unique_prefix(): - n = 0 - while True: - yield "__" + str(n) + "__" - n += 1 - -def create_inline(project, resource, offset): - """Create a refactoring object for inlining - - Based on `resource` and `offset` it returns an instance of - `InlineMethod`, `InlineVariable` or `InlineParameter`. - - """ - pycore = project.pycore - pyname = _get_pyname(pycore, resource, offset) - message = 'Inline refactoring should be performed on ' \ - 'a method, local variable or parameter.' 
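# Dispatch summary for the checks that follow (descriptive only): an assigned
# name yields InlineVariable, a parameter name yields InlineParameter, and a
# PyFunction object yields InlineMethod; anything else raises RefactoringError
# with the message above.  A typical (illustrative) call site would be
# ``create_inline(project, resource, offset).get_changes()``.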
- if pyname is None: - raise rope.base.exceptions.RefactoringError(message) - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - if isinstance(pyname, pynames.AssignedName): - return InlineVariable(project, resource, offset) - if isinstance(pyname, pynames.ParameterName): - return InlineParameter(project, resource, offset) - if isinstance(pyname.get_object(), pyobjects.PyFunction): - return InlineMethod(project, resource, offset) - else: - raise rope.base.exceptions.RefactoringError(message) - - -class _Inliner(object): - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - self.pyname = _get_pyname(self.pycore, resource, offset) - range_finder = worder.Worder(resource.read()) - self.region = range_finder.get_primary_range(offset) - self.name = range_finder.get_word_at(offset) - self.offset = offset - self.original = resource - - def get_changes(self, *args, **kwds): - pass - - def get_kind(self): - """Return either 'variable', 'method' or 'parameter'""" - - -class InlineMethod(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineMethod, self).__init__(*args, **kwds) - self.pyfunction = self.pyname.get_object() - self.pymodule = self.pyfunction.get_module() - self.resource = self.pyfunction.get_module().get_resource() - self.occurrence_finder = occurrences.create_finder( - self.pycore, self.name, self.pyname) - self.normal_generator = _DefinitionGenerator(self.project, - self.pyfunction) - self._init_imports() - - def _init_imports(self): - body = sourceutils.get_body(self.pyfunction) - body, imports = move.moving_code_with_imports( - self.pycore, self.resource, body) - self.imports = imports - self.others_generator = _DefinitionGenerator( - self.project, self.pyfunction, body=body) - - def _get_scope_range(self): - scope = self.pyfunction.get_scope() - lines = self.pymodule.lines - logicals = self.pymodule.logical_lines - start_line = scope.get_start() - if self.pyfunction.decorators: - decorators = self.pyfunction.decorators - if hasattr(decorators[0], 'lineno'): - start_line = decorators[0].lineno - start_offset = lines.get_line_start(start_line) - end_offset = min(lines.get_line_end(scope.end) + 1, - len(self.pymodule.source_code)) - return (start_offset, end_offset) - - def get_changes(self, remove=True, only_current=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - If `remove` is `False` the definition will not be removed. If - `only_current` is `True`, the the current occurrence will be - inlined, only. 
- """ - changes = ChangeSet('Inline method <%s>' % self.name) - if resources is None: - resources = self.pycore.get_python_files() - if only_current: - resources = [self.original] - if remove: - resources.append(self.resource) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - for file in resources: - job_set.started_job(file.path) - if file == self.resource: - changes.add_change(self._defining_file_changes( - changes, remove=remove, only_current=only_current)) - else: - aim = None - if only_current and self.original == file: - aim = self.offset - handle = _InlineFunctionCallsForModuleHandle( - self.pycore, file, self.others_generator, aim) - result = move.ModuleSkipRenamer( - self.occurrence_finder, file, handle).get_changed_module() - if result is not None: - result = _add_imports(self.pycore, result, - file, self.imports) - if remove: - result = _remove_from(self.pycore, self.pyname, - result, file) - changes.add_change(ChangeContents(file, result)) - job_set.finished_job() - return changes - - def _get_removed_range(self): - scope = self.pyfunction.get_scope() - lines = self.pymodule.lines - logical = self.pymodule.logical_lines - start_line = scope.get_start() - start, end = self._get_scope_range() - end_line = scope.get_end() - for i in range(end_line + 1, lines.length()): - if lines.get_line(i).strip() == '': - end_line = i - else: - break - end = min(lines.get_line_end(end_line) + 1, - len(self.pymodule.source_code)) - return (start, end) - - def _defining_file_changes(self, changes, remove, only_current): - start_offset, end_offset = self._get_removed_range() - aim = None - if only_current: - if self.resource == self.original: - aim = self.offset - else: - # we don't want to change any of them - aim = len(self.resource.read()) + 100 - handle = _InlineFunctionCallsForModuleHandle( - self.pycore, self.resource, - self.normal_generator, aim_offset=aim) - replacement = None - if remove: - replacement = self._get_method_replacement() - result = move.ModuleSkipRenamer( - self.occurrence_finder, self.resource, handle, start_offset, - end_offset, replacement).get_changed_module() - return ChangeContents(self.resource, result) - - def _get_method_replacement(self): - if self._is_the_last_method_of_a_class(): - indents = sourceutils.get_indents( - self.pymodule.lines, self.pyfunction.get_scope().get_start()) - return ' ' * indents + 'pass\n' - return '' - - def _is_the_last_method_of_a_class(self): - pyclass = self.pyfunction.parent - if not isinstance(pyclass, pyobjects.PyClass): - return False - class_start, class_end = sourceutils.get_body_region(pyclass) - source = self.pymodule.source_code - lines = self.pymodule.lines - func_start, func_end = self._get_scope_range() - if source[class_start:func_start].strip() == '' and \ - source[func_end:class_end].strip() == '': - return True - return False - - def get_kind(self): - return 'method' - - -class InlineVariable(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineVariable, self).__init__(*args, **kwds) - self.pymodule = self.pyname.get_definition_location()[0] - self.resource = self.pymodule.get_resource() - self._check_exceptional_conditions() - self._init_imports() - - def _check_exceptional_conditions(self): - if len(self.pyname.assignments) != 1: - raise rope.base.exceptions.RefactoringError( - 'Local variable should be assigned once for inlining.') - - def get_changes(self, remove=True, only_current=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - 
if rename._is_local(self.pyname): - resources = [self.resource] - else: - resources = self.pycore.get_python_files() - if only_current: - resources = [self.original] - if remove and self.original != self.resource: - resources.append(self.resource) - changes = ChangeSet('Inline variable <%s>' % self.name) - jobset = task_handle.create_jobset('Calculating changes', - len(resources)) - - for resource in resources: - jobset.started_job(resource.path) - if resource == self.resource: - source = self._change_main_module(remove, only_current) - changes.add_change(ChangeContents(self.resource, source)) - else: - result = self._change_module(resource, remove, only_current) - if result is not None: - result = _add_imports(self.pycore, result, - resource, self.imports) - changes.add_change(ChangeContents(resource, result)) - jobset.finished_job() - return changes - - def _change_main_module(self, remove, only_current): - region = None - if only_current and self.original == self.resource: - region = self.region - return _inline_variable(self.pycore, self.pymodule, self.pyname, - self.name, remove=remove, region=region) - - def _init_imports(self): - vardef = _getvardef(self.pymodule, self.pyname) - self.imported, self.imports = move.moving_code_with_imports( - self.pycore, self.resource, vardef) - - def _change_module(self, resource, remove, only_current): - filters = [occurrences.NoImportsFilter(), - occurrences.PyNameFilter(self.pyname)] - if only_current and resource == self.original: - def check_aim(occurrence): - start, end = occurrence.get_primary_range() - if self.offset < start or end < self.offset: - return False - filters.insert(0, check_aim) - finder = occurrences.Finder(self.pycore, self.name, filters=filters) - changed = rename.rename_in_module( - finder, self.imported, resource=resource, replace_primary=True) - if changed and remove: - changed = _remove_from(self.pycore, self.pyname, changed, resource) - return changed - - def get_kind(self): - return 'variable' - - -class InlineParameter(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineParameter, self).__init__(*args, **kwds) - resource, offset = self._function_location() - index = self.pyname.index - self.changers = [change_signature.ArgumentDefaultInliner(index)] - self.signature = change_signature.ChangeSignature(self.project, - resource, offset) - - def _function_location(self): - pymodule, lineno = self.pyname.get_definition_location() - resource = pymodule.get_resource() - start = pymodule.lines.get_line_start(lineno) - word_finder = worder.Worder(pymodule.source_code) - offset = word_finder.find_function_offset(start) - return resource, offset - - def get_changes(self, **kwds): - """Get the changes needed by this refactoring - - See `rope.refactor.change_signature.ChangeSignature.get_changes()` - for arguments. 
- """ - return self.signature.get_changes(self.changers, **kwds) - - def get_kind(self): - return 'parameter' - - -def _join_lines(lines): - definition_lines = [] - for unchanged_line in lines: - line = unchanged_line.strip() - if line.endswith('\\'): - line = line[:-1].strip() - definition_lines.append(line) - joined = ' '.join(definition_lines) - return joined - - -class _DefinitionGenerator(object): - unique_prefix = unique_prefix() - def __init__(self, project, pyfunction, body=None): - self.pycore = project.pycore - self.pyfunction = pyfunction - self.pymodule = pyfunction.get_module() - self.resource = self.pymodule.get_resource() - self.definition_info = self._get_definition_info() - self.definition_params = self._get_definition_params() - self._calculated_definitions = {} - if body is not None: - self.body = body - else: - self.body = sourceutils.get_body(self.pyfunction) - - def _get_definition_info(self): - return rope.refactor.functionutils.DefinitionInfo.read(self.pyfunction) - - def _get_definition_params(self): - definition_info = self.definition_info - paramdict = dict([pair for pair in definition_info.args_with_defaults]) - if definition_info.args_arg is not None or \ - definition_info.keywords_arg is not None: - raise rope.base.exceptions.RefactoringError( - 'Cannot inline functions with list and keyword arguements.') - if self.pyfunction.get_kind() == 'classmethod': - paramdict[definition_info.args_with_defaults[0][0]] = \ - self.pyfunction.parent.get_name() - return paramdict - - def get_function_name(self): - return self.pyfunction.get_name() - - def get_definition(self, primary, pyname, call, host_vars=[],returns=False): - # caching already calculated definitions - return self._calculate_definition(primary, pyname, call, - host_vars, returns) - - def _calculate_header(self, primary, pyname, call): - # A header is created which initializes parameters - # to the values passed to the function. 
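# Illustrative example of the header described above: inlining the call
# ``foo(10, y=2)`` against ``def foo(x, y=1)`` produces the header lines
# ``x = 10`` and ``y = 2``, which are placed before the copied function body.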
- call_info = rope.refactor.functionutils.CallInfo.read( - primary, pyname, self.definition_info, call) - paramdict = self.definition_params - mapping = rope.refactor.functionutils.ArgumentMapping( - self.definition_info, call_info) - for param_name, value in mapping.param_dict.items(): - paramdict[param_name] = value - header = '' - to_be_inlined = [] - mod = self.pycore.get_string_module(self.body) - all_names = mod.get_scope().get_names() - assigned_names = [name for name in all_names if - isinstance(all_names[name], rope.base.pynamesdef.AssignedName)] - for name, value in paramdict.items(): - if name != value and value is not None: - header += name + ' = ' + value.replace('\n', ' ') + '\n' - to_be_inlined.append(name) - return header, to_be_inlined - - def _calculate_definition(self, primary, pyname, call, host_vars, returns): - - header, to_be_inlined = self._calculate_header(primary, pyname, call) - - source = header + self.body - mod = self.pycore.get_string_module(source) - name_dict = mod.get_scope().get_names() - all_names = [x for x in name_dict if - not isinstance(name_dict[x], rope.base.builtins.BuiltinName)] - - # If there is a name conflict, all variable names - # inside the inlined function are renamed - if len(set(all_names).intersection(set(host_vars))) > 0: - - prefix = _DefinitionGenerator.unique_prefix.next() - guest = self.pycore.get_string_module(source, self.resource) - - to_be_inlined = [prefix+item for item in to_be_inlined] - for item in all_names: - pyname = guest[item] - occurrence_finder = occurrences.create_finder( - self.pycore, item, pyname) - source = rename.rename_in_module(occurrence_finder, - prefix+item, pymodule=guest) - guest = self.pycore.get_string_module(source, self.resource) - - #parameters not reassigned inside the functions are now inlined. 
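# (Illustrative: a header assignment such as ``x = 10`` whose name is not
#  reassigned in the body is folded back into the copied body by replacing the
#  uses of ``x`` with ``10`` and dropping the assignment.)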
- for name in to_be_inlined: - pymodule = self.pycore.get_string_module(source, self.resource) - pyname = pymodule[name] - source = _inline_variable(self.pycore, pymodule, pyname, name) - - return self._replace_returns_with(source, returns) - - def _replace_returns_with(self, source, returns): - result = [] - returned = None - last_changed = 0 - for match in _DefinitionGenerator._get_return_pattern().finditer(source): - for key, value in match.groupdict().items(): - if value and key == 'return': - result.append(source[last_changed:match.start('return')]) - if returns: - self._check_nothing_after_return(source, - match.end('return')) - returned = _join_lines( - source[match.end('return'): len(source)].splitlines()) - last_changed = len(source) - else: - current = match.end('return') - while current < len(source) and source[current] in ' \t': - current += 1 - last_changed = current - if current == len(source) or source[current] == '\n': - result.append('pass') - result.append(source[last_changed:]) - return ''.join(result), returned - - def _check_nothing_after_return(self, source, offset): - lines = codeanalyze.SourceLinesAdapter(source) - lineno = lines.get_line_number(offset) - logical_lines = codeanalyze.LogicalLineFinder(lines) - lineno = logical_lines.logical_line_in(lineno)[1] - if source[lines.get_line_end(lineno):len(source)].strip() != '': - raise rope.base.exceptions.RefactoringError( - 'Cannot inline functions with statements after return statement.') - - @classmethod - def _get_return_pattern(cls): - if not hasattr(cls, '_return_pattern'): - def named_pattern(name, list_): - return "(?P<%s>" % name + "|".join(list_) + ")" - comment_pattern = named_pattern('comment', [r'#[^\n]*']) - string_pattern = named_pattern('string', - [codeanalyze.get_string_pattern()]) - return_pattern = r'\b(?Preturn)\b' - cls._return_pattern = re.compile(comment_pattern + "|" + - string_pattern + "|" + - return_pattern) - return cls._return_pattern - - -class _InlineFunctionCallsForModuleHandle(object): - - def __init__(self, pycore, resource, - definition_generator, aim_offset=None): - """Inlines occurrences - - If `aim` is not `None` only the occurrences that intersect - `aim` offset will be inlined. 
- - """ - self.pycore = pycore - self.generator = definition_generator - self.resource = resource - self.aim = aim_offset - - def occurred_inside_skip(self, change_collector, occurrence): - if not occurrence.is_defined(): - raise rope.base.exceptions.RefactoringError( - 'Cannot inline functions that reference themselves') - - def occurred_outside_skip(self, change_collector, occurrence): - start, end = occurrence.get_primary_range() - # we remove out of date imports later - if occurrence.is_in_import_statement(): - return - # the function is referenced outside an import statement - if not occurrence.is_called(): - raise rope.base.exceptions.RefactoringError( - 'Reference to inlining function other than function call' - ' in ' % (self.resource.path, start)) - if self.aim is not None and (self.aim < start or self.aim > end): - return - end_parens = self._find_end_parens(self.source, end - 1) - lineno = self.lines.get_line_number(start) - start_line, end_line = self.pymodule.logical_lines.\ - logical_line_in(lineno) - line_start = self.lines.get_line_start(start_line) - line_end = self.lines.get_line_end(end_line) - - - returns = self.source[line_start:start].strip() != '' or \ - self.source[end_parens:line_end].strip() != '' - indents = sourceutils.get_indents(self.lines, start_line) - primary, pyname = occurrence.get_primary_and_pyname() - - host = self.pycore.resource_to_pyobject(self.resource) - scope = host.scope.get_inner_scope_for_line(lineno) - definition, returned = self.generator.get_definition( - primary, pyname, self.source[start:end_parens], scope.get_names(), returns=returns) - - end = min(line_end + 1, len(self.source)) - change_collector.add_change(line_start, end, - sourceutils.fix_indentation(definition, indents)) - if returns: - name = returned - if name is None: - name = 'None' - change_collector.add_change( - line_end, end, self.source[line_start:start] + name + - self.source[end_parens:end]) - - def _find_end_parens(self, source, offset): - finder = worder.Worder(source) - return finder.get_word_parens_range(offset)[1] - - @property - @utils.saveit - def pymodule(self): - return self.pycore.resource_to_pyobject(self.resource) - - @property - @utils.saveit - def source(self): - if self.resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def lines(self): - return self.pymodule.lines - - -def _inline_variable(pycore, pymodule, pyname, name, - remove=True, region=None): - definition = _getvardef(pymodule, pyname) - start, end = _assigned_lineno(pymodule, pyname) - - occurrence_finder = occurrences.create_finder(pycore, name, pyname) - changed_source = rename.rename_in_module( - occurrence_finder, definition, pymodule=pymodule, - replace_primary=True, writes=False, region=region) - if changed_source is None: - changed_source = pymodule.source_code - if remove: - lines = codeanalyze.SourceLinesAdapter(changed_source) - source = changed_source[:lines.get_line_start(start)] + \ - changed_source[lines.get_line_end(end) + 1:] - else: - source = changed_source - return source - -def _getvardef(pymodule, pyname): - assignment = pyname.assignments[0] - lines = pymodule.lines - start, end = _assigned_lineno(pymodule, pyname) - definition_with_assignment = _join_lines( - [lines.get_line(n) for n in range(start, end + 1)]) - if assignment.levels: - raise rope.base.exceptions.RefactoringError( - 'Cannot inline tuple assignments.') - definition = definition_with_assignment[definition_with_assignment.\ - index('=') 
+ 1:].strip() - return definition - -def _assigned_lineno(pymodule, pyname): - definition_line = pyname.assignments[0].ast_node.lineno - return pymodule.logical_lines.logical_line_in(definition_line) - -def _add_imports(pycore, source, resource, imports): - if not imports: - return source - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) - for import_info in imports: - module_import.add_import(import_info) - source = module_import.get_changed_source() - pymodule = pycore.get_string_module(source, resource) - import_tools = importutils.ImportTools(pycore) - return import_tools.organize_imports(pymodule, unused=False, sort=False) - -def _get_pyname(pycore, resource, offset): - pymodule = pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(pymodule, offset) - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - return pyname - -def _remove_from(pycore, pyname, source, resource): - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) - module_import.remove_pyname(pyname) - return module_import.get_changed_source() diff --git a/pymode/libs3/rope/refactor/introduce_factory.py b/pymode/libs3/rope/refactor/introduce_factory.py deleted file mode 100644 index 5a885587..00000000 --- a/pymode/libs3/rope/refactor/introduce_factory.py +++ /dev/null @@ -1,133 +0,0 @@ -import rope.base.exceptions -import rope.base.pyobjects -from rope.base import taskhandle, evaluate -from rope.base.change import (ChangeSet, ChangeContents) -from rope.refactor import rename, occurrences, sourceutils, importutils - - -class IntroduceFactory(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.offset = offset - - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.old_pyname = evaluate.eval_location(this_pymodule, offset) - if self.old_pyname is None or not isinstance(self.old_pyname.get_object(), - rope.base.pyobjects.PyClass): - raise rope.base.exceptions.RefactoringError( - 'Introduce factory should be performed on a class.') - self.old_name = self.old_pyname.get_object().get_name() - self.pymodule = self.old_pyname.get_object().get_module() - self.resource = self.pymodule.get_resource() - - def get_changes(self, factory_name, global_factory=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - `factory_name` indicates the name of the factory function to - be added. If `global_factory` is `True` the factory will be - global otherwise a static method is added to the class. - - `resources` can be a list of `rope.base.resource.File`\s that - this refactoring should be applied on; if `None` all python - files in the project are searched. 
- - """ - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Introduce factory method <%s>' % factory_name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - self._change_module(resources, changes, factory_name, - global_factory, job_set) - return changes - - def get_name(self): - """Return the name of the class""" - return self.old_name - - def _change_module(self, resources, changes, - factory_name, global_, job_set): - if global_: - replacement = '__rope_factory_%s_' % factory_name - else: - replacement = self._new_function_name(factory_name, global_) - - for file_ in resources: - job_set.started_job(file_.path) - if file_ == self.resource: - self._change_resource(changes, factory_name, global_) - job_set.finished_job() - continue - changed_code = self._rename_occurrences(file_, replacement, - global_) - if changed_code is not None: - if global_: - new_pymodule = self.pycore.get_string_module(changed_code, - self.resource) - modname = self.pycore.modname(self.resource) - changed_code, imported = importutils.add_import( - self.pycore, new_pymodule, modname, factory_name) - changed_code = changed_code.replace(replacement, imported) - changes.add_change(ChangeContents(file_, changed_code)) - job_set.finished_job() - - def _change_resource(self, changes, factory_name, global_): - class_scope = self.old_pyname.get_object().get_scope() - source_code = self._rename_occurrences( - self.resource, self._new_function_name(factory_name, - global_), global_) - if source_code is None: - source_code = self.pymodule.source_code - else: - self.pymodule = self.pycore.get_string_module( - source_code, resource=self.resource) - lines = self.pymodule.lines - start = self._get_insertion_offset(class_scope, lines) - result = source_code[:start] - result += self._get_factory_method(lines, class_scope, - factory_name, global_) - result += source_code[start:] - changes.add_change(ChangeContents(self.resource, result)) - - def _get_insertion_offset(self, class_scope, lines): - start_line = class_scope.get_end() - if class_scope.get_scopes(): - start_line = class_scope.get_scopes()[-1].get_end() - start = lines.get_line_end(start_line) + 1 - return start - - def _get_factory_method(self, lines, class_scope, - factory_name, global_): - unit_indents = ' ' * sourceutils.get_indent(self.pycore) - if global_: - if self._get_scope_indents(lines, class_scope) > 0: - raise rope.base.exceptions.RefactoringError( - 'Cannot make global factory method for nested classes.') - return ('\ndef %s(*args, **kwds):\n%sreturn %s(*args, **kwds)\n' % - (factory_name, unit_indents, self.old_name)) - unindented_factory = \ - ('@staticmethod\ndef %s(*args, **kwds):\n' % factory_name + - '%sreturn %s(*args, **kwds)\n' % (unit_indents, self.old_name)) - indents = self._get_scope_indents(lines, class_scope) + \ - sourceutils.get_indent(self.pycore) - return '\n' + sourceutils.indent_lines(unindented_factory, indents) - - def _get_scope_indents(self, lines, scope): - return sourceutils.get_indents(lines, scope.get_start()) - - def _new_function_name(self, factory_name, global_): - if global_: - return factory_name - else: - return self.old_name + '.' 
+ factory_name - - def _rename_occurrences(self, file_, changed_name, global_factory): - finder = occurrences.create_finder(self.pycore, self.old_name, - self.old_pyname, only_calls=True) - result = rename.rename_in_module(finder, changed_name, resource=file_, - replace_primary=global_factory) - return result - -IntroduceFactoryRefactoring = IntroduceFactory diff --git a/pymode/libs3/rope/refactor/introduce_parameter.py b/pymode/libs3/rope/refactor/introduce_parameter.py deleted file mode 100644 index 312c61aa..00000000 --- a/pymode/libs3/rope/refactor/introduce_parameter.py +++ /dev/null @@ -1,95 +0,0 @@ -import rope.base.change -from rope.base import exceptions, evaluate, worder, codeanalyze -from rope.refactor import functionutils, sourceutils, occurrences - - -class IntroduceParameter(object): - """Introduce parameter refactoring - - This refactoring adds a new parameter to a function and replaces - references to an expression in it with the new parameter. - - The parameter finding part is different from finding similar - pieces in extract refactorings. In this refactoring parameters - are found based on the object they reference to. For instance - in:: - - class A(object): - var = None - - class B(object): - a = A() - - b = B() - a = b.a - - def f(a): - x = b.a.var + a.var - - using this refactoring on ``a.var`` with ``p`` as the new - parameter name, will result in:: - - def f(p=a.var): - x = p + p - - """ - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - scope = self.pymodule.get_scope().get_inner_scope_for_offset(offset) - if scope.get_kind() != 'Function': - raise exceptions.RefactoringError( - 'Introduce parameter should be performed inside functions') - self.pyfunction = scope.pyobject - self.name, self.pyname = self._get_name_and_pyname() - if self.pyname is None: - raise exceptions.RefactoringError( - 'Cannot find the definition of <%s>' % self.name) - - def _get_primary(self): - word_finder = worder.Worder(self.resource.read()) - return word_finder.get_primary_at(self.offset) - - def _get_name_and_pyname(self): - return (worder.get_name_at(self.resource, self.offset), - evaluate.eval_location(self.pymodule, self.offset)) - - def get_changes(self, new_parameter): - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - definition_info.args_with_defaults.append((new_parameter, - self._get_primary())) - collector = codeanalyze.ChangeCollector(self.resource.read()) - header_start, header_end = self._get_header_offsets() - body_start, body_end = sourceutils.get_body_region(self.pyfunction) - collector.add_change(header_start, header_end, - definition_info.to_string()) - self._change_function_occurances(collector, body_start, - body_end, new_parameter) - changes = rope.base.change.ChangeSet('Introduce parameter <%s>' % - new_parameter) - change = rope.base.change.ChangeContents(self.resource, - collector.get_changed()) - changes.add_change(change) - return changes - - def _get_header_offsets(self): - lines = self.pymodule.lines - start_line = self.pyfunction.get_scope().get_start() - end_line = self.pymodule.logical_lines.\ - logical_line_in(start_line)[1] - start = lines.get_line_start(start_line) - end = lines.get_line_end(end_line) - start = self.pymodule.source_code.find('def', start) + 4 - end = self.pymodule.source_code.rfind(':', start, end) - return start, end - - def _change_function_occurances(self, 
collector, function_start, - function_end, new_name): - finder = occurrences.create_finder(self.pycore, self.name, self.pyname) - for occurrence in finder.find_occurrences(resource=self.resource): - start, end = occurrence.get_primary_range() - if function_start <= start < function_end: - collector.add_change(start, end, new_name) diff --git a/pymode/libs3/rope/refactor/localtofield.py b/pymode/libs3/rope/refactor/localtofield.py deleted file mode 100644 index 391fcac9..00000000 --- a/pymode/libs3/rope/refactor/localtofield.py +++ /dev/null @@ -1,50 +0,0 @@ -from rope.base import pynames, evaluate, exceptions, worder -from rope.refactor.rename import Rename - - -class LocalToField(object): - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - self.resource = resource - self.offset = offset - - def get_changes(self): - name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - pyname = evaluate.eval_location(this_pymodule, self.offset) - if not self._is_a_method_local(pyname): - raise exceptions.RefactoringError( - 'Convert local variable to field should be performed on \n' - 'a local variable of a method.') - - pymodule, lineno = pyname.get_definition_location() - function_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - # Not checking redefinition - #self._check_redefinition(name, function_scope) - - new_name = self._get_field_name(function_scope.pyobject, name) - changes = Rename(self.project, self.resource, self.offset).\ - get_changes(new_name, resources=[self.resource]) - return changes - - def _check_redefinition(self, name, function_scope): - class_scope = function_scope.parent - if name in class_scope.pyobject: - raise exceptions.RefactoringError( - 'The field %s already exists' % name) - - def _get_field_name(self, pyfunction, name): - self_name = pyfunction.get_param_names()[0] - new_name = self_name + '.' 
+ name - return new_name - - def _is_a_method_local(self, pyname): - pymodule, lineno = pyname.get_definition_location() - holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - parent = holding_scope.parent - return isinstance(pyname, pynames.AssignedName) and \ - pyname in list(holding_scope.get_names().values()) and \ - holding_scope.get_kind() == 'Function' and \ - parent is not None and parent.get_kind() == 'Class' diff --git a/pymode/libs3/rope/refactor/method_object.py b/pymode/libs3/rope/refactor/method_object.py deleted file mode 100644 index b3dd6bdd..00000000 --- a/pymode/libs3/rope/refactor/method_object.py +++ /dev/null @@ -1,87 +0,0 @@ -import warnings - -from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze -from rope.refactor import sourceutils, occurrences, rename - - -class MethodObject(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None or not isinstance(pyname.get_object(), - pyobjects.PyFunction): - raise exceptions.RefactoringError( - 'Replace method with method object refactoring should be ' - 'performed on a function.') - self.pyfunction = pyname.get_object() - self.pymodule = self.pyfunction.get_module() - self.resource = self.pymodule.get_resource() - - def get_new_class(self, name): - body = sourceutils.fix_indentation( - self._get_body(), sourceutils.get_indent(self.pycore) * 2) - return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \ - (name, self._get_init(), - ' ' * sourceutils.get_indent(self.pycore), body) - - def get_changes(self, classname=None, new_class_name=None): - if new_class_name is not None: - warnings.warn( - 'new_class_name parameter is deprecated; use classname', - DeprecationWarning, stacklevel=2) - classname = new_class_name - collector = codeanalyze.ChangeCollector(self.pymodule.source_code) - start, end = sourceutils.get_body_region(self.pyfunction) - indents = sourceutils.get_indents( - self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) - new_contents = ' ' * indents + 'return %s(%s)()\n' % \ - (classname, ', '.join(self._get_parameter_names())) - collector.add_change(start, end, new_contents) - insertion = self._get_class_insertion_point() - collector.add_change(insertion, insertion, - '\n\n' + self.get_new_class(classname)) - changes = change.ChangeSet('Replace method with method object refactoring') - changes.add_change(change.ChangeContents(self.resource, - collector.get_changed())) - return changes - - def _get_class_insertion_point(self): - current = self.pyfunction - while current.parent != self.pymodule: - current = current.parent - end = self.pymodule.lines.get_line_end(current.get_scope().get_end()) - return min(end + 1, len(self.pymodule.source_code)) - - def _get_body(self): - body = sourceutils.get_body(self.pyfunction) - for param in self._get_parameter_names(): - body = param + ' = None\n' + body - pymod = self.pycore.get_string_module(body, self.resource) - pyname = pymod[param] - finder = occurrences.create_finder(self.pycore, param, pyname) - result = rename.rename_in_module(finder, 'self.' 
+ param, - pymodule=pymod) - body = result[result.index('\n') + 1:] - return body - - def _get_init(self): - params = self._get_parameter_names() - indents = ' ' * sourceutils.get_indent(self.pycore) - if not params: - return '' - header = indents + 'def __init__(self' - body = '' - for arg in params: - new_name = arg - if arg == 'self': - new_name = 'host' - header += ', %s' % new_name - body += indents * 2 + 'self.%s = %s\n' % (arg, new_name) - header += '):' - return '%s\n%s\n' % (header, body) - - def _get_parameter_names(self): - return self.pyfunction.get_param_names() diff --git a/pymode/libs3/rope/refactor/move.py b/pymode/libs3/rope/refactor/move.py deleted file mode 100644 index eade323b..00000000 --- a/pymode/libs3/rope/refactor/move.py +++ /dev/null @@ -1,628 +0,0 @@ -"""A module containing classes for move refactoring - -`create_move()` is a factory for creating move refactoring objects -based on inputs. - -""" -from rope.base import pyobjects, codeanalyze, exceptions, pynames, taskhandle, evaluate, worder -from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor import importutils, rename, occurrences, sourceutils, functionutils - - -def create_move(project, resource, offset=None): - """A factory for creating Move objects - - Based on `resource` and `offset`, return one of `MoveModule`, - `MoveGlobal` or `MoveMethod` for performing move refactoring. - - """ - if offset is None: - return MoveModule(project, resource) - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError( - 'Move only works on classes, functions, modules and methods.') - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.PyModule) or \ - isinstance(pyobject, pyobjects.PyPackage): - return MoveModule(project, pyobject.get_resource()) - if isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - return MoveMethod(project, resource, offset) - if isinstance(pyobject, pyobjects.PyDefinedObject) and \ - isinstance(pyobject.parent, pyobjects.PyModule): - return MoveGlobal(project, resource, offset) - raise exceptions.RefactoringError( - 'Move only works on global classes/functions, modules and methods.') - - -class MoveMethod(object): - """For moving methods - - It makes a new method in the destination class and changes - the body of the old method to call the new method. You can - inline the old method to change all of its occurrences. - - """ - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - self.method_name = worder.get_name_at(resource, offset) - self.pyfunction = pyname.get_object() - if self.pyfunction.get_kind() != 'method': - raise exceptions.RefactoringError('Only normal methods' - ' can be moved.') - - def get_changes(self, dest_attr, new_name=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Return the changes needed for this refactoring - - Parameters: - - - `dest_attr`: the name of the destination attribute - - `new_name`: the name of the new method; if `None` uses - the old name - - `resources` can be a list of `rope.base.resources.File`\s to - apply this refactoring on. If `None`, the restructuring - will be applied to all python files. 
- - """ - changes = ChangeSet('Moving method <%s>' % self.method_name) - if resources is None: - resources = self.pycore.get_python_files() - if new_name is None: - new_name = self.get_method_name() - resource1, start1, end1, new_content1 = \ - self._get_changes_made_by_old_class(dest_attr, new_name) - collector1 = codeanalyze.ChangeCollector(resource1.read()) - collector1.add_change(start1, end1, new_content1) - - resource2, start2, end2, new_content2 = \ - self._get_changes_made_by_new_class(dest_attr, new_name) - if resource1 == resource2: - collector1.add_change(start2, end2, new_content2) - else: - collector2 = codeanalyze.ChangeCollector(resource2.read()) - collector2.add_change(start2, end2, new_content2) - result = collector2.get_changed() - import_tools = importutils.ImportTools(self.pycore) - new_imports = self._get_used_imports(import_tools) - if new_imports: - goal_pymodule = self.pycore.get_string_module(result, - resource2) - result = _add_imports_to_module( - import_tools, goal_pymodule, new_imports) - if resource2 in resources: - changes.add_change(ChangeContents(resource2, result)) - - if resource1 in resources: - changes.add_change(ChangeContents(resource1, - collector1.get_changed())) - return changes - - def get_method_name(self): - return self.method_name - - def _get_used_imports(self, import_tools): - return importutils.get_imports(self.pycore, self.pyfunction) - - def _get_changes_made_by_old_class(self, dest_attr, new_name): - pymodule = self.pyfunction.get_module() - indents = self._get_scope_indents(self.pyfunction) - body = 'return self.%s.%s(%s)\n' % (dest_attr, new_name, - self._get_passed_arguments_string()) - region = sourceutils.get_body_region(self.pyfunction) - return (pymodule.get_resource(), region[0], region[1], - sourceutils.fix_indentation(body, indents)) - - def _get_scope_indents(self, pyobject): - pymodule = pyobject.get_module() - return sourceutils.get_indents( - pymodule.lines, pyobject.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) - - def _get_changes_made_by_new_class(self, dest_attr, new_name): - old_pyclass = self.pyfunction.parent - if dest_attr not in old_pyclass: - raise exceptions.RefactoringError( - 'Destination attribute <%s> not found' % dest_attr) - pyclass = old_pyclass[dest_attr].get_object().get_type() - if not isinstance(pyclass, pyobjects.PyClass): - raise exceptions.RefactoringError( - 'Unknown class type for attribute <%s>' % dest_attr) - pymodule = pyclass.get_module() - resource = pyclass.get_module().get_resource() - start, end = sourceutils.get_body_region(pyclass) - pre_blanks = '\n' - if pymodule.source_code[start:end].strip() != 'pass': - pre_blanks = '\n\n' - start = end - indents = self._get_scope_indents(pyclass) - body = pre_blanks + sourceutils.fix_indentation( - self.get_new_method(new_name), indents) - return resource, start, end, body - - def get_new_method(self, name): - return '%s\n%s' % ( - self._get_new_header(name), - sourceutils.fix_indentation(self._get_body(), - sourceutils.get_indent(self.pycore))) - - def _get_unchanged_body(self): - return sourceutils.get_body(self.pyfunction) - - def _get_body(self, host='host'): - self_name = self._get_self_name() - body = self_name + ' = None\n' + self._get_unchanged_body() - pymodule = self.pycore.get_string_module(body) - finder = occurrences.create_finder( - self.pycore, self_name, pymodule[self_name]) - result = rename.rename_in_module(finder, host, pymodule=pymodule) - if result is None: - result = body - return result[result.index('\n') 
+ 1:] - - def _get_self_name(self): - return self.pyfunction.get_param_names()[0] - - def _get_new_header(self, name): - header = 'def %s(self' % name - if self._is_host_used(): - header += ', host' - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - others = definition_info.arguments_to_string(1) - if others: - header += ', ' + others - return header + '):' - - def _get_passed_arguments_string(self): - result = '' - if self._is_host_used(): - result = 'self' - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - others = definition_info.arguments_to_string(1) - if others: - if result: - result += ', ' - result += others - return result - - def _is_host_used(self): - return self._get_body('__old_self') != self._get_unchanged_body() - - -class MoveGlobal(object): - """For moving global function and classes""" - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.old_pyname = evaluate.eval_location(this_pymodule, offset) - self.old_name = self.old_pyname.get_object().get_name() - pymodule = self.old_pyname.get_object().get_module() - self.source = pymodule.get_resource() - self.tools = _MoveTools(self.pycore, self.source, - self.old_pyname, self.old_name) - self.import_tools = self.tools.import_tools - self._check_exceptional_conditions() - - def _check_exceptional_conditions(self): - if self.old_pyname is None or \ - not isinstance(self.old_pyname.get_object(), pyobjects.PyDefinedObject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a class/function.') - moving_pyobject = self.old_pyname.get_object() - if not self._is_global(moving_pyobject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a global class/function.') - - def _is_global(self, pyobject): - return pyobject.get_scope().parent == pyobject.get_module().get_scope() - - def get_changes(self, dest, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.pycore.get_python_files() - if dest is None or not dest.exists(): - raise exceptions.RefactoringError( - 'Move destination does not exist.') - if dest.is_folder() and dest.has_child('__init__.py'): - dest = dest.get_child('__init__.py') - if dest.is_folder(): - raise exceptions.RefactoringError( - 'Move destination for non-modules should not be folders.') - if self.source == dest: - raise exceptions.RefactoringError( - 'Moving global elements to the same module.') - return self._calculate_changes(dest, resources, task_handle) - - def _calculate_changes(self, dest, resources, task_handle): - changes = ChangeSet('Moving global <%s>' % self.old_name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - for file_ in resources: - job_set.started_job(file_.path) - if file_ == self.source: - changes.add_change(self._source_module_changes(dest)) - elif file_ == dest: - changes.add_change(self._dest_module_changes(dest)) - elif self.tools.occurs_in_module(resource=file_): - pymodule = self.pycore.resource_to_pyobject(file_) - # Changing occurrences - placeholder = '__rope_renaming_%s_' % self.old_name - source = self.tools.rename_in_module(placeholder, - resource=file_) - should_import = source is not None - # Removing out of date imports - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) - # Adding new import - if should_import: - pymodule = 
self.tools.new_pymodule(pymodule, source) - source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) - source = source.replace(placeholder, imported) - source = self.tools.new_source(pymodule, source) - if source != file_.read(): - changes.add_change(ChangeContents(file_, source)) - job_set.finished_job() - return changes - - def _source_module_changes(self, dest): - placeholder = '__rope_moving_%s_' % self.old_name - handle = _ChangeMoveOccurrencesHandle(placeholder) - occurrence_finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname) - start, end = self._get_moving_region() - renamer = ModuleSkipRenamer(occurrence_finder, self.source, - handle, start, end) - source = renamer.get_changed_module() - if handle.occurred: - pymodule = self.pycore.get_string_module(source, self.source) - # Adding new import - source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) - source = source.replace(placeholder, imported) - return ChangeContents(self.source, source) - - def _new_modname(self, dest): - return self.pycore.modname(dest) - - def _dest_module_changes(self, dest): - # Changing occurrences - pymodule = self.pycore.resource_to_pyobject(dest) - source = self.tools.rename_in_module(self.old_name, pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - - moving, imports = self._get_moving_element_with_imports() - source = self.tools.remove_old_imports(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - pymodule, has_changed = self._add_imports2(pymodule, imports) - - module_with_imports = self.import_tools.module_imports(pymodule) - source = pymodule.source_code - lineno = 0 - if module_with_imports.imports: - lineno = module_with_imports.imports[-1].end_line - 1 - else: - while lineno < pymodule.lines.length() and \ - pymodule.lines.get_line(lineno + 1).lstrip().startswith('#'): - lineno += 1 - if lineno > 0: - cut = pymodule.lines.get_line_end(lineno) + 1 - result = source[:cut] + '\n\n' + moving + source[cut:] - else: - result = moving + source - # Organizing imports - source = result - pymodule = self.pycore.get_string_module(source, dest) - source = self.import_tools.organize_imports(pymodule, sort=False, - unused=False) - return ChangeContents(dest, source) - - def _get_moving_element_with_imports(self): - return moving_code_with_imports( - self.pycore, self.source, self._get_moving_element()) - - def _get_module_with_imports(self, source_code, resource): - pymodule = self.pycore.get_string_module(source_code, resource) - return self.import_tools.module_imports(pymodule) - - def _get_moving_element(self): - start, end = self._get_moving_region() - moving = self.source.read()[start:end] - return moving.rstrip() + '\n' - - def _get_moving_region(self): - pymodule = self.pycore.resource_to_pyobject(self.source) - lines = pymodule.lines - scope = self.old_pyname.get_object().get_scope() - start = lines.get_line_start(scope.get_start()) - end_line = scope.get_end() - while end_line < lines.length() and \ - lines.get_line(end_line + 1).strip() == '': - end_line += 1 - end = min(lines.get_line_end(end_line) + 1, len(pymodule.source_code)) - return start, end - - def _add_imports2(self, pymodule, new_imports): - source = self.tools.add_imports(pymodule, new_imports) - if source is None: - return pymodule, False - else: - resource = pymodule.get_resource() - pymodule = self.pycore.get_string_module(source, resource) - return pymodule, True - - 
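(Editor's note, illustrative only: the `create_move()` factory and the `get_changes()` flow removed above were typically driven from library code roughly as sketched below. This is a hedged sketch against rope's documented project API, not code taken from this patch; the project path, file names, the `helper` name used to compute the offset, and the destination module are hypothetical placeholders.)

# Hedged sketch of driving the move refactoring deleted in this hunk.
# All paths and names below are made-up placeholders.
from rope.base.project import Project
from rope.refactor.move import create_move

project = Project('myproj')                      # open an existing rope project (hypothetical path)
source = project.get_resource('pkg/mod.py')      # file containing the global object to move
offset = source.read().index('helper')           # offset of the name; a global name selects MoveGlobal
mover = create_move(project, source, offset)     # factory returns MoveGlobal/MoveMethod/MoveModule
changes = mover.get_changes(project.get_resource('pkg/other.py'))
project.do(changes)                              # apply the generated ChangeSet
project.close()

(Passing `offset=None` would instead return a `MoveModule` for the whole file, per the factory's docstring above.)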
-class MoveModule(object): - """For moving modules and packages""" - - def __init__(self, project, resource): - self.project = project - self.pycore = project.pycore - if not resource.is_folder() and resource.name == '__init__.py': - resource = resource.parent - if resource.is_folder() and not resource.has_child('__init__.py'): - raise exceptions.RefactoringError( - 'Cannot move non-package folder.') - dummy_pymodule = self.pycore.get_string_module('') - self.old_pyname = pynames.ImportedModule(dummy_pymodule, - resource=resource) - self.source = self.old_pyname.get_object().get_resource() - if self.source.is_folder(): - self.old_name = self.source.name - else: - self.old_name = self.source.name[:-3] - self.tools = _MoveTools(self.pycore, self.source, - self.old_pyname, self.old_name) - self.import_tools = self.tools.import_tools - - def get_changes(self, dest, resources=None, - task_handle=taskhandle.NullTaskHandle()): - moving_pyobject = self.old_pyname.get_object() - if resources is None: - resources = self.pycore.get_python_files() - if dest is None or not dest.is_folder(): - raise exceptions.RefactoringError( - 'Move destination for modules should be packages.') - return self._calculate_changes(dest, resources, task_handle) - - def _calculate_changes(self, dest, resources, task_handle): - changes = ChangeSet('Moving module <%s>' % self.old_name) - job_set = task_handle.create_jobset('Collecting changes', - len(resources)) - for module in resources: - job_set.started_job(module.path) - if module == self.source: - self._change_moving_module(changes, dest) - else: - source = self._change_occurrences_in_module(dest, - resource=module) - if source is not None: - changes.add_change(ChangeContents(module, source)) - job_set.finished_job() - if self.project == self.source.project: - changes.add_change(MoveResource(self.source, dest.path)) - return changes - - def _new_modname(self, dest): - destname = self.pycore.modname(dest) - if destname: - return destname + '.' 
+ self.old_name - return self.old_name - - def _new_import(self, dest): - return importutils.NormalImport([(self._new_modname(dest), None)]) - - def _change_moving_module(self, changes, dest): - if not self.source.is_folder(): - pymodule = self.pycore.resource_to_pyobject(self.source) - source = self.import_tools.relatives_to_absolutes(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - source = self._change_occurrences_in_module(dest, pymodule) - source = self.tools.new_source(pymodule, source) - if source != self.source.read(): - changes.add_change(ChangeContents(self.source, source)) - - def _change_occurrences_in_module(self, dest, pymodule=None, - resource=None): - if not self.tools.occurs_in_module(pymodule=pymodule, - resource=resource): - return - if pymodule is None: - pymodule = self.pycore.resource_to_pyobject(resource) - new_name = self._new_modname(dest) - new_import = self._new_import(dest) - source = self.tools.rename_in_module( - new_name, imports=True, pymodule=pymodule, resource=resource) - should_import = self.tools.occurs_in_module( - pymodule=pymodule, resource=resource, imports=False) - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) - if should_import: - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.add_imports(pymodule, [new_import]) - source = self.tools.new_source(pymodule, source) - if source != pymodule.resource.read(): - return source - - -class _ChangeMoveOccurrencesHandle(object): - - def __init__(self, new_name): - self.new_name = new_name - self.occurred = False - - def occurred_inside_skip(self, change_collector, occurrence): - pass - - def occurred_outside_skip(self, change_collector, occurrence): - start, end = occurrence.get_primary_range() - change_collector.add_change(start, end, self.new_name) - self.occurred = True - - -class _MoveTools(object): - - def __init__(self, pycore, source, pyname, old_name): - self.pycore = pycore - self.source = source - self.old_pyname = pyname - self.old_name = old_name - self.import_tools = importutils.ImportTools(self.pycore) - - def remove_old_imports(self, pymodule): - old_source = pymodule.source_code - module_with_imports = self.import_tools.module_imports(pymodule) - class CanSelect(object): - changed = False - old_name = self.old_name - old_pyname = self.old_pyname - def __call__(self, name): - try: - if name == self.old_name and \ - pymodule[name].get_object() == \ - self.old_pyname.get_object(): - self.changed = True - return False - except exceptions.AttributeNotFoundError: - pass - return True - can_select = CanSelect() - module_with_imports.filter_names(can_select) - new_source = module_with_imports.get_changed_source() - if old_source != new_source: - return new_source - - def rename_in_module(self, new_name, pymodule=None, - imports=False, resource=None): - occurrence_finder = self._create_finder(imports) - source = rename.rename_in_module( - occurrence_finder, new_name, replace_primary=True, - pymodule=pymodule, resource=resource) - return source - - def occurs_in_module(self, pymodule=None, resource=None, imports=True): - finder = self._create_finder(imports) - for occurrence in finder.find_occurrences(pymodule=pymodule, - resource=resource): - return True - return False - - def _create_finder(self, imports): - return occurrences.create_finder(self.pycore, self.old_name, - self.old_pyname, imports=imports) - - def new_pymodule(self, pymodule, source): - if source is not None: - return 
self.pycore.get_string_module( - source, pymodule.get_resource()) - return pymodule - - def new_source(self, pymodule, source): - if source is None: - return pymodule.source_code - return source - - def add_imports(self, pymodule, new_imports): - return _add_imports_to_module(self.import_tools, pymodule, new_imports) - - -def _add_imports_to_module(import_tools, pymodule, new_imports): - module_with_imports = import_tools.module_imports(pymodule) - for new_import in new_imports: - module_with_imports.add_import(new_import) - return module_with_imports.get_changed_source() - - -def moving_code_with_imports(pycore, resource, source): - import_tools = importutils.ImportTools(pycore) - pymodule = pycore.get_string_module(source, resource) - origin = pycore.resource_to_pyobject(resource) - - imports = [] - for stmt in import_tools.module_imports(origin).imports: - imports.append(stmt.import_info) - - back_names = [] - for name in origin: - if name not in pymodule: - back_names.append(name) - imports.append(import_tools.get_from_import(resource, back_names)) - - source = _add_imports_to_module(import_tools, pymodule, imports) - pymodule = pycore.get_string_module(source, resource) - - source = import_tools.relatives_to_absolutes(pymodule) - pymodule = pycore.get_string_module(source, resource) - source = import_tools.organize_imports(pymodule, selfs=False) - pymodule = pycore.get_string_module(source, resource) - - # extracting imports after changes - module_imports = import_tools.module_imports(pymodule) - imports = [import_stmt.import_info - for import_stmt in module_imports.imports] - start = 1 - if module_imports.imports: - start = module_imports.imports[-1].end_line - lines = codeanalyze.SourceLinesAdapter(source) - while start < lines.length() and not lines.get_line(start).strip(): - start += 1 - moving = source[lines.get_line_start(start):] - return moving, imports - - -class ModuleSkipRenamerHandle(object): - - def occurred_outside_skip(self, change_collector, occurrence): - pass - - def occurred_inside_skip(self, change_collector, occurrence): - pass - - -class ModuleSkipRenamer(object): - """Rename occurrences in a module - - This class can be used when you want to treat a region in a file - separately from other parts when renaming. - - """ - - def __init__(self, occurrence_finder, resource, handle=None, - skip_start=0, skip_end=0, replacement=''): - """Constructor - - if replacement is `None` the region is not changed. Otherwise - it is replaced with `replacement`. 
- - """ - self.occurrence_finder = occurrence_finder - self.resource = resource - self.skip_start = skip_start - self.skip_end = skip_end - self.replacement = replacement - self.handle = handle - if self.handle is None: - self.handle = ModuleSkipHandle() - - def get_changed_module(self): - source = self.resource.read() - change_collector = codeanalyze.ChangeCollector(source) - if self.replacement is not None: - change_collector.add_change(self.skip_start, self.skip_end, - self.replacement) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): - start, end = occurrence.get_primary_range() - if self.skip_start <= start < self.skip_end: - self.handle.occurred_inside_skip(change_collector, occurrence) - else: - self.handle.occurred_outside_skip(change_collector, occurrence) - result = change_collector.get_changed() - if result is not None and result != source: - return result diff --git a/pymode/libs3/rope/refactor/multiproject.py b/pymode/libs3/rope/refactor/multiproject.py deleted file mode 100644 index 6a85d2a2..00000000 --- a/pymode/libs3/rope/refactor/multiproject.py +++ /dev/null @@ -1,78 +0,0 @@ -"""This module can be used for performing cross-project refactorings - -See the "cross-project refactorings" section of ``docs/library.txt`` -file. - -""" - -from rope.base import resources, project, libutils - - -class MultiProjectRefactoring(object): - - def __init__(self, refactoring, projects, addpath=True): - """Create a multiproject proxy for the main refactoring - - `projects` are other project. - - """ - self.refactoring = refactoring - self.projects = projects - self.addpath = addpath - - def __call__(self, project, *args, **kwds): - """Create the refactoring""" - return _MultiRefactoring(self.refactoring, self.projects, - self.addpath, project, *args, **kwds) - - -class _MultiRefactoring(object): - - def __init__(self, refactoring, other_projects, addpath, - project, *args, **kwds): - self.refactoring = refactoring - self.projects = [project] + other_projects - for other_project in other_projects: - for folder in self.project.pycore.get_source_folders(): - other_project.get_prefs().add('python_path', folder.real_path) - self.refactorings = [] - for other in self.projects: - args, kwds = self._resources_for_args(other, args, kwds) - self.refactorings.append( - self.refactoring(other, *args, **kwds)) - - def get_all_changes(self, *args, **kwds): - """Get a project to changes dict""" - result = [] - for project, refactoring in zip(self.projects, self.refactorings): - args, kwds = self._resources_for_args(project, args, kwds) - result.append((project, refactoring.get_changes(*args, **kwds))) - return result - - def __getattr__(self, name): - return getattr(self.main_refactoring, name) - - def _resources_for_args(self, project, args, kwds): - newargs = [self._change_project_resource(project, arg) for arg in args] - newkwds = dict((name, self._change_project_resource(project, value)) - for name, value in kwds.items()) - return newargs, newkwds - - def _change_project_resource(self, project, obj): - if isinstance(obj, resources.Resource) and \ - obj.project != project: - return libutils.path_to_resource(project, obj.real_path) - return obj - - @property - def project(self): - return self.projects[0] - - @property - def main_refactoring(self): - return self.refactorings[0] - - -def perform(project_changes): - for project, changes in project_changes: - project.do(changes) diff --git a/pymode/libs3/rope/refactor/occurrences.py b/pymode/libs3/rope/refactor/occurrences.py 
deleted file mode 100644 index 2808ed2c..00000000 --- a/pymode/libs3/rope/refactor/occurrences.py +++ /dev/null @@ -1,334 +0,0 @@ -import re - -import rope.base.pynames -from rope.base import pynames, pyobjects, codeanalyze, evaluate, exceptions, utils, worder - - -class Finder(object): - """For finding occurrences of a name - - The constructor takes a `filters` argument. It should be a list - of functions that take a single argument. For each possible - occurrence, these functions are called in order with the an - instance of `Occurrence`: - - * If it returns `None` other filters are tried. - * If it returns `True`, the occurrence will be a match. - * If it returns `False`, the occurrence will be skipped. - * If all of the filters return `None`, it is skipped also. - - """ - - def __init__(self, pycore, name, filters=[lambda o: True], docs=False): - self.pycore = pycore - self.name = name - self.docs = docs - self.filters = filters - self._textual_finder = _TextualFinder(name, docs=docs) - - def find_occurrences(self, resource=None, pymodule=None): - """Generate `Occurrence` instances""" - tools = _OccurrenceToolsCreator(self.pycore, resource=resource, - pymodule=pymodule, docs=self.docs) - for offset in self._textual_finder.find_offsets(tools.source_code): - occurrence = Occurrence(tools, offset) - for filter in self.filters: - result = filter(occurrence) - if result is None: - continue - if result: - yield occurrence - break - - -def create_finder(pycore, name, pyname, only_calls=False, imports=True, - unsure=None, docs=False, instance=None, in_hierarchy=False): - """A factory for `Finder` - - Based on the arguments it creates a list of filters. `instance` - argument is needed only when you want implicit interfaces to be - considered. - - """ - pynames = set([pyname]) - filters = [] - if only_calls: - filters.append(CallsFilter()) - if not imports: - filters.append(NoImportsFilter()) - if isinstance(instance, rope.base.pynames.ParameterName): - for pyobject in instance.get_objects(): - try: - pynames.add(pyobject[name]) - except exceptions.AttributeNotFoundError: - pass - for pyname in pynames: - filters.append(PyNameFilter(pyname)) - if in_hierarchy: - filters.append(InHierarchyFilter(pyname)) - if unsure: - filters.append(UnsureFilter(unsure)) - return Finder(pycore, name, filters=filters, docs=docs) - - -class Occurrence(object): - - def __init__(self, tools, offset): - self.tools = tools - self.offset = offset - self.resource = tools.resource - - @utils.saveit - def get_word_range(self): - return self.tools.word_finder.get_word_range(self.offset) - - @utils.saveit - def get_primary_range(self): - return self.tools.word_finder.get_primary_range(self.offset) - - @utils.saveit - def get_pyname(self): - try: - return self.tools.name_finder.get_pyname_at(self.offset) - except exceptions.BadIdentifierError: - pass - - @utils.saveit - def get_primary_and_pyname(self): - try: - return self.tools.name_finder.get_primary_and_pyname_at(self.offset) - except exceptions.BadIdentifierError: - pass - - @utils.saveit - def is_in_import_statement(self): - return (self.tools.word_finder.is_from_statement(self.offset) or - self.tools.word_finder.is_import_statement(self.offset)) - - def is_called(self): - return self.tools.word_finder.is_a_function_being_called(self.offset) - - def is_defined(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) - - def is_a_fixed_primary(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) or \ 
- self.tools.word_finder.is_a_name_after_from_import(self.offset) - - def is_written(self): - return self.tools.word_finder.is_assigned_here(self.offset) - - def is_unsure(self): - return unsure_pyname(self.get_pyname()) - - @property - @utils.saveit - def lineno(self): - offset = self.get_word_range()[0] - return self.tools.pymodule.lines.get_line_number(offset) - - -def same_pyname(expected, pyname): - """Check whether `expected` and `pyname` are the same""" - if expected is None or pyname is None: - return False - if expected == pyname: - return True - if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) and \ - type(pyname) not in (pynames.ImportedModule, pynames.ImportedName): - return False - return expected.get_definition_location() == pyname.get_definition_location() and \ - expected.get_object() == pyname.get_object() - -def unsure_pyname(pyname, unbound=True): - """Return `True` if we don't know what this name references""" - if pyname is None: - return True - if unbound and not isinstance(pyname, pynames.UnboundName): - return False - if pyname.get_object() == pyobjects.get_unknown(): - return True - - -class PyNameFilter(object): - """For finding occurrences of a name""" - - def __init__(self, pyname): - self.pyname = pyname - - def __call__(self, occurrence): - if same_pyname(self.pyname, occurrence.get_pyname()): - return True - - -class InHierarchyFilter(object): - """For finding occurrences of a name""" - - def __init__(self, pyname, implementations_only=False): - self.pyname = pyname - self.impl_only = implementations_only - self.pyclass = self._get_containing_class(pyname) - if self.pyclass is not None: - self.name = pyname.get_object().get_name() - self.roots = self._get_root_classes(self.pyclass, self.name) - else: - self.roots = None - - def __call__(self, occurrence): - if self.roots is None: - return - pyclass = self._get_containing_class(occurrence.get_pyname()) - if pyclass is not None: - roots = self._get_root_classes(pyclass, self.name) - if self.roots.intersection(roots): - return True - - def _get_containing_class(self, pyname): - if isinstance(pyname, pynames.DefinedName): - scope = pyname.get_object().get_scope() - parent = scope.parent - if parent is not None and parent.get_kind() == 'Class': - return parent.pyobject - - def _get_root_classes(self, pyclass, name): - if self.impl_only and pyclass == self.pyclass: - return set([pyclass]) - result = set() - for superclass in pyclass.get_superclasses(): - if name in superclass: - result.update(self._get_root_classes(superclass, name)) - if not result: - return set([pyclass]) - return result - - -class UnsureFilter(object): - - def __init__(self, unsure): - self.unsure = unsure - - def __call__(self, occurrence): - if occurrence.is_unsure() and self.unsure(occurrence): - return True - - -class NoImportsFilter(object): - - def __call__(self, occurrence): - if occurrence.is_in_import_statement(): - return False - - -class CallsFilter(object): - - def __call__(self, occurrence): - if not occurrence.is_called(): - return False - - -class _TextualFinder(object): - - def __init__(self, name, docs=False): - self.name = name - self.docs = docs - self.comment_pattern = _TextualFinder.any('comment', [r'#[^\n]*']) - self.string_pattern = _TextualFinder.any( - 'string', [codeanalyze.get_string_pattern()]) - self.pattern = self._get_occurrence_pattern(self.name) - - def find_offsets(self, source): - if not self._fast_file_query(source): - return - if self.docs: - searcher = self._normal_search - else: - 
searcher = self._re_search - for matched in searcher(source): - yield matched - - def _re_search(self, source): - for match in self.pattern.finditer(source): - for key, value in match.groupdict().items(): - if value and key == 'occurrence': - yield match.start(key) - - def _normal_search(self, source): - current = 0 - while True: - try: - found = source.index(self.name, current) - current = found + len(self.name) - if (found == 0 or not self._is_id_char(source[found - 1])) and \ - (current == len(source) or not self._is_id_char(source[current])): - yield found - except ValueError: - break - - def _is_id_char(self, c): - return c.isalnum() or c == '_' - - def _fast_file_query(self, source): - try: - source.index(self.name) - return True - except ValueError: - return False - - def _get_source(self, resource, pymodule): - if resource is not None: - return resource.read() - else: - return pymodule.source_code - - def _get_occurrence_pattern(self, name): - occurrence_pattern = _TextualFinder.any('occurrence', - ['\\b' + name + '\\b']) - pattern = re.compile(occurrence_pattern + '|' + self.comment_pattern + - '|' + self.string_pattern) - return pattern - - @staticmethod - def any(name, list_): - return '(?P<%s>' % name + '|'.join(list_) + ')' - - -class _OccurrenceToolsCreator(object): - - def __init__(self, pycore, resource=None, pymodule=None, docs=False): - self.pycore = pycore - self.__resource = resource - self.__pymodule = pymodule - self.docs = docs - - @property - @utils.saveit - def name_finder(self): - return evaluate.ScopeNameFinder(self.pymodule) - - @property - @utils.saveit - def source_code(self): - if self.__resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def word_finder(self): - return worder.Worder(self.source_code, self.docs) - - @property - @utils.saveit - def resource(self): - if self.__resource is not None: - return self.__resource - if self.__pymodule is not None: - return self.__pymodule.resource - - @property - @utils.saveit - def pymodule(self): - if self.__pymodule is not None: - return self.__pymodule - return self.pycore.resource_to_pyobject(self.resource) diff --git a/pymode/libs3/rope/refactor/patchedast.py b/pymode/libs3/rope/refactor/patchedast.py deleted file mode 100644 index 042b33dd..00000000 --- a/pymode/libs3/rope/refactor/patchedast.py +++ /dev/null @@ -1,734 +0,0 @@ -import collections -import re -import warnings - -from rope.base import ast, codeanalyze, exceptions - - -def get_patched_ast(source, sorted_children=False): - """Adds ``region`` and ``sorted_children`` fields to nodes - - Adds ``sorted_children`` field only if `sorted_children` is True. - - """ - return patch_ast(ast.parse(source), source, sorted_children) - - -def patch_ast(node, source, sorted_children=False): - """Patches the given node - - After calling, each node in `node` will have a new field named - `region` that is a tuple containing the start and end offsets - of the code that generated it. - - If `sorted_children` is true, a `sorted_children` field will - be created for each node, too. It is a list containing child - nodes as well as whitespaces and comments that occur between - them. 
- - """ - if hasattr(node, 'region'): - return node - walker = _PatchingASTWalker(source, children=sorted_children) - ast.call_for_nodes(node, walker) - return node - - -def node_region(patched_ast_node): - """Get the region of a patched ast node""" - return patched_ast_node.region - - -def write_ast(patched_ast_node): - """Extract source form a patched AST node with `sorted_children` field - - If the node is patched with sorted_children turned off you can use - `node_region` function for obtaining code using module source code. - """ - result = [] - for child in patched_ast_node.sorted_children: - if isinstance(child, ast.AST): - result.append(write_ast(child)) - else: - result.append(child) - return ''.join(result) - - -class MismatchedTokenError(exceptions.RopeError): - pass - - -class _PatchingASTWalker(object): - - def __init__(self, source, children=False): - self.source = _Source(source) - self.children = children - self.lines = codeanalyze.SourceLinesAdapter(source) - self.children_stack = [] - - Number = object() - String = object() - - def __call__(self, node): - method = getattr(self, '_' + node.__class__.__name__, None) - if method is not None: - return method(node) - # ???: Unknown node; what should we do here? - warnings.warn('Unknown node type <%s>; please report!' - % node.__class__.__name__, RuntimeWarning) - node.region = (self.source.offset, self.source.offset) - if self.children: - node.sorted_children = ast.get_children(node) - - def _handle(self, node, base_children, eat_parens=False, eat_spaces=False): - if hasattr(node, 'region'): - # ???: The same node was seen twice; what should we do? - warnings.warn( - 'Node <%s> has been already patched; please report!' % - node.__class__.__name__, RuntimeWarning) - return - base_children = collections.deque(base_children) - self.children_stack.append(base_children) - children = collections.deque() - formats = [] - suspected_start = self.source.offset - start = suspected_start - first_token = True - while base_children: - child = base_children.popleft() - if child is None: - continue - offset = self.source.offset - if isinstance(child, ast.arg): - region = self.source.consume(child.arg) - child = self.source[region[0]:region[1]] - token_start = offset - elif isinstance(child, ast.AST): - ast.call_for_nodes(child, self) - token_start = child.region[0] - else: - if child is self.String: - region = self.source.consume_string( - end=self._find_next_statement_start()) - elif child is self.Number: - region = self.source.consume_number() - elif child == '!=': - # INFO: This has been added to handle deprecated ``<>`` - region = self.source.consume_not_equal() - else: - region = self.source.consume(child) - child = self.source[region[0]:region[1]] - token_start = region[0] - if not first_token: - formats.append(self.source[offset:token_start]) - if self.children: - children.append(self.source[offset:token_start]) - else: - first_token = False - start = token_start - if self.children: - children.append(child) - start = self._handle_parens(children, start, formats) - if eat_parens: - start = self._eat_surrounding_parens( - children, suspected_start, start) - if eat_spaces: - if self.children: - children.appendleft(self.source[0:start]) - end_spaces = self.source[self.source.offset:] - self.source.consume(end_spaces) - if self.children: - children.append(end_spaces) - start = 0 - if self.children: - node.sorted_children = children - node.region = (start, self.source.offset) - self.children_stack.pop() - - def _handle_parens(self, children, 
start, formats): - """Changes `children` and returns new start""" - opens, closes = self._count_needed_parens(formats) - old_end = self.source.offset - new_end = None - for i in range(closes): - new_end = self.source.consume(')')[1] - if new_end is not None: - if self.children: - children.append(self.source[old_end:new_end]) - new_start = start - for i in range(opens): - new_start = self.source.rfind_token('(', 0, new_start) - if new_start != start: - if self.children: - children.appendleft(self.source[new_start:start]) - start = new_start - return start - - def _eat_surrounding_parens(self, children, suspected_start, start): - index = self.source.rfind_token('(', suspected_start, start) - if index is not None: - old_start = start - old_offset = self.source.offset - start = index - if self.children: - children.appendleft(self.source[start + 1:old_start]) - children.appendleft('(') - token_start, token_end = self.source.consume(')') - if self.children: - children.append(self.source[old_offset:token_start]) - children.append(')') - return start - - def _count_needed_parens(self, children): - start = 0 - opens = 0 - for child in children: - if not isinstance(child, str): - continue - if child == '' or child[0] in '\'"': - continue - index = 0 - while index < len(child): - if child[index] == ')': - if opens > 0: - opens -= 1 - else: - start += 1 - if child[index] == '(': - opens += 1 - if child[index] == '#': - try: - index = child.index('\n', index) - except ValueError: - break - index += 1 - return start, opens - - def _find_next_statement_start(self): - for children in reversed(self.children_stack): - for child in children: - if isinstance(child, ast.stmt): - return child.col_offset \ - + self.lines.get_line_start(child.lineno) - return len(self.source.source) - - _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', 'Mult': '*', - 'Div': '/', 'Mod': '%', 'Pow': '**', 'LShift': '<<', - 'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', 'BitXor': '^', - 'FloorDiv': '//', 'Invert': '~', 'Not': 'not', 'UAdd': '+', - 'USub': '-', 'Eq': '==', 'NotEq': '!=', 'Lt': '<', - 'LtE': '<=', 'Gt': '>', 'GtE': '>=', 'Is': 'is', - 'IsNot': 'is not', 'In': 'in', 'NotIn': 'not in'} - - def _get_op(self, node): - return self._operators[node.__class__.__name__].split(' ') - - def _Attribute(self, node): - self._handle(node, [node.value, '.', node.attr]) - - def _Assert(self, node): - children = ['assert', node.test] - if node.msg: - children.append(',') - children.append(node.msg) - self._handle(node, children) - - def _Assign(self, node): - children = self._child_nodes(node.targets, '=') - children.append('=') - children.append(node.value) - self._handle(node, children) - - def _AugAssign(self, node): - children = [node.target] - children.extend(self._get_op(node.op)) - children.extend(['=', node.value]) - self._handle(node, children) - - def _Repr(self, node): - self._handle(node, ['`', node.value, '`']) - - def _BinOp(self, node): - children = [node.left] + self._get_op(node.op) + [node.right] - self._handle(node, children) - - def _BoolOp(self, node): - self._handle(node, self._child_nodes(node.values, - self._get_op(node.op)[0])) - - def _Break(self, node): - self._handle(node, ['break']) - - def _Call(self, node): - children = [node.func, '('] - args = list(node.args) + node.keywords - children.extend(self._child_nodes(args, ',')) - if node.starargs is not None: - if args: - children.append(',') - children.extend(['*', node.starargs]) - if node.kwargs is not None: - if args or node.starargs is not 
None: - children.append(',') - children.extend(['**', node.kwargs]) - children.append(')') - self._handle(node, children) - - def _ClassDef(self, node): - children = [] - if getattr(node, 'decorator_list', None): - for decorator in node.decorator_list: - children.append('@') - children.append(decorator) - children.extend(['class', node.name]) - if node.bases: - children.append('(') - children.extend(self._child_nodes(node.bases, ',')) - children.append(')') - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _Compare(self, node): - children = [] - children.append(node.left) - for op, expr in zip(node.ops, node.comparators): - children.extend(self._get_op(op)) - children.append(expr) - self._handle(node, children) - - def _Delete(self, node): - self._handle(node, ['del'] + self._child_nodes(node.targets, ',')) - - def _Num(self, node): - self._handle(node, [self.Number]) - - def _Str(self, node): - self._handle(node, [self.String]) - - def _Continue(self, node): - self._handle(node, ['continue']) - - def _Dict(self, node): - children = [] - children.append('{') - if node.keys: - for index, (key, value) in enumerate(list(zip(node.keys, node.values))): - children.extend([key, ':', value]) - if index < len(node.keys) - 1: - children.append(',') - children.append('}') - self._handle(node, children) - - def _Ellipsis(self, node): - self._handle(node, ['...']) - - def _Expr(self, node): - self._handle(node, [node.value]) - - def _Exec(self, node): - children = [] - children.extend(['exec', node.body]) - if node.globals: - children.extend(['in', node.globals]) - if node.locals: - children.extend([',', node.locals]) - self._handle(node, children) - - def _ExtSlice(self, node): - children = [] - for index, dim in enumerate(node.dims): - if index > 0: - children.append(',') - children.append(dim) - self._handle(node, children) - - def _For(self, node): - children = ['for', node.target, 'in', node.iter, ':'] - children.extend(node.body) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _ImportFrom(self, node): - children = ['from'] - if node.level: - children.append('.' 
* node.level) - children.extend([node.module or '', # see comment at rope.base.ast.walk - 'import']) - children.extend(self._child_nodes(node.names, ',')) - self._handle(node, children) - - def _alias(self, node): - children = [node.name] - if node.asname: - children.extend(['as', node.asname]) - self._handle(node, children) - - def _FunctionDef(self, node): - children = [] - try: - decorators = getattr(node, 'decorator_list') - except AttributeError: - decorators = getattr(node, 'decorators', None) - if decorators: - for decorator in decorators: - children.append('@') - children.append(decorator) - children.extend(['def', node.name, '(', node.args]) - children.extend([')', ':']) - children.extend(node.body) - self._handle(node, children) - - def _arguments(self, node): - children = [] - args = list(node.args) - defaults = [None] * (len(args) - len(node.defaults)) + list(node.defaults) - for index, (arg, default) in enumerate(list(zip(args, defaults))): - if index > 0: - children.append(',') - self._add_args_to_children(children, arg, default) - if node.vararg is not None: - if args: - children.append(',') - children.extend(['*', node.vararg]) - if node.kwarg is not None: - if args or node.vararg is not None: - children.append(',') - children.extend(['**', node.kwarg]) - self._handle(node, children) - - def _add_args_to_children(self, children, arg, default): - if isinstance(arg, (list, tuple)): - self._add_tuple_parameter(children, arg) - else: - children.append(arg) - if default is not None: - children.append('=') - children.append(default) - - def _add_tuple_parameter(self, children, arg): - children.append('(') - for index, token in enumerate(arg): - if index > 0: - children.append(',') - if isinstance(token, (list, tuple)): - self._add_tuple_parameter(children, token) - else: - children.append(token) - children.append(')') - - def _GeneratorExp(self, node): - children = [node.elt] - children.extend(node.generators) - self._handle(node, children, eat_parens=True) - - def _comprehension(self, node): - children = ['for', node.target, 'in', node.iter] - if node.ifs: - for if_ in node.ifs: - children.append('if') - children.append(if_) - self._handle(node, children) - - def _Global(self, node): - children = self._child_nodes(node.names, ',') - children.insert(0, 'global') - self._handle(node, children) - - def _If(self, node): - if self._is_elif(node): - children = ['elif'] - else: - children = ['if'] - children.extend([node.test, ':']) - children.extend(node.body) - if node.orelse: - if len(node.orelse) == 1 and self._is_elif(node.orelse[0]): - pass - else: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _is_elif(self, node): - if not isinstance(node, ast.If): - return False - offset = self.lines.get_line_start(node.lineno) + node.col_offset - word = self.source[offset:offset + 4] - # XXX: This is a bug; the offset does not point to the first - alt_word = self.source[offset - 5:offset - 1] - return 'elif' in (word, alt_word) - - def _IfExp(self, node): - return self._handle(node, [node.body, 'if', node.test, - 'else', node.orelse]) - - def _Import(self, node): - children = ['import'] - children.extend(self._child_nodes(node.names, ',')) - self._handle(node, children) - - def _keyword(self, node): - self._handle(node, [node.arg, '=', node.value]) - - def _Lambda(self, node): - self._handle(node, ['lambda', node.args, ':', node.body]) - - def _List(self, node): - self._handle(node, ['['] + self._child_nodes(node.elts, ',') + [']']) - - 
def _ListComp(self, node): - children = ['[', node.elt] - children.extend(node.generators) - children.append(']') - self._handle(node, children) - - def _Module(self, node): - self._handle(node, list(node.body), eat_spaces=True) - - def _Name(self, node): - self._handle(node, [node.id]) - - def _Pass(self, node): - self._handle(node, ['pass']) - - def _Print(self, node): - children = ['print'] - if node.dest: - children.extend(['>>', node.dest]) - if node.values: - children.append(',') - children.extend(self._child_nodes(node.values, ',')) - if not node.nl: - children.append(',') - self._handle(node, children) - - def _Raise(self, node): - children = ['raise'] - if node.cause: - children.append(node.cause) - if node.exc: - children.append(node.exc) - self._handle(node, children) - - def _Return(self, node): - children = ['return'] - if node.value: - children.append(node.value) - self._handle(node, children) - - def _Sliceobj(self, node): - children = [] - for index, slice in enumerate(node.nodes): - if index > 0: - children.append(':') - if slice: - children.append(slice) - self._handle(node, children) - - def _Index(self, node): - self._handle(node, [node.value]) - - def _Subscript(self, node): - self._handle(node, [node.value, '[', node.slice, ']']) - - def _Slice(self, node): - children = [] - if node.lower: - children.append(node.lower) - children.append(':') - if node.upper: - children.append(node.upper) - if node.step: - children.append(':') - children.append(node.step) - self._handle(node, children) - - def _TryFinally(self, node): - children = [] - if len(node.body) != 1 or not isinstance(node.body[0], ast.TryExcept): - children.extend(['try', ':']) - children.extend(node.body) - children.extend(['finally', ':']) - children.extend(node.finalbody) - self._handle(node, children) - - def _TryExcept(self, node): - children = ['try', ':'] - children.extend(node.body) - children.extend(node.handlers) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _ExceptHandler(self, node): - self._excepthandler(node) - - def _excepthandler(self, node): - children = ['except'] - if node.type: - children.append(node.type) - if node.name: - children.extend(['as', node.name]) - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _Tuple(self, node): - if node.elts: - self._handle(node, self._child_nodes(node.elts, ','), - eat_parens=True) - else: - self._handle(node, ['(', ')']) - - def _UnaryOp(self, node): - children = self._get_op(node.op) - children.append(node.operand) - self._handle(node, children) - - def _Yield(self, node): - children = ['yield'] - if node.value: - children.append(node.value) - self._handle(node, children) - - def _While(self, node): - children = ['while', node.test, ':'] - children.extend(node.body) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _With(self, node): - children = ['with', node.context_expr] - if node.optional_vars: - children.extend(['as', node.optional_vars]) - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _child_nodes(self, nodes, separator): - children = [] - for index, child in enumerate(nodes): - children.append(child) - if index < len(nodes) - 1: - children.append(separator) - return children - - -class _Source(object): - - def __init__(self, source): - self.source = source - self.offset = 0 - - def consume(self, token): - try: - while 
True: - new_offset = self.source.index(token, self.offset) - if self._good_token(token, new_offset): - break - else: - self._skip_comment() - except (ValueError, TypeError): - raise MismatchedTokenError( - 'Token <%s> at %s cannot be matched' % - (token, self._get_location())) - self.offset = new_offset + len(token) - return (new_offset, self.offset) - - def consume_string(self, end=None): - if _Source._string_pattern is None: - original = codeanalyze.get_string_pattern() - pattern = r'(%s)((\s|\\\n|#[^\n]*\n)*(%s))*' % \ - (original, original) - _Source._string_pattern = re.compile(pattern) - repattern = _Source._string_pattern - return self._consume_pattern(repattern, end) - - def consume_number(self): - if _Source._number_pattern is None: - _Source._number_pattern = re.compile( - self._get_number_pattern()) - repattern = _Source._number_pattern - return self._consume_pattern(repattern) - - def consume_not_equal(self): - if _Source._not_equals_pattern is None: - _Source._not_equals_pattern = re.compile(r'<>|!=') - repattern = _Source._not_equals_pattern - return self._consume_pattern(repattern) - - def _good_token(self, token, offset, start=None): - """Checks whether consumed token is in comments""" - if start is None: - start = self.offset - try: - comment_index = self.source.rindex('#', start, offset) - except ValueError: - return True - try: - new_line_index = self.source.rindex('\n', start, offset) - except ValueError: - return False - return comment_index < new_line_index - - def _skip_comment(self): - self.offset = self.source.index('\n', self.offset + 1) - - def _get_location(self): - lines = self.source[:self.offset].split('\n') - return (len(lines), len(lines[-1])) - - def _consume_pattern(self, repattern, end=None): - while True: - if end is None: - end = len(self.source) - match = repattern.search(self.source, self.offset, end) - if self._good_token(match.group(), match.start()): - break - else: - self._skip_comment() - self.offset = match.end() - return match.start(), match.end() - - def till_token(self, token): - new_offset = self.source.index(token, self.offset) - return self[self.offset:new_offset] - - def rfind_token(self, token, start, end): - index = start - while True: - try: - index = self.source.rindex(token, start, end) - if self._good_token(token, index, start=start): - return index - else: - end = index - except ValueError: - return None - - def from_offset(self, offset): - return self[offset:self.offset] - - def find_backwards(self, pattern, offset): - return self.source.rindex(pattern, 0, offset) - - def __getitem__(self, index): - return self.source[index] - - def __getslice__(self, i, j): - return self.source[i:j] - - def _get_number_pattern(self): - # HACK: It is merely an approaximation and does the job - integer = r'(0|0x)?[\da-fA-F]+[lL]?' - return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d*)?[jJ]?' 
% integer - - _string_pattern = None - _number_pattern = None - _not_equals_pattern = None diff --git a/pymode/libs3/rope/refactor/rename.py b/pymode/libs3/rope/refactor/rename.py deleted file mode 100644 index f61e4c40..00000000 --- a/pymode/libs3/rope/refactor/rename.py +++ /dev/null @@ -1,216 +0,0 @@ -import warnings - -from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze -from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor import occurrences, sourceutils - - -class Rename(object): - """A class for performing rename refactoring - - It can rename everything: classes, functions, modules, packages, - methods, variables and keyword arguments. - - """ - - def __init__(self, project, resource, offset=None): - """If `offset` is None, the `resource` itself will be renamed""" - self.project = project - self.pycore = project.pycore - self.resource = resource - if offset is not None: - self.old_name = worder.get_name_at(self.resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - self.old_instance, self.old_pyname = \ - evaluate.eval_location2(this_pymodule, offset) - if self.old_pyname is None: - raise exceptions.RefactoringError( - 'Rename refactoring should be performed' - ' on resolvable python identifiers.') - else: - if not resource.is_folder() and resource.name == '__init__.py': - resource = resource.parent - dummy_pymodule = self.pycore.get_string_module('') - self.old_instance = None - self.old_pyname = pynames.ImportedModule(dummy_pymodule, - resource=resource) - if resource.is_folder(): - self.old_name = resource.name - else: - self.old_name = resource.name[:-3] - - def get_old_name(self): - return self.old_name - - def get_changes(self, new_name, in_file=None, in_hierarchy=False, - unsure=None, docs=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes needed for this refactoring - - Parameters: - - - `in_hierarchy`: when renaming a method this keyword forces - to rename all matching methods in the hierarchy - - `docs`: when `True` rename refactoring will rename - occurrences in comments and strings where the name is - visible. Setting it will make renames faster, too. - - `unsure`: decides what to do about unsure occurrences. - If `None`, they are ignored. Otherwise `unsure` is - called with an instance of `occurrence.Occurrence` as - parameter. If it returns `True`, the occurrence is - considered to be a match. - - `resources` can be a list of `rope.base.resources.File`\s to - apply this refactoring on. If `None`, the restructuring - will be applied to all python files. - - `in_file`: this argument has been deprecated; use - `resources` instead. - - """ - if unsure in (True, False): - warnings.warn( - 'unsure parameter should be a function that returns ' - 'True or False', DeprecationWarning, stacklevel=2) - def unsure_func(value=unsure): - return value - unsure = unsure_func - if in_file is not None: - warnings.warn( - '`in_file` argument has been deprecated; use `resources` ' - 'instead. 
', DeprecationWarning, stacklevel=2) - if in_file: - resources = [self.resource] - if _is_local(self.old_pyname): - resources = [self.resource] - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Renaming <%s> to <%s>' % - (self.old_name, new_name)) - finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, unsure=unsure, - docs=docs, instance=self.old_instance, - in_hierarchy=in_hierarchy and self.is_method()) - job_set = task_handle.create_jobset('Collecting Changes', len(resources)) - for file_ in resources: - job_set.started_job(file_.path) - new_content = rename_in_module(finder, new_name, resource=file_) - if new_content is not None: - changes.add_change(ChangeContents(file_, new_content)) - job_set.finished_job() - if self._is_renaming_a_module(): - resource = self.old_pyname.get_object().get_resource() - if self._is_allowed_to_move(resources, resource): - self._rename_module(resource, new_name, changes) - return changes - - def _is_allowed_to_move(self, resources, resource): - if resource.is_folder(): - try: - return resource.get_child('__init__.py') in resources - except exceptions.ResourceNotFoundError: - return False - else: - return resource in resources - - def _is_renaming_a_module(self): - if isinstance(self.old_pyname.get_object(), pyobjects.AbstractModule): - return True - return False - - def is_method(self): - pyname = self.old_pyname - return isinstance(pyname, pynames.DefinedName) and \ - isinstance(pyname.get_object(), pyobjects.PyFunction) and \ - isinstance(pyname.get_object().parent, pyobjects.PyClass) - - def _rename_module(self, resource, new_name, changes): - if not resource.is_folder(): - new_name = new_name + '.py' - parent_path = resource.parent.path - if parent_path == '': - new_location = new_name - else: - new_location = parent_path + '/' + new_name - changes.add_change(MoveResource(resource, new_location)) - - -class ChangeOccurrences(object): - """A class for changing the occurrences of a name in a scope - - This class replaces the occurrences of a name. Note that it only - changes the scope containing the offset passed to the constructor. - What's more it does not have any side-effects. That is for - example changing occurrences of a module does not rename the - module; it merely replaces the occurrences of that module in a - scope with the given expression. This class is useful for - performing many custom refactorings. 
- - """ - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self.old_name = worder.get_name_at(resource, offset) - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - self.old_pyname = evaluate.eval_location(self.pymodule, offset) - - def get_old_name(self): - word_finder = worder.Worder(self.resource.read()) - return word_finder.get_primary_at(self.offset) - - def _get_scope_offset(self): - lines = self.pymodule.lines - scope = self.pymodule.get_scope().\ - get_inner_scope_for_line(lines.get_line_number(self.offset)) - start = lines.get_line_start(scope.get_start()) - end = lines.get_line_end(scope.get_end()) - return start, end - - def get_changes(self, new_name, only_calls=False, reads=True, writes=True): - changes = ChangeSet('Changing <%s> occurrences to <%s>' % - (self.old_name, new_name)) - scope_start, scope_end = self._get_scope_offset() - finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, - imports=False, only_calls=only_calls) - new_contents = rename_in_module( - finder, new_name, pymodule=self.pymodule, replace_primary=True, - region=(scope_start, scope_end), reads=reads, writes=writes) - if new_contents is not None: - changes.add_change(ChangeContents(self.resource, new_contents)) - return changes - - -def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None, - replace_primary=False, region=None, reads=True, writes=True): - """Returns the changed source or `None` if there is no changes""" - if resource is not None: - source_code = resource.read() - else: - source_code = pymodule.source_code - change_collector = codeanalyze.ChangeCollector(source_code) - for occurrence in occurrences_finder.find_occurrences(resource, pymodule): - if replace_primary and occurrence.is_a_fixed_primary(): - continue - if replace_primary: - start, end = occurrence.get_primary_range() - else: - start, end = occurrence.get_word_range() - if (not reads and not occurrence.is_written()) or \ - (not writes and occurrence.is_written()): - continue - if region is None or region[0] <= start < region[1]: - change_collector.add_change(start, end, new_name) - return change_collector.get_changed() - -def _is_local(pyname): - module, lineno = pyname.get_definition_location() - if lineno is None: - return False - scope = module.get_scope().get_inner_scope_for_line(lineno) - if isinstance(pyname, pynames.DefinedName) and \ - scope.get_kind() in ('Function', 'Class'): - scope = scope.parent - return scope.get_kind() == 'Function' and \ - pyname in list(scope.get_names().values()) and \ - isinstance(pyname, pynames.AssignedName) diff --git a/pymode/libs3/rope/refactor/restructure.py b/pymode/libs3/rope/refactor/restructure.py deleted file mode 100644 index 1573c2fe..00000000 --- a/pymode/libs3/rope/refactor/restructure.py +++ /dev/null @@ -1,307 +0,0 @@ -import warnings - -from rope.base import change, taskhandle, builtins, ast, codeanalyze -from rope.refactor import patchedast, similarfinder, sourceutils -from rope.refactor.importutils import module_imports - - -class Restructure(object): - """A class to perform python restructurings - - A restructuring transforms pieces of code matching `pattern` to - `goal`. In the `pattern` wildcards can appear. Wildcards match - some piece of code based on their kind and arguments that are - passed to them through `args`. - - `args` is a dictionary of wildcard names to wildcard arguments. 
- If the argument is a tuple, the first item of the tuple is - considered to be the name of the wildcard to use; otherwise the - "default" wildcard is used. For getting the list arguments a - wildcard supports, see the pydoc of the wildcard. (see - `rope.refactor.wildcard.DefaultWildcard` for the default - wildcard.) - - `wildcards` is the list of wildcard types that can appear in - `pattern`. See `rope.refactor.wildcards`. If a wildcard does not - specify its kind (by using a tuple in args), the wildcard named - "default" is used. So there should be a wildcard with "default" - name in `wildcards`. - - `imports` is the list of imports that changed modules should - import. Note that rope handles duplicate imports and does not add - the import if it already appears. - - Example #1:: - - pattern ${pyobject}.get_attribute(${name}) - goal ${pyobject}[${name}] - args pyobject: instance=rope.base.pyobjects.PyObject - - Example #2:: - - pattern ${name} in ${pyobject}.get_attributes() - goal ${name} in {pyobject} - args pyobject: instance=rope.base.pyobjects.PyObject - - Example #3:: - - pattern ${pycore}.create_module(${project}.root, ${name}) - goal generate.create_module(${project}, ${name}) - - imports - from rope.contrib import generate - - args - pycore: type=rope.base.pycore.PyCore - project: type=rope.base.project.Project - - Example #4:: - - pattern ${pow}(${param1}, ${param2}) - goal ${param1} ** ${param2} - args pow: name=mod.pow, exact - - Example #5:: - - pattern ${inst}.longtask(${p1}, ${p2}) - goal - ${inst}.subtask1(${p1}) - ${inst}.subtask2(${p2}) - args - inst: type=mod.A,unsure - - """ - - def __init__(self, project, pattern, goal, args=None, - imports=None, wildcards=None): - """Construct a restructuring - - See class pydoc for more info about the arguments. - - """ - self.pycore = project.pycore - self.pattern = pattern - self.goal = goal - self.args = args - if self.args is None: - self.args = {} - self.imports = imports - if self.imports is None: - self.imports = [] - self.wildcards = wildcards - self.template = similarfinder.CodeTemplate(self.goal) - - def get_changes(self, checks=None, imports=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes needed by this restructuring - - `resources` can be a list of `rope.base.resources.File`\s to - apply the restructuring on. If `None`, the restructuring will - be applied to all python files. - - `checks` argument has been deprecated. Use the `args` argument - of the constructor. The usage of:: - - strchecks = {'obj1.type': 'mod.A', 'obj2': 'mod.B', - 'obj3.object': 'mod.C'} - checks = restructuring.make_checks(strchecks) - - can be replaced with:: - - args = {'obj1': 'type=mod.A', 'obj2': 'name=mod.B', - 'obj3': 'object=mod.C'} - - where obj1, obj2 and obj3 are wildcard names that appear - in restructuring pattern. 
- - """ - if checks is not None: - warnings.warn( - 'The use of checks parameter is deprecated; ' - 'use the args parameter of the constructor instead.', - DeprecationWarning, stacklevel=2) - for name, value in checks.items(): - self.args[name] = similarfinder._pydefined_to_str(value) - if imports is not None: - warnings.warn( - 'The use of imports parameter is deprecated; ' - 'use imports parameter of the constructor, instead.', - DeprecationWarning, stacklevel=2) - self.imports = imports - changes = change.ChangeSet('Restructuring <%s> to <%s>' % - (self.pattern, self.goal)) - if resources is not None: - files = [resource for resource in resources - if self.pycore.is_python_file(resource)] - else: - files = self.pycore.get_python_files() - job_set = task_handle.create_jobset('Collecting Changes', len(files)) - for resource in files: - job_set.started_job(resource.path) - pymodule = self.pycore.resource_to_pyobject(resource) - finder = similarfinder.SimilarFinder(pymodule, - wildcards=self.wildcards) - matches = list(finder.get_matches(self.pattern, self.args)) - computer = self._compute_changes(matches, pymodule) - result = computer.get_changed() - if result is not None: - imported_source = self._add_imports(resource, result, - self.imports) - changes.add_change(change.ChangeContents(resource, - imported_source)) - job_set.finished_job() - return changes - - def _compute_changes(self, matches, pymodule): - return _ChangeComputer( - pymodule.source_code, pymodule.get_ast(), - pymodule.lines, self.template, matches) - - def _add_imports(self, resource, source, imports): - if not imports: - return source - import_infos = self._get_import_infos(resource, imports) - pymodule = self.pycore.get_string_module(source, resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) - for import_info in import_infos: - imports.add_import(import_info) - return imports.get_changed_source() - - def _get_import_infos(self, resource, imports): - pymodule = self.pycore.get_string_module('\n'.join(imports), - resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) - return [imports.import_info - for imports in imports.imports] - - def make_checks(self, string_checks): - """Convert str to str dicts to str to PyObject dicts - - This function is here to ease writing a UI. 
- - """ - checks = {} - for key, value in string_checks.items(): - is_pyname = not key.endswith('.object') and \ - not key.endswith('.type') - evaluated = self._evaluate(value, is_pyname=is_pyname) - if evaluated is not None: - checks[key] = evaluated - return checks - - def _evaluate(self, code, is_pyname=True): - attributes = code.split('.') - pyname = None - if attributes[0] in ('__builtin__', '__builtins__'): - class _BuiltinsStub(object): - def get_attribute(self, name): - return builtins.builtins[name] - pyobject = _BuiltinsStub() - else: - pyobject = self.pycore.get_module(attributes[0]) - for attribute in attributes[1:]: - pyname = pyobject[attribute] - if pyname is None: - return None - pyobject = pyname.get_object() - return pyname if is_pyname else pyobject - - -def replace(code, pattern, goal): - """used by other refactorings""" - finder = similarfinder.RawSimilarFinder(code) - matches = list(finder.get_matches(pattern)) - ast = patchedast.get_patched_ast(code) - lines = codeanalyze.SourceLinesAdapter(code) - template = similarfinder.CodeTemplate(goal) - computer = _ChangeComputer(code, ast, lines, template, matches) - result = computer.get_changed() - if result is None: - return code - return result - - -class _ChangeComputer(object): - - def __init__(self, code, ast, lines, goal, matches): - self.source = code - self.goal = goal - self.matches = matches - self.ast = ast - self.lines = lines - self.matched_asts = {} - self._nearest_roots = {} - if self._is_expression(): - for match in self.matches: - self.matched_asts[match.ast] = match - - def get_changed(self): - if self._is_expression(): - result = self._get_node_text(self.ast) - if result == self.source: - return None - return result - else: - collector = codeanalyze.ChangeCollector(self.source) - last_end = -1 - for match in self.matches: - start, end = match.get_region() - if start < last_end: - if not self._is_expression(): - continue - last_end = end - replacement = self._get_matched_text(match) - collector.add_change(start, end, replacement) - return collector.get_changed() - - def _is_expression(self): - return self.matches and isinstance(self.matches[0], - similarfinder.ExpressionMatch) - - def _get_matched_text(self, match): - mapping = {} - for name in self.goal.get_names(): - node = match.get_ast(name) - if node is None: - raise similarfinder.BadNameInCheckError( - 'Unknown name <%s>' % name) - force = self._is_expression() and match.ast == node - mapping[name] = self._get_node_text(node, force) - unindented = self.goal.substitute(mapping) - return self._auto_indent(match.get_region()[0], unindented) - - def _get_node_text(self, node, force=False): - if not force and node in self.matched_asts: - return self._get_matched_text(self.matched_asts[node]) - start, end = patchedast.node_region(node) - main_text = self.source[start:end] - collector = codeanalyze.ChangeCollector(main_text) - for node in self._get_nearest_roots(node): - sub_start, sub_end = patchedast.node_region(node) - collector.add_change(sub_start - start, sub_end - start, - self._get_node_text(node)) - result = collector.get_changed() - if result is None: - return main_text - return result - - def _auto_indent(self, offset, text): - lineno = self.lines.get_line_number(offset) - indents = sourceutils.get_indents(self.lines, lineno) - result = [] - for index, line in enumerate(text.splitlines(True)): - if index != 0 and line.strip(): - result.append(' ' * indents) - result.append(line) - return ''.join(result) - - def _get_nearest_roots(self, node): - 
if node not in self._nearest_roots: - result = [] - for child in ast.get_child_nodes(node): - if child in self.matched_asts: - result.append(child) - else: - result.extend(self._get_nearest_roots(child)) - self._nearest_roots[node] = result - return self._nearest_roots[node] diff --git a/pymode/libs3/rope/refactor/similarfinder.py b/pymode/libs3/rope/refactor/similarfinder.py deleted file mode 100644 index 70ae7e15..00000000 --- a/pymode/libs3/rope/refactor/similarfinder.py +++ /dev/null @@ -1,362 +0,0 @@ -"""This module can be used for finding similar code""" -import re - -import rope.refactor.wildcards -from rope.base import codeanalyze, evaluate, exceptions, ast, builtins -from rope.refactor import (patchedast, sourceutils, occurrences, - wildcards, importutils) - - -class BadNameInCheckError(exceptions.RefactoringError): - pass - - -class SimilarFinder(object): - """`SimilarFinder` can be used to find similar pieces of code - - See the notes in the `rope.refactor.restructure` module for more - info. - - """ - - def __init__(self, pymodule, wildcards=None): - """Construct a SimilarFinder""" - self.source = pymodule.source_code - self.raw_finder = RawSimilarFinder( - pymodule.source_code, pymodule.get_ast(), self._does_match) - self.pymodule = pymodule - if wildcards is None: - self.wildcards = {} - for wildcard in [rope.refactor.wildcards. - DefaultWildcard(pymodule.pycore.project)]: - self.wildcards[wildcard.get_name()] = wildcard - else: - self.wildcards = wildcards - - def get_matches(self, code, args={}, start=0, end=None): - self.args = args - if end is None: - end = len(self.source) - skip_region = None - if 'skip' in args.get('', {}): - resource, region = args['']['skip'] - if resource == self.pymodule.get_resource(): - skip_region = region - return self.raw_finder.get_matches(code, start=start, end=end, - skip=skip_region) - - def get_match_regions(self, *args, **kwds): - for match in self.get_matches(*args, **kwds): - yield match.get_region() - - def _does_match(self, node, name): - arg = self.args.get(name, '') - kind = 'default' - if isinstance(arg, (tuple, list)): - kind = arg[0] - arg = arg[1] - suspect = wildcards.Suspect(self.pymodule, node, name) - return self.wildcards[kind].matches(suspect, arg) - - -class RawSimilarFinder(object): - """A class for finding similar expressions and statements""" - - def __init__(self, source, node=None, does_match=None): - if node is None: - node = ast.parse(source) - if does_match is None: - self.does_match = self._simple_does_match - else: - self.does_match = does_match - self._init_using_ast(node, source) - - def _simple_does_match(self, node, name): - return isinstance(node, (ast.expr, ast.Name)) - - def _init_using_ast(self, node, source): - self.source = source - self._matched_asts = {} - if not hasattr(node, 'region'): - patchedast.patch_ast(node, source) - self.ast = node - - def get_matches(self, code, start=0, end=None, skip=None): - """Search for `code` in source and return a list of `Match`\es - - `code` can contain wildcards. ``${name}`` matches normal - names and ``${?name} can match any expression. You can use - `Match.get_ast()` for getting the node that has matched a - given pattern. 
- - """ - if end is None: - end = len(self.source) - for match in self._get_matched_asts(code): - match_start, match_end = match.get_region() - if start <= match_start and match_end <= end: - if skip is not None and (skip[0] < match_end and - skip[1] > match_start): - continue - yield match - - def _get_matched_asts(self, code): - if code not in self._matched_asts: - wanted = self._create_pattern(code) - matches = _ASTMatcher(self.ast, wanted, - self.does_match).find_matches() - self._matched_asts[code] = matches - return self._matched_asts[code] - - def _create_pattern(self, expression): - expression = self._replace_wildcards(expression) - node = ast.parse(expression) - # Getting Module.Stmt.nodes - nodes = node.body - if len(nodes) == 1 and isinstance(nodes[0], ast.Expr): - # Getting Discard.expr - wanted = nodes[0].value - else: - wanted = nodes - return wanted - - def _replace_wildcards(self, expression): - ropevar = _RopeVariable() - template = CodeTemplate(expression) - mapping = {} - for name in template.get_names(): - mapping[name] = ropevar.get_var(name) - return template.substitute(mapping) - - -class _ASTMatcher(object): - - def __init__(self, body, pattern, does_match): - """Searches the given pattern in the body AST. - - body is an AST node and pattern can be either an AST node or - a list of ASTs nodes - """ - self.body = body - self.pattern = pattern - self.matches = None - self.ropevar = _RopeVariable() - self.matches_callback = does_match - - def find_matches(self): - if self.matches is None: - self.matches = [] - ast.call_for_nodes(self.body, self._check_node, recursive=True) - return self.matches - - def _check_node(self, node): - if isinstance(self.pattern, list): - self._check_statements(node) - else: - self._check_expression(node) - - def _check_expression(self, node): - mapping = {} - if self._match_nodes(self.pattern, node, mapping): - self.matches.append(ExpressionMatch(node, mapping)) - - def _check_statements(self, node): - for child in ast.get_children(node): - if isinstance(child, (list, tuple)): - self.__check_stmt_list(child) - - def __check_stmt_list(self, nodes): - for index in range(len(nodes)): - if len(nodes) - index >= len(self.pattern): - current_stmts = nodes[index:index + len(self.pattern)] - mapping = {} - if self._match_stmts(current_stmts, mapping): - self.matches.append(StatementMatch(current_stmts, mapping)) - - def _match_nodes(self, expected, node, mapping): - if isinstance(expected, ast.Name): - if self.ropevar.is_var(expected.id): - return self._match_wildcard(expected, node, mapping) - if not isinstance(expected, ast.AST): - return expected == node - if expected.__class__ != node.__class__: - return False - - children1 = self._get_children(expected) - children2 = self._get_children(node) - if len(children1) != len(children2): - return False - for child1, child2 in zip(children1, children2): - if isinstance(child1, ast.AST): - if not self._match_nodes(child1, child2, mapping): - return False - elif isinstance(child1, (list, tuple)): - if not isinstance(child2, (list, tuple)) or \ - len(child1) != len(child2): - return False - for c1, c2 in zip(child1, child2): - if not self._match_nodes(c1, c2, mapping): - return False - else: - if child1 != child2: - return False - return True - - def _get_children(self, node): - """Return not `ast.expr_context` children of `node`""" - children = ast.get_children(node) - return [child for child in children - if not isinstance(child, ast.expr_context)] - - def _match_stmts(self, current_stmts, mapping): - if 
len(current_stmts) != len(self.pattern): - return False - for stmt, expected in zip(current_stmts, self.pattern): - if not self._match_nodes(expected, stmt, mapping): - return False - return True - - def _match_wildcard(self, node1, node2, mapping): - name = self.ropevar.get_base(node1.id) - if name not in mapping: - if self.matches_callback(node2, name): - mapping[name] = node2 - return True - return False - else: - return self._match_nodes(mapping[name], node2, {}) - - -class Match(object): - - def __init__(self, mapping): - self.mapping = mapping - - def get_region(self): - """Returns match region""" - - def get_ast(self, name): - """Return the ast node that has matched rope variables""" - return self.mapping.get(name, None) - - -class ExpressionMatch(Match): - - def __init__(self, ast, mapping): - super(ExpressionMatch, self).__init__(mapping) - self.ast = ast - - def get_region(self): - return self.ast.region - - -class StatementMatch(Match): - - def __init__(self, ast_list, mapping): - super(StatementMatch, self).__init__(mapping) - self.ast_list = ast_list - - def get_region(self): - return self.ast_list[0].region[0], self.ast_list[-1].region[1] - - -class CodeTemplate(object): - - def __init__(self, template): - self.template = template - self._find_names() - - def _find_names(self): - self.names = {} - for match in CodeTemplate._get_pattern().finditer(self.template): - if 'name' in match.groupdict() and \ - match.group('name') is not None: - start, end = match.span('name') - name = self.template[start + 2:end - 1] - if name not in self.names: - self.names[name] = [] - self.names[name].append((start, end)) - - def get_names(self): - return list(self.names.keys()) - - def substitute(self, mapping): - collector = codeanalyze.ChangeCollector(self.template) - for name, occurrences in self.names.items(): - for region in occurrences: - collector.add_change(region[0], region[1], mapping[name]) - result = collector.get_changed() - if result is None: - return self.template - return result - - _match_pattern = None - - @classmethod - def _get_pattern(cls): - if cls._match_pattern is None: - pattern = codeanalyze.get_comment_pattern() + '|' + \ - codeanalyze.get_string_pattern() + '|' + \ - r'(?P\$\{[^\s\$\}]*\})' - cls._match_pattern = re.compile(pattern) - return cls._match_pattern - - -class _RopeVariable(object): - """Transform and identify rope inserted wildcards""" - - _normal_prefix = '__rope__variable_normal_' - _any_prefix = '__rope__variable_any_' - - def get_var(self, name): - if name.startswith('?'): - return self._get_any(name) - else: - return self._get_normal(name) - - def is_var(self, name): - return self._is_normal(name) or self._is_var(name) - - def get_base(self, name): - if self._is_normal(name): - return name[len(self._normal_prefix):] - if self._is_var(name): - return '?' 
+ name[len(self._any_prefix):] - - def _get_normal(self, name): - return self._normal_prefix + name - - def _get_any(self, name): - return self._any_prefix + name[1:] - - def _is_normal(self, name): - return name.startswith(self._normal_prefix) - - def _is_var(self, name): - return name.startswith(self._any_prefix) - - -def make_pattern(code, variables): - variables = set(variables) - collector = codeanalyze.ChangeCollector(code) - def does_match(node, name): - return isinstance(node, ast.Name) and node.id == name - finder = RawSimilarFinder(code, does_match=does_match) - for variable in variables: - for match in finder.get_matches('${%s}' % variable): - start, end = match.get_region() - collector.add_change(start, end, '${%s}' % variable) - result = collector.get_changed() - return result if result is not None else code - - -def _pydefined_to_str(pydefined): - address = [] - if isinstance(pydefined, (builtins.BuiltinClass, builtins.BuiltinFunction)): - return '__builtins__.' + pydefined.get_name() - else: - while pydefined.parent is not None: - address.insert(0, pydefined.get_name()) - pydefined = pydefined.parent - module_name = pydefined.pycore.modname(pydefined.resource) - return '.'.join(module_name.split('.') + address) diff --git a/pymode/libs3/rope/refactor/sourceutils.py b/pymode/libs3/rope/refactor/sourceutils.py deleted file mode 100644 index f64213db..00000000 --- a/pymode/libs3/rope/refactor/sourceutils.py +++ /dev/null @@ -1,92 +0,0 @@ -from rope.base import ast, codeanalyze - - -def get_indents(lines, lineno): - return codeanalyze.count_line_indents(lines.get_line(lineno)) - - -def find_minimum_indents(source_code): - result = 80 - lines = source_code.split('\n') - for line in lines: - if line.strip() == '': - continue - result = min(result, codeanalyze.count_line_indents(line)) - return result - - -def indent_lines(source_code, amount): - if amount == 0: - return source_code - lines = source_code.splitlines(True) - result = [] - for l in lines: - if l.strip() == '': - result.append('\n') - continue - if amount < 0: - indents = codeanalyze.count_line_indents(l) - result.append(max(0, indents + amount) * ' ' + l.lstrip()) - else: - result.append(' ' * amount + l) - return ''.join(result) - - -def fix_indentation(code, new_indents): - """Change the indentation of `code` to `new_indents`""" - min_indents = find_minimum_indents(code) - return indent_lines(code, new_indents - min_indents) - - -def add_methods(pymodule, class_scope, methods_sources): - source_code = pymodule.source_code - lines = pymodule.lines - insertion_line = class_scope.get_end() - if class_scope.get_scopes(): - insertion_line = class_scope.get_scopes()[-1].get_end() - insertion_offset = lines.get_line_end(insertion_line) - methods = '\n\n' + '\n\n'.join(methods_sources) - indented_methods = fix_indentation( - methods, get_indents(lines, class_scope.get_start()) + - get_indent(pymodule.pycore)) - result = [] - result.append(source_code[:insertion_offset]) - result.append(indented_methods) - result.append(source_code[insertion_offset:]) - return ''.join(result) - - -def get_body(pyfunction): - """Return unindented function body""" - scope = pyfunction.get_scope() - pymodule = pyfunction.get_module() - start, end = get_body_region(pyfunction) - return fix_indentation(pymodule.source_code[start:end], 0) - - -def get_body_region(defined): - """Return the start and end offsets of function body""" - scope = defined.get_scope() - pymodule = defined.get_module() - lines = pymodule.lines - node = defined.get_ast() - 
start_line = node.lineno - if defined.get_doc() is None: - start_line = node.body[0].lineno - elif len(node.body) > 1: - start_line = node.body[1].lineno - start = lines.get_line_start(start_line) - scope_start = pymodule.logical_lines.logical_line_in(scope.start) - if scope_start[1] >= start_line: - # a one-liner! - # XXX: what if colon appears in a string - start = pymodule.source_code.index(':', start) + 1 - while pymodule.source_code[start].isspace(): - start += 1 - end = min(lines.get_line_end(scope.end) + 1, len(pymodule.source_code)) - return start, end - - -def get_indent(pycore): - project = pycore.project - return project.prefs.get('indent_size', 4) diff --git a/pymode/libs3/rope/refactor/suites.py b/pymode/libs3/rope/refactor/suites.py deleted file mode 100644 index d955c819..00000000 --- a/pymode/libs3/rope/refactor/suites.py +++ /dev/null @@ -1,142 +0,0 @@ -from rope.base import ast - - -def find_visible(node, lines): - """Return the line which is visible from all `lines`""" - root = ast_suite_tree(node) - return find_visible_for_suite(root, lines) - - -def find_visible_for_suite(root, lines): - if len(lines) == 1: - return lines[0] - line1 = lines[0] - line2 = find_visible_for_suite(root, lines[1:]) - suite1 = root.find_suite(line1) - suite2 = root.find_suite(line2) - def valid(suite): - return suite is not None and not suite.ignored - if valid(suite1) and not valid(suite2): - return line1 - if not valid(suite1) and valid(suite2): - return line2 - if not valid(suite1) and not valid(suite2): - return None - while suite1 != suite2 and suite1.parent != suite2.parent: - if suite1._get_level() < suite2._get_level(): - line2 = suite2.get_start() - suite2 = suite2.parent - elif suite1._get_level() > suite2._get_level(): - line1 = suite1.get_start() - suite1 = suite1.parent - else: - line1 = suite1.get_start() - line2 = suite2.get_start() - suite1 = suite1.parent - suite2 = suite2.parent - if suite1 == suite2: - return min(line1, line2) - return min(suite1.get_start(), suite2.get_start()) - - -def ast_suite_tree(node): - if hasattr(node, 'lineno'): - lineno = node.lineno - else: - lineno = 1 - return Suite(node.body, lineno) - - -class Suite(object): - - def __init__(self, child_nodes, lineno, parent=None, ignored=False): - self.parent = parent - self.lineno = lineno - self.child_nodes = child_nodes - self._children = None - self.ignored = ignored - - def get_start(self): - if self.parent is None: - if self.child_nodes: - return self.local_start() - else: - return 1 - return self.lineno - - def get_children(self): - if self._children is None: - walker = _SuiteWalker(self) - for child in self.child_nodes: - ast.walk(child, walker) - self._children = walker.suites - return self._children - - def local_start(self): - return self.child_nodes[0].lineno - - def local_end(self): - end = self.child_nodes[-1].lineno - if self.get_children(): - end = max(end, self.get_children()[-1].local_end()) - return end - - def find_suite(self, line): - if line is None: - return None - for child in self.get_children(): - if child.local_start() <= line <= child.local_end(): - return child.find_suite(line) - return self - - def _get_level(self): - if self.parent is None: - return 0 - return self.parent._get_level() + 1 - - -class _SuiteWalker(object): - - def __init__(self, suite): - self.suite = suite - self.suites = [] - - def _If(self, node): - self._add_if_like_node(node) - - def _For(self, node): - self._add_if_like_node(node) - - def _While(self, node): - self._add_if_like_node(node) - - def 
_With(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - - def _TryFinally(self, node): - if len(node.finalbody) == 1 and \ - isinstance(node.body[0], ast.TryExcept): - self._TryExcept(node.body[0]) - else: - self.suites.append(Suite(node.body, node.lineno, self.suite)) - self.suites.append(Suite(node.finalbody, node.lineno, self.suite)) - - def _TryExcept(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - for handler in node.handlers: - self.suites.append(Suite(handler.body, node.lineno, self.suite)) - if node.orelse: - self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - - def _add_if_like_node(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - if node.orelse: - self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - - def _FunctionDef(self, node): - self.suites.append(Suite(node.body, node.lineno, - self.suite, ignored=True)) - - def _ClassDef(self, node): - self.suites.append(Suite(node.body, node.lineno, - self.suite, ignored=True)) diff --git a/pymode/libs3/rope/refactor/topackage.py b/pymode/libs3/rope/refactor/topackage.py deleted file mode 100644 index b7113979..00000000 --- a/pymode/libs3/rope/refactor/topackage.py +++ /dev/null @@ -1,32 +0,0 @@ -import rope.refactor.importutils -from rope.base.change import ChangeSet, ChangeContents, MoveResource, CreateFolder - - -class ModuleToPackage(object): - - def __init__(self, project, resource): - self.project = project - self.pycore = project.pycore - self.resource = resource - - def get_changes(self): - changes = ChangeSet('Transform <%s> module to package' % - self.resource.path) - new_content = self._transform_relatives_to_absolute(self.resource) - if new_content is not None: - changes.add_change(ChangeContents(self.resource, new_content)) - parent = self.resource.parent - name = self.resource.name[:-3] - changes.add_change(CreateFolder(parent, name)) - parent_path = parent.path + '/' - if not parent.path: - parent_path = '' - new_path = parent_path + '%s/__init__.py' % name - if self.resource.project == self.project: - changes.add_change(MoveResource(self.resource, new_path)) - return changes - - def _transform_relatives_to_absolute(self, resource): - pymodule = self.pycore.resource_to_pyobject(resource) - import_tools = rope.refactor.importutils.ImportTools(self.pycore) - return import_tools.relatives_to_absolutes(pymodule) diff --git a/pymode/libs3/rope/refactor/usefunction.py b/pymode/libs3/rope/refactor/usefunction.py deleted file mode 100644 index b0621525..00000000 --- a/pymode/libs3/rope/refactor/usefunction.py +++ /dev/null @@ -1,171 +0,0 @@ -from rope.base import (change, taskhandle, evaluate, - exceptions, pyobjects, pynames, ast) -from rope.refactor import restructure, sourceutils, similarfinder, importutils - - -class UseFunction(object): - """Try to use a function wherever possible""" - - def __init__(self, project, resource, offset): - self.project = project - self.offset = offset - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError('Unresolvable name selected') - self.pyfunction = pyname.get_object() - if not isinstance(self.pyfunction, pyobjects.PyFunction) or \ - not isinstance(self.pyfunction.parent, pyobjects.PyModule): - raise exceptions.RefactoringError( - 'Use function works for global functions, only.') - self.resource = self.pyfunction.get_module().get_resource() - 
self._check_returns() - - def _check_returns(self): - node = self.pyfunction.get_ast() - if _yield_count(node): - raise exceptions.RefactoringError('Use function should not ' - 'be used on generators.') - returns = _return_count(node) - if returns > 1: - raise exceptions.RefactoringError('usefunction: Function has more ' - 'than one return statement.') - if returns == 1 and not _returns_last(node): - raise exceptions.RefactoringError('usefunction: return should ' - 'be the last statement.') - - def get_changes(self, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.project.pycore.get_python_files() - changes = change.ChangeSet('Using function <%s>' % - self.pyfunction.get_name()) - if self.resource in resources: - newresources = list(resources) - newresources.remove(self.resource) - for c in self._restructure(newresources, task_handle).changes: - changes.add_change(c) - if self.resource in resources: - for c in self._restructure([self.resource], task_handle, - others=False).changes: - changes.add_change(c) - return changes - - def get_function_name(self): - return self.pyfunction.get_name() - - def _restructure(self, resources, task_handle, others=True): - body = self._get_body() - pattern = self._make_pattern() - goal = self._make_goal(import_=others) - imports = None - if others: - imports = ['import %s' % self._module_name()] - - body_region = sourceutils.get_body_region(self.pyfunction) - args_value = {'skip': (self.resource, body_region)} - args = {'': args_value} - - restructuring = restructure.Restructure( - self.project, pattern, goal, args=args, imports=imports) - return restructuring.get_changes(resources=resources, - task_handle=task_handle) - - def _find_temps(self): - return find_temps(self.project, self._get_body()) - - def _module_name(self): - return self.project.pycore.modname(self.resource) - - def _make_pattern(self): - params = self.pyfunction.get_param_names() - body = self._get_body() - body = restructure.replace(body, 'return', 'pass') - wildcards = list(params) - wildcards.extend(self._find_temps()) - if self._does_return(): - if self._is_expression(): - replacement = '${%s}' % self._rope_returned - else: - replacement = '%s = ${%s}' % (self._rope_result, - self._rope_returned) - body = restructure.replace( - body, 'return ${%s}' % self._rope_returned, - replacement) - wildcards.append(self._rope_result) - return similarfinder.make_pattern(body, wildcards) - - def _get_body(self): - return sourceutils.get_body(self.pyfunction) - - def _make_goal(self, import_=False): - params = self.pyfunction.get_param_names() - function_name = self.pyfunction.get_name() - if import_: - function_name = self._module_name() + '.' 
+ function_name - goal = '%s(%s)' % (function_name, - ', ' .join(('${%s}' % p) for p in params)) - if self._does_return() and not self._is_expression(): - goal = '${%s} = %s' % (self._rope_result, goal) - return goal - - def _does_return(self): - body = self._get_body() - removed_return = restructure.replace(body, 'return ${result}', '') - return removed_return != body - - def _is_expression(self): - return len(self.pyfunction.get_ast().body) == 1 - - _rope_result = '_rope__result' - _rope_returned = '_rope__returned' - - -def find_temps(project, code): - code = 'def f():\n' + sourceutils.indent_lines(code, 4) - pymodule = project.pycore.get_string_module(code) - result = [] - function_scope = pymodule.get_scope().get_scopes()[0] - for name, pyname in function_scope.get_names().items(): - if isinstance(pyname, pynames.AssignedName): - result.append(name) - return result - - -def _returns_last(node): - return node.body and isinstance(node.body[-1], ast.Return) - -def _yield_count(node): - visitor = _ReturnOrYieldFinder() - visitor.start_walking(node) - return visitor.yields - -def _return_count(node): - visitor = _ReturnOrYieldFinder() - visitor.start_walking(node) - return visitor.returns - -class _ReturnOrYieldFinder(object): - - def __init__(self): - self.returns = 0 - self.yields = 0 - - def _Return(self, node): - self.returns += 1 - - def _Yield(self, node): - self.yields += 1 - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def start_walking(self, node): - nodes = [node] - if isinstance(node, ast.FunctionDef): - nodes = ast.get_child_nodes(node) - for child in nodes: - ast.walk(child, self) diff --git a/pymode/libs3/rope/refactor/wildcards.py b/pymode/libs3/rope/refactor/wildcards.py deleted file mode 100644 index 6c487a2a..00000000 --- a/pymode/libs3/rope/refactor/wildcards.py +++ /dev/null @@ -1,176 +0,0 @@ -from rope.base import ast, evaluate, builtins, pyobjects -from rope.refactor import patchedast, occurrences - - -class Wildcard(object): - - def get_name(self): - """Return the name of this wildcard""" - - def matches(self, suspect, arg): - """Return `True` if `suspect` matches this wildcard""" - - -class Suspect(object): - - def __init__(self, pymodule, node, name): - self.name = name - self.pymodule = pymodule - self.node = node - - -class DefaultWildcard(object): - """The default restructuring wildcard - - The argument passed to this wildcard is in the - ``key1=value1,key2=value2,...`` format. 
Possible keys are: - - * name - for checking the reference - * type - for checking the type - * object - for checking the object - * instance - for checking types but similar to builtin isinstance - * exact - matching only occurrences with the same name as the wildcard - * unsure - matching unsure occurrences - - """ - - def __init__(self, project): - self.project = project - - def get_name(self): - return 'default' - - def matches(self, suspect, arg=''): - args = parse_arg(arg) - - if not self._check_exact(args, suspect): - return False - if not self._check_object(args, suspect): - return False - return True - - def _check_object(self, args, suspect): - kind = None - expected = None - unsure = args.get('unsure', False) - for check in ['name', 'object', 'type', 'instance']: - if check in args: - kind = check - expected = args[check] - if expected is not None: - checker = _CheckObject(self.project, expected, - kind, unsure=unsure) - return checker(suspect.pymodule, suspect.node) - return True - - def _check_exact(self, args, suspect): - node = suspect.node - if args.get('exact'): - if not isinstance(node, ast.Name) or not node.id == suspect.name: - return False - else: - if not isinstance(node, ast.expr): - return False - return True - - -def parse_arg(arg): - if isinstance(arg, dict): - return arg - result = {} - tokens = arg.split(',') - for token in tokens: - if '=' in token: - parts = token.split('=', 1) - result[parts[0].strip()] = parts[1].strip() - else: - result[token.strip()] = True - return result - - -class _CheckObject(object): - - def __init__(self, project, expected, kind='object', unsure=False): - self.project = project - self.kind = kind - self.unsure = unsure - self.expected = self._evaluate(expected) - - def __call__(self, pymodule, node): - pyname = self._evaluate_node(pymodule, node) - if pyname is None or self.expected is None: - return self.unsure - if self._unsure_pyname(pyname, unbound=self.kind=='name'): - return True - if self.kind == 'name': - return self._same_pyname(self.expected, pyname) - else: - pyobject = pyname.get_object() - if self.kind == 'object': - objects = [pyobject] - if self.kind == 'type': - objects = [pyobject.get_type()] - if self.kind == 'instance': - objects = [pyobject] - objects.extend(self._get_super_classes(pyobject)) - objects.extend(self._get_super_classes(pyobject.get_type())) - for pyobject in objects: - if self._same_pyobject(self.expected.get_object(), pyobject): - return True - return False - - def _get_super_classes(self, pyobject): - result = [] - if isinstance(pyobject, pyobjects.AbstractClass): - for superclass in pyobject.get_superclasses(): - result.append(superclass) - result.extend(self._get_super_classes(superclass)) - return result - - def _same_pyobject(self, expected, pyobject): - return expected == pyobject - - def _same_pyname(self, expected, pyname): - return occurrences.same_pyname(expected, pyname) - - def _unsure_pyname(self, pyname, unbound=True): - return self.unsure and occurrences.unsure_pyname(pyname, unbound) - - def _split_name(self, name): - parts = name.split('.') - expression, kind = parts[0], parts[-1] - if len(parts) == 1: - kind = 'name' - return expression, kind - - def _evaluate_node(self, pymodule, node): - scope = pymodule.get_scope().get_inner_scope_for_line(node.lineno) - expression = node - if isinstance(expression, ast.Name) and \ - isinstance(expression.ctx, ast.Store): - start, end = patchedast.node_region(expression) - text = pymodule.source_code[start:end] - return evaluate.eval_str(scope, 
text) - else: - return evaluate.eval_node(scope, expression) - - def _evaluate(self, code): - attributes = code.split('.') - pyname = None - if attributes[0] in ('__builtin__', '__builtins__'): - class _BuiltinsStub(object): - def get_attribute(self, name): - return builtins.builtins[name] - def __getitem__(self, name): - return builtins.builtins[name] - def __contains__(self, name): - return name in builtins.builtins - pyobject = _BuiltinsStub() - else: - pyobject = self.project.pycore.get_module(attributes[0]) - for attribute in attributes[1:]: - pyname = pyobject[attribute] - if pyname is None: - return None - pyobject = pyname.get_object() - return pyname diff --git a/pymode/rope.py b/pymode/rope.py index df02b9c8..3a20c267 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,4 +1,5 @@ -""" Rope support in pymode. """ +"""Integration with Rope library.""" + from __future__ import absolute_import, print_function import os.path @@ -6,7 +7,7 @@ import site import sys -from rope.base import project, libutils, exceptions, change, worder # noqa +from rope.base import project, libutils, exceptions, change, worder, pycore from rope.base.fscommands import FileSystemCommands # noqa from rope.base.taskhandle import TaskHandle # noqa from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa @@ -16,7 +17,7 @@ def look_ropeproject(path): - """ Search for ropeproject in current and parent dirs. + """Search for ropeproject in current and parent dirs. :return str|None: A finded path @@ -73,7 +74,7 @@ def complete(dot=False): cline = env.current.line[:col] env.debug('dot completion', cline) - if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith('\.'): + if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith('\.'): # noqa return env.stop("") proposals = get_proporsals(source, offset, dot=dot) @@ -105,7 +106,7 @@ def get_proporsals(source, offset, base='', dot=False): :return str: """ - with RopeContext() as ctx: + with RopeContext() as ctx: # noqa try: proposals = codeassist.code_assist( @@ -346,7 +347,10 @@ class RopeContext(object): """ A context manager to have a rope project context. """ - def __init__(self, path, project_path): + projects = {} + resource = {} + + def __init__(self, path=None, project_path=None): """ Init Rope context. """ self.path = path @@ -913,3 +917,19 @@ def _insert_import(name, module, ctx): progress = ProgressHandler('Apply changes ...') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes) + + +# Monkey patch Rope +def find_source_folders(self, folder): + """Look only python files an packages.""" + for resource in folder.get_folders(): + if self._is_package(resource): # noqa + return [folder] + + for resource in folder.get_files(): + if resource.name.endswith('.py'): + return [folder] + + return [] + +pycore.PyCore._find_source_folders = find_source_folders # noqa diff --git a/pymode/utils.py b/pymode/utils.py index eb947b88..1ce2e343 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -6,7 +6,7 @@ from contextlib import contextmanager import vim # noqa -from ._compat import StringIO, PY2 +from ._compat import StringIO DEBUG = int(vim.eval('g:pymode_debug')) @@ -37,4 +37,3 @@ def patch_paths(): Load required modules from the plugin's sources. 
""" sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs')) - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs2' if PY2 else 'libs3')) From a8ae99802f138da6e72ff1de762af5a486adaa01 Mon Sep 17 00:00:00 2001 From: Bryce Guinta Date: Thu, 22 Dec 2016 05:41:19 -0700 Subject: [PATCH 007/246] Add a 'frequent problems' section into README.rst to avoid non-issue issues being opened. --- README.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/README.rst b/README.rst index a47d460e..867029d5 100644 --- a/README.rst +++ b/README.rst @@ -163,6 +163,22 @@ You can override the default key bindings by redefining them in your `.vimrc`, f let g:pymode_doc_bind = "" +Frequent Problems +================= + +Read this section before opening an issue on the tracker. + +Python 3 Syntax +--------------- + +By default python-mode uses python 2 syntax checking. To enable python 3 +syntax checking (e.g. for async) add:: + + let g:pymode_python = 'python3' + +To your vimrc or exrc file + + Documentation ============= From 6a901cd5840eb0ee758948bc15474a488d498a13 Mon Sep 17 00:00:00 2001 From: Semyon Maryasin Date: Mon, 7 Dec 2015 20:03:07 +0300 Subject: [PATCH 008/246] Folding for `async def` subroutines Fix folding of "async def" functions. The async keyword will fold for python 2, however that shouldn't be a problem. --- plugin/pymode.vim | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 26541ae2..82dd25cb 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -39,7 +39,7 @@ call pymode#default("g:pymode_folding", 1) " Maximum file length to check for nested class/def statements call pymode#default("g:pymode_folding_nest_limit", 1000) " Change for folding customization (by example enable fold for 'if', 'for') -call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\) \w\+') +call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\|async\s\+def\) \w\+') " Enable/disable python motion operators call pymode#default("g:pymode_motion", 1) From 285f1f6d0507d37d659eed733226d5db592194e6 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 16 Dec 2016 17:15:00 -0800 Subject: [PATCH 009/246] Update readme. --- README.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 5c897bc1..7045f93c 100644 --- a/README.rst +++ b/README.rst @@ -8,8 +8,7 @@ *The project needs maintainers and contributors* -Actually, I have no time for support the project, so if you feel yourself as -strong don't be hesitate to contact me. +Slack Channel: https://python-mode.herokuapp.com/ ----- From 05008700a1c34fa89b838e36a793f4a721cc4a41 Mon Sep 17 00:00:00 2001 From: Bryce Guinta Date: Fri, 16 Dec 2016 21:25:21 -0700 Subject: [PATCH 010/246] breakpoint: Use tabs if expandtab is false. #718 --- autoload/pymode/breakpoint.vim | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index cf7b95be..c3189aad 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -35,7 +35,13 @@ fun! 
pymode#breakpoint#operate(lnum) "{{{ normal dd else let plnum = prevnonblank(a:lnum) - call append(line('.')-1, repeat(' ', indent(plnum)).g:pymode_breakpoint_cmd) + if &expandtab + let indents = repeat(' ', indent(plnum)) + else + let indents = repeat("\t", plnum / &shiftwidth) + endif + + call append(line('.')-1, indents.g:pymode_breakpoint_cmd) normal k endif From 4f6e134f912c90fda36d956b0de61e2d05c04f63 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Sat, 17 Dec 2016 13:53:06 -0800 Subject: [PATCH 011/246] Update Rope refactoring library. --- README.rst | 9 +- pylama.ini | 2 +- pymode/{libs2 => libs}/rope/__init__.py | 3 +- pymode/{libs2 => libs}/rope/base/__init__.py | 0 pymode/{libs2 => libs}/rope/base/arguments.py | 0 pymode/{libs2 => libs}/rope/base/ast.py | 15 +- pymode/{libs3 => libs}/rope/base/astutils.py | 3 + pymode/{libs2 => libs}/rope/base/builtins.py | 45 +- pymode/{libs2 => libs}/rope/base/change.py | 2 +- .../{libs2 => libs}/rope/base/codeanalyze.py | 40 +- .../rope/base/default_config.py | 11 +- pymode/{libs2 => libs}/rope/base/evaluate.py | 7 +- .../{libs2 => libs}/rope/base/exceptions.py | 0 .../{libs2 => libs}/rope/base/fscommands.py | 34 +- pymode/{libs2 => libs}/rope/base/history.py | 0 pymode/{libs2 => libs}/rope/base/libutils.py | 0 .../{libs2 => libs}/rope/base/oi/__init__.py | 0 pymode/{libs2 => libs}/rope/base/oi/doa.py | 9 +- pymode/libs/rope/base/oi/docstrings.py | 226 +++++ .../{libs2 => libs}/rope/base/oi/memorydb.py | 21 + .../{libs2 => libs}/rope/base/oi/objectdb.py | 12 +- .../rope/base/oi/objectinfo.py | 0 pymode/{libs2 => libs}/rope/base/oi/runmod.py | 32 +- pymode/{libs2 => libs}/rope/base/oi/soa.py | 0 pymode/{libs2 => libs}/rope/base/oi/soi.py | 52 +- .../{libs2 => libs}/rope/base/oi/transform.py | 0 pymode/{libs2 => libs}/rope/base/prefs.py | 0 pymode/{libs2 => libs}/rope/base/project.py | 18 +- pymode/{libs2 => libs}/rope/base/pycore.py | 0 pymode/{libs2 => libs}/rope/base/pynames.py | 0 .../{libs2 => libs}/rope/base/pynamesdef.py | 0 pymode/{libs2 => libs}/rope/base/pyobjects.py | 2 +- .../{libs2 => libs}/rope/base/pyobjectsdef.py | 54 +- pymode/{libs2 => libs}/rope/base/pyscopes.py | 0 .../rope/base/resourceobserver.py | 0 pymode/{libs2 => libs}/rope/base/resources.py | 2 +- pymode/{libs2 => libs}/rope/base/simplify.py | 0 pymode/{libs2 => libs}/rope/base/stdmods.py | 25 +- .../{libs2 => libs}/rope/base/taskhandle.py | 0 .../rope/base/utils/__init__.py} | 0 pymode/libs/rope/base/utils/datastructures.py | 67 ++ pymode/libs/rope/base/utils/pycompat.py | 45 + pymode/{libs2 => libs}/rope/base/worder.py | 0 .../{libs2 => libs}/rope/contrib/__init__.py | 0 .../rope/contrib/autoimport.py | 0 .../rope/contrib/changestack.py | 0 .../rope/contrib/codeassist.py | 35 +- .../rope/contrib/finderrors.py | 2 +- pymode/{libs2 => libs}/rope/contrib/findit.py | 0 .../rope/contrib/fixmodnames.py | 0 .../{libs2 => libs}/rope/contrib/fixsyntax.py | 6 +- .../{libs2 => libs}/rope/contrib/generate.py | 0 .../{libs2 => libs}/rope/refactor/__init__.py | 0 .../rope/refactor/change_signature.py | 4 +- .../rope/refactor/encapsulate_field.py | 0 .../{libs2 => libs}/rope/refactor/extract.py | 44 +- .../rope/refactor/functionutils.py | 0 .../rope/refactor/importutils/__init__.py | 9 +- .../rope/refactor/importutils/actions.py | 0 .../rope/refactor/importutils/importinfo.py | 0 .../refactor/importutils/module_imports.py | 97 +-- .../{libs2 => libs}/rope/refactor/inline.py | 2 +- .../rope/refactor/introduce_factory.py | 0 .../rope/refactor/introduce_parameter.py | 0 
.../rope/refactor/localtofield.py | 0 .../rope/refactor/method_object.py | 0 pymode/{libs2 => libs}/rope/refactor/move.py | 140 +++- .../rope/refactor/multiproject.py | 0 .../rope/refactor/occurrences.py | 20 +- .../rope/refactor/patchedast.py | 120 ++- .../{libs2 => libs}/rope/refactor/rename.py | 0 .../rope/refactor/restructure.py | 0 .../rope/refactor/similarfinder.py | 2 +- .../rope/refactor/sourceutils.py | 0 .../{libs2 => libs}/rope/refactor/suites.py | 21 +- .../rope/refactor/topackage.py | 0 .../rope/refactor/usefunction.py | 0 .../rope/refactor/wildcards.py | 0 pymode/libs2/rope/base/astutils.py | 61 -- pymode/libs3/rope/__init__.py | 18 - pymode/libs3/rope/base/__init__.py | 8 - pymode/libs3/rope/base/arguments.py | 109 --- pymode/libs3/rope/base/ast.py | 68 -- pymode/libs3/rope/base/builtins.py | 782 ----------------- pymode/libs3/rope/base/change.py | 448 ---------- pymode/libs3/rope/base/codeanalyze.py | 358 -------- pymode/libs3/rope/base/default_config.py | 86 -- pymode/libs3/rope/base/evaluate.py | 325 -------- pymode/libs3/rope/base/exceptions.py | 61 -- pymode/libs3/rope/base/fscommands.py | 267 ------ pymode/libs3/rope/base/history.py | 235 ------ pymode/libs3/rope/base/libutils.py | 65 -- pymode/libs3/rope/base/oi/__init__.py | 38 - pymode/libs3/rope/base/oi/doa.py | 162 ---- pymode/libs3/rope/base/oi/memorydb.py | 106 --- pymode/libs3/rope/base/oi/objectdb.py | 192 ----- pymode/libs3/rope/base/oi/objectinfo.py | 232 ----- pymode/libs3/rope/base/oi/runmod.py | 215 ----- pymode/libs3/rope/base/oi/soa.py | 136 --- pymode/libs3/rope/base/oi/soi.py | 186 ----- pymode/libs3/rope/base/oi/transform.py | 285 ------- pymode/libs3/rope/base/prefs.py | 41 - pymode/libs3/rope/base/project.py | 375 --------- pymode/libs3/rope/base/pycore.py | 410 --------- pymode/libs3/rope/base/pynames.py | 199 ----- pymode/libs3/rope/base/pynamesdef.py | 55 -- pymode/libs3/rope/base/pyobjects.py | 311 ------- pymode/libs3/rope/base/pyobjectsdef.py | 555 ------------ pymode/libs3/rope/base/pyscopes.py | 313 ------- pymode/libs3/rope/base/resourceobserver.py | 271 ------ pymode/libs3/rope/base/resources.py | 212 ----- pymode/libs3/rope/base/simplify.py | 55 -- pymode/libs3/rope/base/stdmods.py | 43 - pymode/libs3/rope/base/taskhandle.py | 133 --- pymode/libs3/rope/base/utils.py | 78 -- pymode/libs3/rope/base/worder.py | 524 ------------ pymode/libs3/rope/contrib/__init__.py | 7 - pymode/libs3/rope/contrib/autoimport.py | 217 ----- pymode/libs3/rope/contrib/changestack.py | 52 -- pymode/libs3/rope/contrib/codeassist.py | 648 -------------- pymode/libs3/rope/contrib/finderrors.py | 91 -- pymode/libs3/rope/contrib/findit.py | 110 --- pymode/libs3/rope/contrib/fixmodnames.py | 69 -- pymode/libs3/rope/contrib/fixsyntax.py | 178 ---- pymode/libs3/rope/contrib/generate.py | 355 -------- pymode/libs3/rope/refactor/__init__.py | 55 -- .../libs3/rope/refactor/change_signature.py | 340 -------- .../libs3/rope/refactor/encapsulate_field.py | 202 ----- pymode/libs3/rope/refactor/extract.py | 789 ------------------ pymode/libs3/rope/refactor/functionutils.py | 222 ----- .../rope/refactor/importutils/__init__.py | 299 ------- .../rope/refactor/importutils/actions.py | 359 -------- .../rope/refactor/importutils/importinfo.py | 201 ----- .../refactor/importutils/module_imports.py | 451 ---------- pymode/libs3/rope/refactor/inline.py | 615 -------------- .../libs3/rope/refactor/introduce_factory.py | 133 --- .../rope/refactor/introduce_parameter.py | 95 --- pymode/libs3/rope/refactor/localtofield.py | 50 -- 
pymode/libs3/rope/refactor/method_object.py | 87 -- pymode/libs3/rope/refactor/move.py | 628 -------------- pymode/libs3/rope/refactor/multiproject.py | 78 -- pymode/libs3/rope/refactor/occurrences.py | 334 -------- pymode/libs3/rope/refactor/patchedast.py | 734 ---------------- pymode/libs3/rope/refactor/rename.py | 216 ----- pymode/libs3/rope/refactor/restructure.py | 307 ------- pymode/libs3/rope/refactor/similarfinder.py | 362 -------- pymode/libs3/rope/refactor/sourceutils.py | 92 -- pymode/libs3/rope/refactor/suites.py | 142 ---- pymode/libs3/rope/refactor/topackage.py | 32 - pymode/libs3/rope/refactor/usefunction.py | 171 ---- pymode/libs3/rope/refactor/wildcards.py | 176 ---- pymode/rope.py | 32 +- pymode/utils.py | 3 +- 153 files changed, 1000 insertions(+), 17188 deletions(-) rename pymode/{libs2 => libs}/rope/__init__.py (92%) rename pymode/{libs2 => libs}/rope/base/__init__.py (100%) rename pymode/{libs2 => libs}/rope/base/arguments.py (100%) rename pymode/{libs2 => libs}/rope/base/ast.py (89%) rename pymode/{libs3 => libs}/rope/base/astutils.py (95%) rename pymode/{libs2 => libs}/rope/base/builtins.py (94%) rename pymode/{libs2 => libs}/rope/base/change.py (99%) rename pymode/{libs2 => libs}/rope/base/codeanalyze.py (91%) rename pymode/{libs2 => libs}/rope/base/default_config.py (91%) rename pymode/{libs2 => libs}/rope/base/evaluate.py (98%) rename pymode/{libs2 => libs}/rope/base/exceptions.py (100%) rename pymode/{libs2 => libs}/rope/base/fscommands.py (89%) rename pymode/{libs2 => libs}/rope/base/history.py (100%) rename pymode/{libs2 => libs}/rope/base/libutils.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/__init__.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/doa.py (96%) create mode 100644 pymode/libs/rope/base/oi/docstrings.py rename pymode/{libs2 => libs}/rope/base/oi/memorydb.py (87%) rename pymode/{libs2 => libs}/rope/base/oi/objectdb.py (93%) rename pymode/{libs2 => libs}/rope/base/oi/objectinfo.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/runmod.py (88%) rename pymode/{libs2 => libs}/rope/base/oi/soa.py (100%) rename pymode/{libs2 => libs}/rope/base/oi/soi.py (78%) rename pymode/{libs2 => libs}/rope/base/oi/transform.py (100%) rename pymode/{libs2 => libs}/rope/base/prefs.py (100%) rename pymode/{libs2 => libs}/rope/base/project.py (97%) rename pymode/{libs2 => libs}/rope/base/pycore.py (100%) rename pymode/{libs2 => libs}/rope/base/pynames.py (100%) rename pymode/{libs2 => libs}/rope/base/pynamesdef.py (100%) rename pymode/{libs2 => libs}/rope/base/pyobjects.py (99%) rename pymode/{libs2 => libs}/rope/base/pyobjectsdef.py (92%) rename pymode/{libs2 => libs}/rope/base/pyscopes.py (100%) rename pymode/{libs2 => libs}/rope/base/resourceobserver.py (100%) rename pymode/{libs2 => libs}/rope/base/resources.py (99%) rename pymode/{libs2 => libs}/rope/base/simplify.py (100%) rename pymode/{libs2 => libs}/rope/base/stdmods.py (60%) rename pymode/{libs2 => libs}/rope/base/taskhandle.py (100%) rename pymode/{libs2/rope/base/utils.py => libs/rope/base/utils/__init__.py} (100%) create mode 100644 pymode/libs/rope/base/utils/datastructures.py create mode 100644 pymode/libs/rope/base/utils/pycompat.py rename pymode/{libs2 => libs}/rope/base/worder.py (100%) rename pymode/{libs2 => libs}/rope/contrib/__init__.py (100%) rename pymode/{libs2 => libs}/rope/contrib/autoimport.py (100%) rename pymode/{libs2 => libs}/rope/contrib/changestack.py (100%) rename pymode/{libs2 => libs}/rope/contrib/codeassist.py (96%) rename pymode/{libs2 => 
libs}/rope/contrib/finderrors.py (98%) rename pymode/{libs2 => libs}/rope/contrib/findit.py (100%) rename pymode/{libs2 => libs}/rope/contrib/fixmodnames.py (100%) rename pymode/{libs2 => libs}/rope/contrib/fixsyntax.py (97%) rename pymode/{libs2 => libs}/rope/contrib/generate.py (100%) rename pymode/{libs2 => libs}/rope/refactor/__init__.py (100%) rename pymode/{libs2 => libs}/rope/refactor/change_signature.py (98%) rename pymode/{libs2 => libs}/rope/refactor/encapsulate_field.py (100%) rename pymode/{libs2 => libs}/rope/refactor/extract.py (95%) rename pymode/{libs2 => libs}/rope/refactor/functionutils.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/__init__.py (97%) rename pymode/{libs2 => libs}/rope/refactor/importutils/actions.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/importinfo.py (100%) rename pymode/{libs2 => libs}/rope/refactor/importutils/module_imports.py (89%) rename pymode/{libs2 => libs}/rope/refactor/inline.py (99%) rename pymode/{libs2 => libs}/rope/refactor/introduce_factory.py (100%) rename pymode/{libs2 => libs}/rope/refactor/introduce_parameter.py (100%) rename pymode/{libs2 => libs}/rope/refactor/localtofield.py (100%) rename pymode/{libs2 => libs}/rope/refactor/method_object.py (100%) rename pymode/{libs2 => libs}/rope/refactor/move.py (85%) rename pymode/{libs2 => libs}/rope/refactor/multiproject.py (100%) rename pymode/{libs2 => libs}/rope/refactor/occurrences.py (95%) rename pymode/{libs2 => libs}/rope/refactor/patchedast.py (87%) rename pymode/{libs2 => libs}/rope/refactor/rename.py (100%) rename pymode/{libs2 => libs}/rope/refactor/restructure.py (100%) rename pymode/{libs2 => libs}/rope/refactor/similarfinder.py (99%) rename pymode/{libs2 => libs}/rope/refactor/sourceutils.py (100%) rename pymode/{libs2 => libs}/rope/refactor/suites.py (86%) rename pymode/{libs2 => libs}/rope/refactor/topackage.py (100%) rename pymode/{libs2 => libs}/rope/refactor/usefunction.py (100%) rename pymode/{libs2 => libs}/rope/refactor/wildcards.py (100%) delete mode 100644 pymode/libs2/rope/base/astutils.py delete mode 100644 pymode/libs3/rope/__init__.py delete mode 100644 pymode/libs3/rope/base/__init__.py delete mode 100644 pymode/libs3/rope/base/arguments.py delete mode 100644 pymode/libs3/rope/base/ast.py delete mode 100644 pymode/libs3/rope/base/builtins.py delete mode 100644 pymode/libs3/rope/base/change.py delete mode 100644 pymode/libs3/rope/base/codeanalyze.py delete mode 100644 pymode/libs3/rope/base/default_config.py delete mode 100644 pymode/libs3/rope/base/evaluate.py delete mode 100644 pymode/libs3/rope/base/exceptions.py delete mode 100644 pymode/libs3/rope/base/fscommands.py delete mode 100644 pymode/libs3/rope/base/history.py delete mode 100644 pymode/libs3/rope/base/libutils.py delete mode 100644 pymode/libs3/rope/base/oi/__init__.py delete mode 100644 pymode/libs3/rope/base/oi/doa.py delete mode 100644 pymode/libs3/rope/base/oi/memorydb.py delete mode 100644 pymode/libs3/rope/base/oi/objectdb.py delete mode 100644 pymode/libs3/rope/base/oi/objectinfo.py delete mode 100644 pymode/libs3/rope/base/oi/runmod.py delete mode 100644 pymode/libs3/rope/base/oi/soa.py delete mode 100644 pymode/libs3/rope/base/oi/soi.py delete mode 100644 pymode/libs3/rope/base/oi/transform.py delete mode 100644 pymode/libs3/rope/base/prefs.py delete mode 100644 pymode/libs3/rope/base/project.py delete mode 100644 pymode/libs3/rope/base/pycore.py delete mode 100644 pymode/libs3/rope/base/pynames.py delete mode 100644 
pymode/libs3/rope/base/pynamesdef.py delete mode 100644 pymode/libs3/rope/base/pyobjects.py delete mode 100644 pymode/libs3/rope/base/pyobjectsdef.py delete mode 100644 pymode/libs3/rope/base/pyscopes.py delete mode 100644 pymode/libs3/rope/base/resourceobserver.py delete mode 100644 pymode/libs3/rope/base/resources.py delete mode 100644 pymode/libs3/rope/base/simplify.py delete mode 100644 pymode/libs3/rope/base/stdmods.py delete mode 100644 pymode/libs3/rope/base/taskhandle.py delete mode 100644 pymode/libs3/rope/base/utils.py delete mode 100644 pymode/libs3/rope/base/worder.py delete mode 100644 pymode/libs3/rope/contrib/__init__.py delete mode 100644 pymode/libs3/rope/contrib/autoimport.py delete mode 100644 pymode/libs3/rope/contrib/changestack.py delete mode 100644 pymode/libs3/rope/contrib/codeassist.py delete mode 100644 pymode/libs3/rope/contrib/finderrors.py delete mode 100644 pymode/libs3/rope/contrib/findit.py delete mode 100644 pymode/libs3/rope/contrib/fixmodnames.py delete mode 100644 pymode/libs3/rope/contrib/fixsyntax.py delete mode 100644 pymode/libs3/rope/contrib/generate.py delete mode 100644 pymode/libs3/rope/refactor/__init__.py delete mode 100644 pymode/libs3/rope/refactor/change_signature.py delete mode 100644 pymode/libs3/rope/refactor/encapsulate_field.py delete mode 100644 pymode/libs3/rope/refactor/extract.py delete mode 100644 pymode/libs3/rope/refactor/functionutils.py delete mode 100644 pymode/libs3/rope/refactor/importutils/__init__.py delete mode 100644 pymode/libs3/rope/refactor/importutils/actions.py delete mode 100644 pymode/libs3/rope/refactor/importutils/importinfo.py delete mode 100644 pymode/libs3/rope/refactor/importutils/module_imports.py delete mode 100644 pymode/libs3/rope/refactor/inline.py delete mode 100644 pymode/libs3/rope/refactor/introduce_factory.py delete mode 100644 pymode/libs3/rope/refactor/introduce_parameter.py delete mode 100644 pymode/libs3/rope/refactor/localtofield.py delete mode 100644 pymode/libs3/rope/refactor/method_object.py delete mode 100644 pymode/libs3/rope/refactor/move.py delete mode 100644 pymode/libs3/rope/refactor/multiproject.py delete mode 100644 pymode/libs3/rope/refactor/occurrences.py delete mode 100644 pymode/libs3/rope/refactor/patchedast.py delete mode 100644 pymode/libs3/rope/refactor/rename.py delete mode 100644 pymode/libs3/rope/refactor/restructure.py delete mode 100644 pymode/libs3/rope/refactor/similarfinder.py delete mode 100644 pymode/libs3/rope/refactor/sourceutils.py delete mode 100644 pymode/libs3/rope/refactor/suites.py delete mode 100644 pymode/libs3/rope/refactor/topackage.py delete mode 100644 pymode/libs3/rope/refactor/usefunction.py delete mode 100644 pymode/libs3/rope/refactor/wildcards.py diff --git a/README.rst b/README.rst index 7045f93c..a47d460e 100644 --- a/README.rst +++ b/README.rst @@ -6,9 +6,9 @@ ----- -*The project needs maintainers and contributors* +*The project needs contributors* -Slack Channel: https://python-mode.herokuapp.com/ +** Python-mode Slack Channel is here: https://python-mode.herokuapp.com/ ** ----- @@ -180,7 +180,10 @@ at https://github.com/klen/python-mode/issues Contributing ============ -See the `AUTHORS` file. +* Kirill Klenov (horneds@gmail.com) +* Bryce Guinta (https://github.com/brycepg) + +Also see the `AUTHORS` file. 
Development of python-mode happens at github: https://github.com/klen/python-mode diff --git a/pylama.ini b/pylama.ini index b8d3f375..0394772f 100644 --- a/pylama.ini +++ b/pylama.ini @@ -5,4 +5,4 @@ linters=pep8,pyflakes,pylint skip=1 [pylama:pylint] -disable=E1120,E1130,E1103,W1401 +disable=E1120,E1130,E1103,W1401,F0001 diff --git a/pymode/libs2/rope/__init__.py b/pymode/libs/rope/__init__.py similarity index 92% rename from pymode/libs2/rope/__init__.py rename to pymode/libs/rope/__init__.py index c8e11f68..624b6279 100644 --- a/pymode/libs2/rope/__init__.py +++ b/pymode/libs/rope/__init__.py @@ -1,8 +1,9 @@ """rope, a python refactoring library""" INFO = __doc__ -VERSION = '0.10.2' +VERSION = '0.10.3' COPYRIGHT = """\ +Copyright (C) 2014-2015 Matej Cepl Copyright (C) 2006-2012 Ali Gholami Rudi Copyright (C) 2009-2012 Anton Gritsay diff --git a/pymode/libs2/rope/base/__init__.py b/pymode/libs/rope/base/__init__.py similarity index 100% rename from pymode/libs2/rope/base/__init__.py rename to pymode/libs/rope/base/__init__.py diff --git a/pymode/libs2/rope/base/arguments.py b/pymode/libs/rope/base/arguments.py similarity index 100% rename from pymode/libs2/rope/base/arguments.py rename to pymode/libs/rope/base/arguments.py diff --git a/pymode/libs2/rope/base/ast.py b/pymode/libs/rope/base/ast.py similarity index 89% rename from pymode/libs2/rope/base/ast.py rename to pymode/libs/rope/base/ast.py index f6a9d88d..d43c83c5 100644 --- a/pymode/libs2/rope/base/ast.py +++ b/pymode/libs/rope/base/ast.py @@ -3,18 +3,23 @@ from rope.base import fscommands +try: + unicode +except NameError: + unicode = str + def parse(source, filename=''): # NOTE: the raw string should be given to `compile` function if isinstance(source, unicode): source = fscommands.unicode_to_file_data(source) - if '\r' in source: - source = source.replace('\r\n', '\n').replace('\r', '\n') - if not source.endswith('\n'): - source += '\n' + if b'\r' in source: + source = source.replace(b'\r\n', b'\n').replace(b'\r', b'\n') + if not source.endswith(b'\n'): + source += b'\n' try: return compile(source, filename, 'exec', _ast.PyCF_ONLY_AST) - except (TypeError, ValueError), e: + except (TypeError, ValueError) as e: error = SyntaxError() error.lineno = 1 error.filename = filename diff --git a/pymode/libs3/rope/base/astutils.py b/pymode/libs/rope/base/astutils.py similarity index 95% rename from pymode/libs3/rope/base/astutils.py rename to pymode/libs/rope/base/astutils.py index 8ace1a92..6c0b3d78 100644 --- a/pymode/libs3/rope/base/astutils.py +++ b/pymode/libs/rope/base/astutils.py @@ -40,6 +40,9 @@ def _added(self, node, levels): def _Name(self, node): self._add_node(node) + def _ExceptHandler(self, node): + self.names.append((node.name, [])) + def _Tuple(self, node): new_levels = [] if self.levels is not None: diff --git a/pymode/libs2/rope/base/builtins.py b/pymode/libs/rope/base/builtins.py similarity index 94% rename from pymode/libs2/rope/base/builtins.py rename to pymode/libs/rope/base/builtins.py index 5bb84859..bc42f720 100644 --- a/pymode/libs2/rope/base/builtins.py +++ b/pymode/libs/rope/base/builtins.py @@ -1,8 +1,13 @@ """This module trys to support builtin types and functions.""" import inspect +try: + raw_input +except NameError: + raw_input = input import rope.base.evaluate -from rope.base import pynames, pyobjects, arguments, utils, ast +from rope.base.utils import pycompat +from rope.base import pynames, pyobjects, arguments, utils class BuiltinModule(pyobjects.AbstractModule): @@ -32,7 +37,7 @@ def 
attributes(self): result.update(self.initial) if self.pycore is not None: submodules = self.pycore._builtin_submodules(self.name) - for name, module in submodules.iteritems(): + for name, module in submodules.items(): result[name] = rope.base.builtins.BuiltinName(module) return result @@ -266,7 +271,10 @@ def __init__(self, holding=None): # Getting methods collector('__getitem__', function=self._list_get) collector('pop', function=self._list_get) - collector('__getslice__', function=self._self_get) + try: + collector('__getslice__', function=self._list_get) + except AttributeError: + pass super(List, self).__init__(list, collector.attributes) @@ -290,6 +298,10 @@ def _self_set(self, context): def _list_get(self, context): if self.holding is not None: + args = context.get_arguments(['self', 'key']) + if (len(args) > 1 and args[1] is not None and + args[1].get_type() == builtins['slice'].get_object()): + return get_list(self.holding) return self.holding return context.get_per_name() @@ -407,7 +419,7 @@ def __init__(self, *objects): if objects: first = objects[0] attributes = { - '__getitem__': BuiltinName(BuiltinFunction(first)), + '__getitem__': BuiltinName(BuiltinFunction(first)), # TODO: add slice support '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)), @@ -487,14 +499,21 @@ def __init__(self): collector = _AttributeCollector(str) collector('__iter__', get_iterator(self_object), check_existence=False) - self_methods = ['__getitem__', '__getslice__', 'capitalize', 'center', - 'decode', 'encode', 'expandtabs', 'join', 'ljust', + self_methods = ['__getitem__', 'capitalize', 'center', + 'encode', 'expandtabs', 'join', 'ljust', 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill'] for method in self_methods: collector(method, self_object) + py2_self_methods = ["__getslice__", "decode"] + for method in py2_self_methods: + try: + collector(method, self_object) + except AttributeError: + pass + for method in ['rsplit', 'split', 'splitlines']: collector(method, get_list(self_object)) @@ -568,7 +587,7 @@ def __init__(self): attributes = {} def add(name, returned=None, function=None): - builtin = getattr(file, name, None) + builtin = getattr(open, name, None) attributes[name] = BuiltinName( BuiltinFunction(returned=returned, function=function, builtin=builtin)) @@ -578,7 +597,7 @@ def add(name, returned=None, function=None): for method in ['close', 'flush', 'lineno', 'isatty', 'seek', 'tell', 'truncate', 'write', 'writelines']: add(method) - super(File, self).__init__(file, attributes) + super(File, self).__init__(open, attributes) get_file = _create_builtin_getter(File) @@ -642,12 +661,12 @@ def get_name(self): return 'lambda' def get_param_names(self, special_args=True): - result = [node.id for node in self.arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in self.arguments.args + if isinstance(node, pycompat.ast_arg_type)] if self.arguments.vararg: - result.append('*' + self.arguments.vararg) + result.append('*' + pycompat.get_ast_arg_arg(self.arguments.vararg)) if self.arguments.kwarg: - result.append('**' + self.arguments.kwarg) + result.append('**' + pycompat.get_ast_arg_arg(self.arguments.kwarg)) return result @property @@ -787,4 +806,4 @@ def _input_function(args): builtin=raw_input)), } -builtins = BuiltinModule('__builtin__', initial=_initial_builtins) +builtins = 
BuiltinModule(pycompat.builtins.__name__, initial=_initial_builtins) diff --git a/pymode/libs2/rope/base/change.py b/pymode/libs/rope/base/change.py similarity index 99% rename from pymode/libs2/rope/base/change.py rename to pymode/libs/rope/base/change.py index e9764484..fe2ebf43 100644 --- a/pymode/libs2/rope/base/change.py +++ b/pymode/libs/rope/base/change.py @@ -369,7 +369,7 @@ def _create_resource(self, file_name, kind='file'): fscommands.create_file(resource_path) else: fscommands.create_folder(resource_path) - except IOError, e: + except IOError as e: raise exceptions.RopeError(e) diff --git a/pymode/libs2/rope/base/codeanalyze.py b/pymode/libs/rope/base/codeanalyze.py similarity index 91% rename from pymode/libs2/rope/base/codeanalyze.py rename to pymode/libs/rope/base/codeanalyze.py index 87061912..1704e9ad 100644 --- a/pymode/libs2/rope/base/codeanalyze.py +++ b/pymode/libs/rope/base/codeanalyze.py @@ -19,9 +19,7 @@ def get_changed(self): if not self.changes: return None - def compare_changes(change1, change2): - return cmp(change1[:2], change2[:2]) - self.changes.sort(compare_changes) + self.changes.sort(key=lambda x: x[:2]) pieces = [] last_changed = 0 for change in self.changes: @@ -131,31 +129,31 @@ def __call__(self): i += 1 return result - _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]') + # Matches all backslashes before the token, to detect escaped quotes + _main_tokens = re.compile(r'(\\*)((\'\'\'|"""|\'|")|#|\[|\]|\{|\}|\(|\))') def _analyze_line(self, line): - char = None - for match in self._main_chars.finditer(line): - char = match.group() - i = match.start() - if char in '\'"': + token = None + for match in self._main_tokens.finditer(line): + prefix = match.group(1) + token = match.group(2) + # Skip any tokens which are escaped + if len(prefix) % 2 == 1: + continue + if token in ["'''", '"""', "'", '"']: if not self.in_string: - self.in_string = char - if char * 3 == line[i:i + 3]: - self.in_string = char * 3 - elif self.in_string == line[i:i + len(self.in_string)] and \ - not (i > 0 and line[i - 1] == '\\' and - not (i > 1 and line[i - 2] == '\\')): + self.in_string = token + elif self.in_string == token: self.in_string = '' if self.in_string: continue - if char == '#': + if token == '#': break - if char in '([{': + if token in '([{': self.open_count += 1 - elif char in ')]}': + elif token in ')]}': self.open_count -= 1 - if line and char != '#' and line.endswith('\\'): + if line and token != '#' and line.endswith('\\'): self.continuation = True else: self.continuation = False @@ -177,7 +175,7 @@ def logical_line_in(self, line_number): block_start = get_block_start(self.lines, line_number, indents) try: return self._block_logical_line(block_start, line_number) - except IndentationError, e: + except IndentationError as e: tries += 1 if tries == 5: raise e @@ -222,7 +220,7 @@ def _calculate_logical(self, readline, line_number): if line_number <= end: return (start, end) last_end = end + 1 - except tokenize.TokenError, e: + except tokenize.TokenError as e: current = e.args[1][0] return (last_end, max(last_end, current - 1)) return (last_end, None) diff --git a/pymode/libs2/rope/base/default_config.py b/pymode/libs/rope/base/default_config.py similarity index 91% rename from pymode/libs2/rope/base/default_config.py rename to pymode/libs/rope/base/default_config.py index 3745e306..45e1fb46 100644 --- a/pymode/libs2/rope/base/default_config.py +++ b/pymode/libs/rope/base/default_config.py @@ -1,4 +1,5 @@ # The default ``config.py`` +# flake8: noqa def 
set_prefs(prefs): @@ -14,8 +15,10 @@ def set_prefs(prefs): # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' prefs['ignored_resources'] = [ - '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', - '.tox', '.env', 'node_modules', 'bower_components'] + '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', + '.git', '.tox', '.env', 'env', 'venv', 'node_modules', + 'bower_components' + ] # Specifies which files should be considered python files. It is # useful when you have scripts inside your project. Only files @@ -80,6 +83,10 @@ def set_prefs(prefs): # appear in the importing namespace. prefs['ignore_bad_imports'] = False + # If `True`, rope will insert new module imports as + # `from import ` by default. + prefs['prefer_module_from_imports'] = False + # If `True`, rope will transform a comma list of imports into # multiple separate import statements when organizing # imports. diff --git a/pymode/libs2/rope/base/evaluate.py b/pymode/libs/rope/base/evaluate.py similarity index 98% rename from pymode/libs2/rope/base/evaluate.py rename to pymode/libs/rope/base/evaluate.py index faf09407..f4323923 100644 --- a/pymode/libs2/rope/base/evaluate.py +++ b/pymode/libs/rope/base/evaluate.py @@ -2,6 +2,7 @@ import rope.base.pynames import rope.base.pyobjects from rope.base import ast, astutils, exceptions, pyobjects, arguments, worder +from rope.base.utils import pycompat BadIdentifierError = exceptions.BadIdentifierError @@ -290,7 +291,11 @@ def _Subscript(self, node): self._call_function(node.value, '__getitem__', [node.slice.value]) elif isinstance(node.slice, ast.Slice): - self._call_function(node.value, '__getslice__') + self._call_function(node.value, '__getitem__', + [node.slice]) + + def _Slice(self, node): + self.result = self._get_builtin_name('slice') def _call_function(self, node, function_name, other_args=None): pyname = eval_node(self.scope, node) diff --git a/pymode/libs2/rope/base/exceptions.py b/pymode/libs/rope/base/exceptions.py similarity index 100% rename from pymode/libs2/rope/base/exceptions.py rename to pymode/libs/rope/base/exceptions.py diff --git a/pymode/libs2/rope/base/fscommands.py b/pymode/libs/rope/base/fscommands.py similarity index 89% rename from pymode/libs2/rope/base/fscommands.py rename to pymode/libs/rope/base/fscommands.py index daf118a0..3564ed91 100644 --- a/pymode/libs2/rope/base/fscommands.py +++ b/pymode/libs/rope/base/fscommands.py @@ -10,6 +10,12 @@ import shutil import subprocess +import rope.base.utils.pycompat as pycompat + +try: + unicode +except NameError: + unicode = str def create_fscommands(root): dirlist = os.listdir(root) @@ -240,29 +246,43 @@ def read_file_coding(path): def read_str_coding(source): + if type(source) == bytes: + newline = b'\n' + else: + newline = '\n' + #try: + # source = source.decode("utf-8") + #except AttributeError: + # pass try: - first = source.index('\n') + 1 - second = source.index('\n', first) + 1 + first = source.index(newline) + 1 + second = source.index(newline, first) + 1 except ValueError: second = len(source) return _find_coding(source[:second]) def _find_coding(text): - coding = 'coding' + if isinstance(text, pycompat.str): + text = text.encode('utf-8') + coding = b'coding' + to_chr = chr if pycompat.PY3 else lambda x: x try: start = text.index(coding) + len(coding) - if text[start] not in '=:': + if text[start] not in b'=:': return start += 1 - while start < len(text) and text[start].isspace(): + while start < len(text) and 
to_chr(text[start]).isspace(): start += 1 end = start while end < len(text): c = text[end] - if not c.isalnum() and c not in '-_': + if not to_chr(c).isalnum() and c not in b'-_': break end += 1 - return text[start:end] + result = text[start:end] + if isinstance(result, bytes): + result = result.decode('utf-8') + return result except ValueError: pass diff --git a/pymode/libs2/rope/base/history.py b/pymode/libs/rope/base/history.py similarity index 100% rename from pymode/libs2/rope/base/history.py rename to pymode/libs/rope/base/history.py diff --git a/pymode/libs2/rope/base/libutils.py b/pymode/libs/rope/base/libutils.py similarity index 100% rename from pymode/libs2/rope/base/libutils.py rename to pymode/libs/rope/base/libutils.py diff --git a/pymode/libs2/rope/base/oi/__init__.py b/pymode/libs/rope/base/oi/__init__.py similarity index 100% rename from pymode/libs2/rope/base/oi/__init__.py rename to pymode/libs/rope/base/oi/__init__.py diff --git a/pymode/libs2/rope/base/oi/doa.py b/pymode/libs/rope/base/oi/doa.py similarity index 96% rename from pymode/libs2/rope/base/oi/doa.py rename to pymode/libs/rope/base/oi/doa.py index 1b2a00fc..3f314c66 100644 --- a/pymode/libs2/rope/base/oi/doa.py +++ b/pymode/libs/rope/base/oi/doa.py @@ -1,4 +1,7 @@ -import cPickle as pickle +try: + import pickle +except ImportError: + import cPickle as pickle import marshal import os import socket @@ -62,7 +65,7 @@ def _init_data_receiving(self): self.receiving_thread.start() def _receive_information(self): - #temp = open('/dev/shm/info', 'w') + #temp = open('/dev/shm/info', 'wb') for data in self.receiver.receive_data(): self.analyze_data(data) #temp.write(str(data) + '\n') @@ -125,7 +128,7 @@ def get_send_info(self): def receive_data(self): conn, addr = self.server_socket.accept() self.server_socket.close() - my_file = conn.makefile('r') + my_file = conn.makefile('rb') while True: try: yield pickle.load(my_file) diff --git a/pymode/libs/rope/base/oi/docstrings.py b/pymode/libs/rope/base/oi/docstrings.py new file mode 100644 index 00000000..4519e126 --- /dev/null +++ b/pymode/libs/rope/base/oi/docstrings.py @@ -0,0 +1,226 @@ +""" +Hinting the type using docstring of class/function. + +It's an irreplaceable thing if you are using Dependency Injection with passive class: +http://www.martinfowler.com/articles/injection.html + +Some code extracted (or based on code) from: +https://github.com/davidhalter/jedi/blob/b489019f5bd5750051122b94cc767df47751ecb7/jedi/evaluate/docstrings.py +Thanks to @davidhalter for this utils under MIT License. 
+ +Similar solutions: + + - https://www.jetbrains.com/pycharm/help/type-hinting-in-pycharm.html + - https://www.python.org/dev/peps/pep-0484/#type-comments + - http://www.pydev.org/manual_adv_type_hints.html + - https://jedi.readthedocs.org/en/latest/docs/features.html#type-hinting + +Discussions: + + - https://groups.google.com/d/topic/rope-dev/JlAzmZ83K1M/discussion + - https://groups.google.com/d/topic/rope-dev/LCFNN98vckI/discussion + +""" +import re +from ast import literal_eval + +from rope.base.exceptions import AttributeNotFoundError +from rope.base.evaluate import ScopeNameFinder +from rope.base.pyobjects import PyClass, PyFunction + +PEP0484_PATTERNS = [ + re.compile(r'type:\s*([^\n, ]+)'), +] + +DOCSTRING_PARAM_PATTERNS = [ + r'\s*:type\s+%s:\s*([^\n, ]+)', # Sphinx + r'\s*:param\s+(\w+)\s+%s:[^\n]+', # Sphinx param with type + r'\s*@type\s+%s:\s*([^\n, ]+)', # Epydoc +] + +DOCSTRING_RETURN_PATTERNS = [ + re.compile(r'\s*:rtype:\s*([^\n, ]+)', re.M), # Sphinx + re.compile(r'\s*@rtype:\s*([^\n, ]+)', re.M), # Epydoc +] + +REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`') + +try: + from numpydoc.docscrape import NumpyDocString +except ImportError: + def _search_param_in_numpydocstr(docstr, param_str): + return [] +else: + def _search_param_in_numpydocstr(docstr, param_str): + """Search `docstr` (in numpydoc format) for type(-s) of `param_str`.""" + params = NumpyDocString(docstr)._parsed_data['Parameters'] + for p_name, p_type, p_descr in params: + if p_name == param_str: + m = re.match('([^,]+(,[^,]+)*?)(,[ ]*optional)?$', p_type) + if m: + p_type = m.group(1) + + if p_type.startswith('{'): + types = set(type(x).__name__ for x in literal_eval(p_type)) + return list(types) + else: + return [p_type] + return [] + + +def hint_pep0484(pyname): + from rope.base.oi.soi import _get_lineno_for_node + lineno = _get_lineno_for_node(pyname.assignments[0].ast_node) + holding_scope = pyname.module.get_scope().get_inner_scope_for_line(lineno) + line = holding_scope._get_global_scope()._scope_finder.lines.get_line(lineno) + if '#' in line: + type_strs = _search_type_in_pep0484(line.split('#', 1)[1]) + if type_strs: + return _resolve_type(type_strs[0], holding_scope.pyobject) + + +def _search_type_in_pep0484(code): + """ For more info see: + https://www.python.org/dev/peps/pep-0484/#type-comments + + >>> _search_type_in_pep0484('type: int') + ['int'] + """ + for p in PEP0484_PATTERNS: + match = p.search(code) + if match: + return [match.group(1)] + + +def hint_param(pyfunc, param_name): + type_strs = None + func = pyfunc + while not type_strs and func: + if func.get_doc(): + type_strs = _search_param_in_docstr(func.get_doc(), param_name) + func = _get_superfunc(func) + + if type_strs: + return _resolve_type(type_strs[0], pyfunc) + + +def _get_superfunc(pyfunc): + + if not isinstance(pyfunc.parent, PyClass): + return + + for cls in _get_mro(pyfunc.parent)[1:]: + try: + superfunc = cls.get_attribute(pyfunc.get_name()).get_object() + except AttributeNotFoundError: + pass + else: + if isinstance(superfunc, PyFunction): + return superfunc + + +def _get_mro(pyclass): + # FIXME: to use real mro() result + l = [pyclass] + for cls in l: + for super_cls in cls.get_superclasses(): + if isinstance(super_cls, PyClass) and super_cls not in l: + l.append(super_cls) + return l + + +def _resolve_type(type_name, pyobj): + type_ = None + if '.' 
not in type_name: + try: + type_ = pyobj.get_module().get_scope().get_name(type_name).get_object() + except Exception: + pass + else: + mod_name, attr_name = type_name.rsplit('.', 1) + try: + mod_finder = ScopeNameFinder(pyobj.get_module()) + mod = mod_finder._find_module(mod_name).get_object() + type_ = mod.get_attribute(attr_name).get_object() + except Exception: + pass + return type_ + + +def _search_param_in_docstr(docstr, param_str): + """ + Search `docstr` for type(-s) of `param_str`. + + >>> _search_param_in_docstr(':type param: int', 'param') + ['int'] + >>> _search_param_in_docstr('@type param: int', 'param') + ['int'] + >>> _search_param_in_docstr( + ... ':type param: :class:`threading.Thread`', 'param') + ['threading.Thread'] + >>> bool(_search_param_in_docstr('no document', 'param')) + False + >>> _search_param_in_docstr(':param int param: some description', 'param') + ['int'] + + """ + patterns = [re.compile(p % re.escape(param_str)) + for p in DOCSTRING_PARAM_PATTERNS] + for pattern in patterns: + match = pattern.search(docstr) + if match: + return [_strip_rst_role(match.group(1))] + + return (_search_param_in_numpydocstr(docstr, param_str) or + []) + + +def _strip_rst_role(type_str): + """ + Strip off the part looks like a ReST role in `type_str`. + + >>> _strip_rst_role(':class:`ClassName`') # strip off :class: + 'ClassName' + >>> _strip_rst_role(':py:obj:`module.Object`') # works with domain + 'module.Object' + >>> _strip_rst_role('ClassName') # do nothing when not ReST role + 'ClassName' + + See also: + http://sphinx-doc.org/domains.html#cross-referencing-python-objects + + """ + match = REST_ROLE_PATTERN.match(type_str) + if match: + return match.group(1) + else: + return type_str + + +def hint_return(pyfunc): + type_str = None + func = pyfunc + while not type_str and func: + if func.get_doc(): + type_str = _search_return_in_docstr(func.get_doc()) + func = _get_superfunc(func) + if type_str: + return _resolve_type(type_str, pyfunc) + + +def _search_return_in_docstr(code): + for p in DOCSTRING_RETURN_PATTERNS: + match = p.search(code) + if match: + return _strip_rst_role(match.group(1)) + + +def hint_attr(pyclass, attr_name): + type_strs = None + for cls in _get_mro(pyclass): + if cls.get_doc(): + type_strs = _search_param_in_docstr(cls.get_doc(), attr_name) + if type_strs: + break + if type_strs: + return _resolve_type(type_strs[0], pyclass) diff --git a/pymode/libs2/rope/base/oi/memorydb.py b/pymode/libs/rope/base/oi/memorydb.py similarity index 87% rename from pymode/libs2/rope/base/oi/memorydb.py rename to pymode/libs/rope/base/oi/memorydb.py index f49075ca..01c814ce 100644 --- a/pymode/libs2/rope/base/oi/memorydb.py +++ b/pymode/libs/rope/base/oi/memorydb.py @@ -21,6 +21,16 @@ def _load_files(self): def keys(self): return self._files.keys() + def __iter__(self): + for f in self._files: + yield f + + def __len__(self): + return len(self._files) + + def __setitem__(self): + raise NotImplementedError() + def __contains__(self, key): return key in self._files @@ -76,6 +86,17 @@ def __getitem__(self, key): def __delitem__(self, key): del self.scopes[key] + def __iter__(self): + for s in self.scopes: + yield s + + def __len__(self): + return len(self.scopes) + + def __setitem__(self): + raise NotImplementedError() + + class ScopeInfo(objectdb.ScopeInfo): diff --git a/pymode/libs2/rope/base/oi/objectdb.py b/pymode/libs/rope/base/oi/objectdb.py similarity index 93% rename from pymode/libs2/rope/base/oi/objectdb.py rename to pymode/libs/rope/base/oi/objectdb.py index 
6f988add..61f2711d 100644 --- a/pymode/libs2/rope/base/oi/objectdb.py +++ b/pymode/libs/rope/base/oi/objectdb.py @@ -1,4 +1,8 @@ -import UserDict +from __future__ import print_function +try: + from collections import MutableMapping +except ImportError: + from UserDict import DictMixin as MutableMapping class ObjectDB(object): @@ -78,7 +82,7 @@ def _get_scope_info(self, path, key, readonly=True): self.files[path].create_scope(key) result = self.files[path][key] if isinstance(result, dict): - print self.files, self.files[path], self.files[path][key] + print(self.files, self.files[path], self.files[path][key]) return result def _file_removed(self, path): @@ -120,13 +124,13 @@ def add_call(self, parameters, returned): raise NotImplementedError() -class FileInfo(UserDict.DictMixin): +class FileInfo(MutableMapping): def create_scope(self, key): pass -class FileDict(UserDict.DictMixin): +class FileDict(MutableMapping): def create(self, key): pass diff --git a/pymode/libs2/rope/base/oi/objectinfo.py b/pymode/libs/rope/base/oi/objectinfo.py similarity index 100% rename from pymode/libs2/rope/base/oi/objectinfo.py rename to pymode/libs/rope/base/oi/objectinfo.py diff --git a/pymode/libs2/rope/base/oi/runmod.py b/pymode/libs/rope/base/oi/runmod.py similarity index 88% rename from pymode/libs2/rope/base/oi/runmod.py rename to pymode/libs/rope/base/oi/runmod.py index e332d7e6..ba0184c1 100644 --- a/pymode/libs2/rope/base/oi/runmod.py +++ b/pymode/libs/rope/base/oi/runmod.py @@ -1,13 +1,16 @@ - def __rope_start_everything(): import os import sys import socket - import cPickle as pickle + try: + import pickle + except ImportError: + import cPickle as pickle import marshal import inspect import types import threading + import rope.base.utils.pycompat as pycompat class _MessageSender(object): @@ -19,7 +22,7 @@ class _SocketSender(_MessageSender): def __init__(self, port): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(('127.0.0.1', port)) - self.my_file = s.makefile('w') + self.my_file = s.makefile('wb') def send_data(self, data): if not self.my_file.closed: @@ -76,8 +79,9 @@ def on_function_call(self, frame, event, arg): code = frame.f_code for argname in code.co_varnames[:code.co_argcount]: try: - args.append(self._object_to_persisted_form( - frame.f_locals[argname])) + argvalue = self._object_to_persisted_form( + frame.f_locals[argname]) + args.append(argvalue) except (TypeError, AttributeError): args.append(('unknown',)) try: @@ -97,7 +101,6 @@ def _is_an_interesting_call(self, frame): # return False #return not frame.f_back or # not self._is_code_inside_project(frame.f_back.f_code) - if not self._is_code_inside_project(frame.f_code) and \ (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)): @@ -125,7 +128,7 @@ def _get_persisted_class(self, object_): return ('unknown',) def _get_persisted_builtin(self, object_): - if isinstance(object_, (str, unicode)): + if isinstance(object_, pycompat.string_types): return ('builtin', 'str') if isinstance(object_, list): holding = None @@ -137,7 +140,9 @@ def _get_persisted_builtin(self, object_): keys = None values = None if len(object_) > 0: - keys = object_.keys()[0] + # @todo - fix it properly, why is __locals__ being + # duplicated ? 
+ keys = [key for key in object_.keys() if key != '__locals__'][0] values = object_[keys] return ('builtin', 'dict', self._object_to_persisted_form(keys), @@ -166,14 +171,14 @@ def _object_to_persisted_form(self, object_): if isinstance(object_, types.CodeType): return self._get_persisted_code(object_) if isinstance(object_, types.FunctionType): - return self._get_persisted_code(object_.func_code) + return self._get_persisted_code(object_.__code__) if isinstance(object_, types.MethodType): - return self._get_persisted_code(object_.im_func.func_code) + return self._get_persisted_code(object_.__func__.__code__) if isinstance(object_, types.ModuleType): return self._get_persisted_module(object_) - if isinstance(object_, (str, unicode, list, dict, tuple, set)): + if isinstance(object_, pycompat.string_types + (list, dict, tuple, set)): return self._get_persisted_builtin(object_) - if isinstance(object_, (types.TypeType, types.ClassType)): + if isinstance(object_, type): return self._get_persisted_class(object_) return ('instance', self._get_persisted_class(type(object_))) @@ -204,10 +209,11 @@ def _realpath(path): run_globals.update({'__name__': '__main__', '__builtins__': __builtins__, '__file__': file_to_run}) + if send_info != '-': data_sender = _FunctionCallDataSender(send_info, project_root) del sys.argv[1:4] - execfile(file_to_run, run_globals) + pycompat.execfile(file_to_run, run_globals) if send_info != '-': data_sender.close() diff --git a/pymode/libs2/rope/base/oi/soa.py b/pymode/libs/rope/base/oi/soa.py similarity index 100% rename from pymode/libs2/rope/base/oi/soa.py rename to pymode/libs/rope/base/oi/soa.py diff --git a/pymode/libs2/rope/base/oi/soi.py b/pymode/libs/rope/base/oi/soi.py similarity index 78% rename from pymode/libs2/rope/base/oi/soi.py rename to pymode/libs/rope/base/oi/soi.py index 5a11b5ef..f39e6a8a 100644 --- a/pymode/libs2/rope/base/oi/soi.py +++ b/pymode/libs/rope/base/oi/soi.py @@ -8,6 +8,7 @@ import rope.base.pynames import rope.base.pyobjects from rope.base import evaluate, utils, arguments +from rope.base.oi.docstrings import hint_return, hint_param, hint_attr, hint_pep0484 _ignore_inferred = utils.ignore_exception( @@ -28,7 +29,12 @@ def infer_returned_object(pyfunction, args): pyfunction.get_param_names(special_args=False)) object_info.function_called(pyfunction, params, result) return result - return object_info.get_returned(pyfunction, args) + result = object_info.get_returned(pyfunction, args) + if result is not None: + return result + type_ = hint_return(pyfunction) + if type_ is not None: + return rope.base.pyobjects.PyObject(type_) @_ignore_inferred @@ -62,9 +68,42 @@ def infer_assigned_object(pyname): return for assignment in reversed(pyname.assignments): result = _infer_assignment(assignment, pyname.module) - if result is not None: + if isinstance(result, rope.base.builtins.BuiltinUnknown) and result.get_name() == 'NotImplementedType': + break + elif result == rope.base.pyobjects.get_unknown(): + break + elif result is not None: return result + hinting_result = hint_pep0484(pyname) + if hinting_result is not None: + return hinting_result + + hinting_result = _infer_assigned_object_by_hint(pyname) + if hinting_result is not None: + return hinting_result + + return result + + +def _infer_assigned_object_by_hint(pyname): + lineno = _get_lineno_for_node(pyname.assignments[0].ast_node) + holding_scope = pyname.module.get_scope().get_inner_scope_for_line(lineno) + pyobject = holding_scope.pyobject + if isinstance(pyobject, rope.base.pyobjects.PyClass): 
+ pyclass = pyobject + elif (isinstance(pyobject, rope.base.pyobjectsdef.PyFunction) and + isinstance(pyobject.parent, rope.base.pyobjects.PyClass)): + pyclass = pyobject.parent + else: + return + for name, attr in pyclass.get_attributes().items(): + if attr is pyname: + type_ = hint_attr(pyclass, name) + if type_ is not None: + return rope.base.pyobjects.PyObject(type_) + break + def get_passed_objects(pyfunction, parameter_index): object_info = pyfunction.pycore.object_info @@ -106,8 +145,15 @@ def _infer_returned(pyobject, args): def _parameter_objects(pyobject): + result = [] params = pyobject.get_param_names(special_args=False) - return [rope.base.pyobjects.get_unknown()] * len(params) + for name in params: + type_ = hint_param(pyobject, name) + if type_ is not None: + result.append(rope.base.pyobjects.PyObject(type_)) + else: + result.append(rope.base.pyobjects.get_unknown()) + return result # handling `rope.base.pynames.AssignmentValue` diff --git a/pymode/libs2/rope/base/oi/transform.py b/pymode/libs/rope/base/oi/transform.py similarity index 100% rename from pymode/libs2/rope/base/oi/transform.py rename to pymode/libs/rope/base/oi/transform.py diff --git a/pymode/libs2/rope/base/prefs.py b/pymode/libs/rope/base/prefs.py similarity index 100% rename from pymode/libs2/rope/base/prefs.py rename to pymode/libs/rope/base/prefs.py diff --git a/pymode/libs2/rope/base/project.py b/pymode/libs/rope/base/project.py similarity index 97% rename from pymode/libs2/rope/base/project.py rename to pymode/libs/rope/base/project.py index 23597f8c..2feef36c 100644 --- a/pymode/libs2/rope/base/project.py +++ b/pymode/libs/rope/base/project.py @@ -1,14 +1,19 @@ -import cPickle as pickle import os import shutil import sys import warnings import rope.base.fscommands -from rope.base import exceptions, taskhandle, prefs, history, pycore, utils import rope.base.resourceobserver as resourceobserver -from rope.base.resources import File, Folder, _ResourceMatcher +import rope.base.utils.pycompat as pycompat +from rope.base import exceptions, taskhandle, prefs, history, pycore, utils from rope.base.exceptions import ModuleNotFoundError +from rope.base.resources import File, Folder, _ResourceMatcher + +try: + import pickle +except ImportError: + import cPickle as pickle class _Project(object): @@ -257,7 +262,7 @@ def _init_prefs(self, prefs): '__file__': config.real_path}) if config.exists(): config = self.ropefolder.get_child('config.py') - execfile(config.real_path, run_globals) + pycompat.execfile(config.real_path, run_globals) else: exec(self._default_config(), run_globals) if 'set_prefs' in run_globals: @@ -453,6 +458,11 @@ def _realpath(path): Is equivalent to ``realpath(abspath(expanduser(path)))``. + Of the particular notice is the hack dealing with the unfortunate + sitaution of running native-Windows python (os.name == 'nt') inside + of Cygwin (abspath starts with '/'), which apparently normal + os.path.realpath completely messes up. 
+ """ # there is a bug in cygwin for os.path.abspath() for abs paths if sys.platform == 'cygwin': diff --git a/pymode/libs2/rope/base/pycore.py b/pymode/libs/rope/base/pycore.py similarity index 100% rename from pymode/libs2/rope/base/pycore.py rename to pymode/libs/rope/base/pycore.py diff --git a/pymode/libs2/rope/base/pynames.py b/pymode/libs/rope/base/pynames.py similarity index 100% rename from pymode/libs2/rope/base/pynames.py rename to pymode/libs/rope/base/pynames.py diff --git a/pymode/libs2/rope/base/pynamesdef.py b/pymode/libs/rope/base/pynamesdef.py similarity index 100% rename from pymode/libs2/rope/base/pynamesdef.py rename to pymode/libs/rope/base/pynamesdef.py diff --git a/pymode/libs2/rope/base/pyobjects.py b/pymode/libs/rope/base/pyobjects.py similarity index 99% rename from pymode/libs2/rope/base/pyobjects.py rename to pymode/libs/rope/base/pyobjects.py index 76be3040..fd4d1c82 100644 --- a/pymode/libs2/rope/base/pyobjects.py +++ b/pymode/libs/rope/base/pyobjects.py @@ -106,7 +106,7 @@ def get_unknown(): write:: if pyname.get_object() == get_unknown(): - print 'cannot determine what this pyname holds' + print('cannot determine what this pyname holds') Rope could have used `None` for indicating unknown objects but we had to check that in many places. So actually this method diff --git a/pymode/libs2/rope/base/pyobjectsdef.py b/pymode/libs/rope/base/pyobjectsdef.py similarity index 92% rename from pymode/libs2/rope/base/pyobjectsdef.py rename to pymode/libs/rope/base/pyobjectsdef.py index a738b4de..b7aef627 100644 --- a/pymode/libs2/rope/base/pyobjectsdef.py +++ b/pymode/libs/rope/base/pyobjectsdef.py @@ -1,11 +1,17 @@ +import rope.base.builtins import rope.base.codeanalyze import rope.base.evaluate -import rope.base.builtins +import rope.base.libutils import rope.base.oi.soi import rope.base.pyscopes -import rope.base.libutils from rope.base import (pynamesdef as pynames, exceptions, ast, astutils, pyobjects, fscommands, arguments, utils) +from rope.base.utils import pycompat + +try: + unicode +except NameError: + unicode = str class PyFunction(pyobjects.PyFunction): @@ -71,13 +77,13 @@ def get_name(self): def get_param_names(self, special_args=True): # TODO: handle tuple parameters - result = [node.id for node in self.arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in self.arguments.args + if isinstance(node, pycompat.ast_arg_type)] if special_args: if self.arguments.vararg: - result.append(self.arguments.vararg) + result.append(pycompat.get_ast_arg_arg(self.arguments.vararg)) if self.arguments.kwarg: - result.append(self.arguments.kwarg) + result.append(pycompat.get_ast_arg_arg(self.arguments.kwarg)) return result def get_kind(self): @@ -181,9 +187,9 @@ def _init_source(self, pycore, source_code, resource): else: source_bytes = source_code ast_node = ast.parse(source_bytes, filename=filename) - except SyntaxError, e: + except SyntaxError as e: raise exceptions.ModuleSyntaxError(filename, e.lineno, e.msg) - except UnicodeDecodeError, e: + except UnicodeDecodeError as e: raise exceptions.ModuleSyntaxError(filename, 1, '%s' % (e.reason)) return source_code, ast_node @@ -226,7 +232,7 @@ def _create_structural_attributes(self): result = {} modname = rope.base.libutils.modname(self.resource) extension_submodules = self.pycore._builtin_submodules(modname) - for name, module in extension_submodules.iteritems(): + for name, module in extension_submodules.items(): result[name] = rope.base.builtins.BuiltinName(module) if 
self.resource is None: return result @@ -370,26 +376,34 @@ def _assigned(self, name, assignment): def _update_evaluated(self, targets, assigned, evaluation='', eval_type=False): result = {} - names = astutils.get_name_levels(targets) - for name, levels in names: - assignment = pynames.AssignmentValue(assigned, levels, + if isinstance(targets, str): + assignment = pynames.AssignmentValue(assigned, [], evaluation, eval_type) - self._assigned(name, assignment) + self._assigned(targets, assignment) + else: + names = astutils.get_name_levels(targets) + for name, levels in names: + assignment = pynames.AssignmentValue(assigned, levels, + evaluation, eval_type) + self._assigned(name, assignment) return result def _With(self, node): - if node.optional_vars: - self._update_evaluated(node.optional_vars, - node.context_expr, '.__enter__()') + for item in pycompat.get_ast_with_items(node): + if item.optional_vars: + self._update_evaluated(item.optional_vars, + item.context_expr, '.__enter__()') for child in node.body: ast.walk(child, self) def _excepthandler(self, node): - if node.name is not None and isinstance(node.name, ast.Name): + node_name_type = str if pycompat.PY3 else ast.Name + if node.name is not None and isinstance(node.name, node_name_type): type_node = node.type if isinstance(node.type, ast.Tuple) and type_node.elts: type_node = type_node.elts[0] self._update_evaluated(node.name, type_node, eval_type=True) + for child in node.body: ast.walk(child, self) @@ -465,8 +479,10 @@ def _FunctionDef(self, node): _ScopeVisitor._FunctionDef(self, node) if len(node.args.args) > 0: first = node.args.args[0] - if isinstance(first, ast.Name): - new_visitor = _ClassInitVisitor(self, first.id) + new_visitor = None + if isinstance(first, pycompat.ast_arg_type): + new_visitor = _ClassInitVisitor(self, pycompat.get_ast_arg_arg(first)) + if new_visitor is not None: for child in ast.get_child_nodes(node): ast.walk(child, new_visitor) diff --git a/pymode/libs2/rope/base/pyscopes.py b/pymode/libs/rope/base/pyscopes.py similarity index 100% rename from pymode/libs2/rope/base/pyscopes.py rename to pymode/libs/rope/base/pyscopes.py diff --git a/pymode/libs2/rope/base/resourceobserver.py b/pymode/libs/rope/base/resourceobserver.py similarity index 100% rename from pymode/libs2/rope/base/resourceobserver.py rename to pymode/libs/rope/base/resourceobserver.py diff --git a/pymode/libs2/rope/base/resources.py b/pymode/libs/rope/base/resources.py similarity index 99% rename from pymode/libs2/rope/base/resources.py rename to pymode/libs/rope/base/resources.py index aac755f0..af8ac0a2 100644 --- a/pymode/libs2/rope/base/resources.py +++ b/pymode/libs/rope/base/resources.py @@ -109,7 +109,7 @@ def read(self): data = self.read_bytes() try: return fscommands.file_data_to_unicode(data) - except UnicodeDecodeError, e: + except UnicodeDecodeError as e: raise exceptions.ModuleDecodeError(self.path, e.reason) def read_bytes(self): diff --git a/pymode/libs2/rope/base/simplify.py b/pymode/libs/rope/base/simplify.py similarity index 100% rename from pymode/libs2/rope/base/simplify.py rename to pymode/libs/rope/base/simplify.py diff --git a/pymode/libs2/rope/base/stdmods.py b/pymode/libs/rope/base/stdmods.py similarity index 60% rename from pymode/libs2/rope/base/stdmods.py rename to pymode/libs/rope/base/stdmods.py index 457a4fac..5e868c2a 100644 --- a/pymode/libs2/rope/base/stdmods.py +++ b/pymode/libs/rope/base/stdmods.py @@ -1,13 +1,19 @@ import os +import re import sys from rope.base import utils +from rope.base.utils import 
pycompat def _stdlib_path(): - import distutils.sysconfig - return distutils.sysconfig.get_python_lib(standard_lib=True, - plat_specific=True) + if pycompat.PY2: + from distutils import sysconfig + return sysconfig.get_python_lib(standard_lib=True, + plat_specific=True) + elif pycompat.PY3: + import inspect + return os.path.dirname(inspect.getsourcefile(inspect)) @utils.cached(1) @@ -31,6 +37,15 @@ def python_modules(): return result +def normalize_so_name(name): + """ + Handle different types of python installations + """ + if "cpython" in name: + return os.path.splitext(os.path.splitext(name)[0])[0] + return os.path.splitext(name)[0] + + @utils.cached(1) def dynload_modules(): result = set(sys.builtin_module_names) @@ -40,7 +55,7 @@ def dynload_modules(): path = os.path.join(dynload_path, name) if os.path.isfile(path): if name.endswith('.dll'): - result.add(os.path.splitext(name)[0]) + result.add(normalize_so_name(name)) if name.endswith('.so'): - result.add(os.path.splitext(name)[0].replace('module', '')) + result.add(normalize_so_name(name)) return result diff --git a/pymode/libs2/rope/base/taskhandle.py b/pymode/libs/rope/base/taskhandle.py similarity index 100% rename from pymode/libs2/rope/base/taskhandle.py rename to pymode/libs/rope/base/taskhandle.py diff --git a/pymode/libs2/rope/base/utils.py b/pymode/libs/rope/base/utils/__init__.py similarity index 100% rename from pymode/libs2/rope/base/utils.py rename to pymode/libs/rope/base/utils/__init__.py diff --git a/pymode/libs/rope/base/utils/datastructures.py b/pymode/libs/rope/base/utils/datastructures.py new file mode 100644 index 00000000..0cb16cf2 --- /dev/null +++ b/pymode/libs/rope/base/utils/datastructures.py @@ -0,0 +1,67 @@ +# this snippet was taken from this link +# http://code.activestate.com/recipes/576694/ + +import collections + + +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] # sentinel + # node for doubly linked list + self.map = {} # key --> [key, prev, next] + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def intersection(self, set_b): + return OrderedSet([item for item in self if item in set_b]) + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) diff --git a/pymode/libs/rope/base/utils/pycompat.py b/pymode/libs/rope/base/utils/pycompat.py new file mode 100644 index 00000000..367cf092 --- /dev/null +++ b/pymode/libs/rope/base/utils/pycompat.py @@ -0,0 +1,45 @@ +import sys +import _ast +# from rope.base import ast + +PY2 = 
sys.version_info[0] == 2 +PY27 = sys.version_info[0:2] >= (2, 7) +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +try: + str = unicode +except NameError: # PY3 + + str = str + string_types = (str,) + import builtins + ast_arg_type = _ast.arg + + def execfile(fn, global_vars=None, local_vars=None): + with open(fn) as f: + code = compile(f.read(), fn, 'exec') + exec(code, global_vars or {}, local_vars) + + def get_ast_arg_arg(node): + if isinstance(node, string_types): # TODO: G21: Understand the Algorithm (Where it's used?) + return node + return node.arg + + def get_ast_with_items(node): + return node.items + +else: # PY2 + + string_types = (basestring,) + builtins = __import__('__builtin__') + ast_arg_type = _ast.Name + execfile = execfile + + def get_ast_arg_arg(node): + if isinstance(node, string_types): # Python2 arguments.vararg, arguments.kwarg + return node + return node.id + + def get_ast_with_items(node): + return [node] diff --git a/pymode/libs2/rope/base/worder.py b/pymode/libs/rope/base/worder.py similarity index 100% rename from pymode/libs2/rope/base/worder.py rename to pymode/libs/rope/base/worder.py diff --git a/pymode/libs2/rope/contrib/__init__.py b/pymode/libs/rope/contrib/__init__.py similarity index 100% rename from pymode/libs2/rope/contrib/__init__.py rename to pymode/libs/rope/contrib/__init__.py diff --git a/pymode/libs2/rope/contrib/autoimport.py b/pymode/libs/rope/contrib/autoimport.py similarity index 100% rename from pymode/libs2/rope/contrib/autoimport.py rename to pymode/libs/rope/contrib/autoimport.py diff --git a/pymode/libs2/rope/contrib/changestack.py b/pymode/libs/rope/contrib/changestack.py similarity index 100% rename from pymode/libs2/rope/contrib/changestack.py rename to pymode/libs/rope/contrib/changestack.py diff --git a/pymode/libs2/rope/contrib/codeassist.py b/pymode/libs/rope/contrib/codeassist.py similarity index 96% rename from pymode/libs2/rope/contrib/codeassist.py rename to pymode/libs/rope/contrib/codeassist.py index 48b4a813..92c1bfc2 100644 --- a/pymode/libs2/rope/contrib/codeassist.py +++ b/pymode/libs/rope/contrib/codeassist.py @@ -527,28 +527,21 @@ def get_sorted_proposal_list(self): scope_proposals = proposals.get(scope, []) scope_proposals = [proposal for proposal in scope_proposals if proposal.type in self.typerank] - scope_proposals.sort(self._proposal_cmp) + scope_proposals.sort(key=self._proposal_key) result.extend(scope_proposals) return result - def _proposal_cmp(self, proposal1, proposal2): - if proposal1.type != proposal2.type: - return cmp(self.typerank.get(proposal1.type, 100), - self.typerank.get(proposal2.type, 100)) - return self._compare_underlined_names(proposal1.name, - proposal2.name) - - def _compare_underlined_names(self, name1, name2): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - underline_count1 = underline_count(name1) - underline_count2 = underline_count(name2) - if underline_count1 != underline_count2: - return cmp(underline_count1, underline_count2) - return cmp(name1, name2) + def _proposal_key(self, proposal1): + def _underline_count(name): + return sum(1 for c in name if c == "_") + return (self.typerank.get(proposal1.type, 100), + _underline_count(proposal1.name), + proposal1.name) + #if proposal1.type != proposal2.type: + # return cmp(self.typerank.get(proposal1.type, 100), + # self.typerank.get(proposal2.type, 100)) + #return self._compare_underlined_names(proposal1.name, + # proposal2.name) class 
PyDocExtractor(object): @@ -656,14 +649,14 @@ def _trim_docstring(self, docstring, indents=0): # and split into a list of lines: lines = docstring.expandtabs().splitlines() # Determine minimum indentation (first line doesn't count): - indent = sys.maxint + indent = sys.maxsize for line in lines[1:]: stripped = line.lstrip() if stripped: indent = min(indent, len(line) - len(stripped)) # Remove indentation (first line is special): trimmed = [lines[0].strip()] - if indent < sys.maxint: + if indent < sys.maxsize: for line in lines[1:]: trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: diff --git a/pymode/libs2/rope/contrib/finderrors.py b/pymode/libs/rope/contrib/finderrors.py similarity index 98% rename from pymode/libs2/rope/contrib/finderrors.py rename to pymode/libs/rope/contrib/finderrors.py index 9ee7dd15..109a3e8a 100644 --- a/pymode/libs2/rope/contrib/finderrors.py +++ b/pymode/libs/rope/contrib/finderrors.py @@ -5,7 +5,7 @@ errors = find_errors(project, project.get_resource('mod.py')) for error in errors: - print '%s: %s' % (error.lineno, error.error) + print('%s: %s' % (error.lineno, error.error)) prints possible errors for ``mod.py`` file. diff --git a/pymode/libs2/rope/contrib/findit.py b/pymode/libs/rope/contrib/findit.py similarity index 100% rename from pymode/libs2/rope/contrib/findit.py rename to pymode/libs/rope/contrib/findit.py diff --git a/pymode/libs2/rope/contrib/fixmodnames.py b/pymode/libs/rope/contrib/fixmodnames.py similarity index 100% rename from pymode/libs2/rope/contrib/fixmodnames.py rename to pymode/libs/rope/contrib/fixmodnames.py diff --git a/pymode/libs2/rope/contrib/fixsyntax.py b/pymode/libs/rope/contrib/fixsyntax.py similarity index 97% rename from pymode/libs2/rope/contrib/fixsyntax.py rename to pymode/libs/rope/contrib/fixsyntax.py index aab5c78c..fa2a17d9 100644 --- a/pymode/libs2/rope/contrib/fixsyntax.py +++ b/pymode/libs/rope/contrib/fixsyntax.py @@ -30,7 +30,7 @@ def get_pymodule(self): return libutils.get_string_module( self.project, code, resource=self.resource, force_errors=True) - except exceptions.ModuleSyntaxError, e: + except exceptions.ModuleSyntaxError as e: if msg is None: msg = '%s:%s %s' % (e.filename, e.lineno, e.message_) if tries < self.maxfixes: @@ -40,7 +40,7 @@ def get_pymodule(self): else: raise exceptions.ModuleSyntaxError( e.filename, e.lineno, - 'Failed to fix error: {}'.format(msg)) + 'Failed to fix error: {0}'.format(msg)) @property @utils.saveit @@ -76,7 +76,7 @@ def __init__(self, code): self.code = code self.lines = self.code.split('\n') self.lines.append('\n') - self.origs = range(len(self.lines) + 1) + self.origs = list(range(len(self.lines) + 1)) self.diffs = [0] * (len(self.lines) + 1) def comment(self, lineno): diff --git a/pymode/libs2/rope/contrib/generate.py b/pymode/libs/rope/contrib/generate.py similarity index 100% rename from pymode/libs2/rope/contrib/generate.py rename to pymode/libs/rope/contrib/generate.py diff --git a/pymode/libs2/rope/refactor/__init__.py b/pymode/libs/rope/refactor/__init__.py similarity index 100% rename from pymode/libs2/rope/refactor/__init__.py rename to pymode/libs/rope/refactor/__init__.py diff --git a/pymode/libs2/rope/refactor/change_signature.py b/pymode/libs/rope/refactor/change_signature.py similarity index 98% rename from pymode/libs2/rope/refactor/change_signature.py rename to pymode/libs/rope/refactor/change_signature.py index 4279d9cf..b5ba1856 100644 --- a/pymode/libs2/rope/refactor/change_signature.py +++ 
b/pymode/libs/rope/refactor/change_signature.py @@ -347,8 +347,6 @@ def find_occurrences(self, resource=None, pymodule=None): all_occurrences = [] for finder in self.finders: all_occurrences.extend(finder.find_occurrences(resource, pymodule)) - all_occurrences.sort(self._cmp_occurrences) + all_occurrences.sort(key=lambda x: x.get_primary_range()) return all_occurrences - def _cmp_occurrences(self, o1, o2): - return cmp(o1.get_primary_range(), o2.get_primary_range()) diff --git a/pymode/libs2/rope/refactor/encapsulate_field.py b/pymode/libs/rope/refactor/encapsulate_field.py similarity index 100% rename from pymode/libs2/rope/refactor/encapsulate_field.py rename to pymode/libs/rope/refactor/encapsulate_field.py diff --git a/pymode/libs2/rope/refactor/extract.py b/pymode/libs/rope/refactor/extract.py similarity index 95% rename from pymode/libs2/rope/refactor/extract.py rename to pymode/libs/rope/refactor/extract.py index be541bb5..80e74317 100644 --- a/pymode/libs2/rope/refactor/extract.py +++ b/pymode/libs/rope/refactor/extract.py @@ -1,8 +1,10 @@ import re +from rope.base.utils.datastructures import OrderedSet from rope.base import ast, codeanalyze from rope.base.change import ChangeSet, ChangeContents from rope.base.exceptions import RefactoringError +from rope.base.utils import pycompat from rope.refactor import (sourceutils, similarfinder, patchedast, suites, usefunction) @@ -598,19 +600,20 @@ def __init__(self, start, end, is_global): self.start = start self.end = end self.is_global = is_global - self.prewritten = set() - self.maybe_written = set() - self.written = set() - self.read = set() - self.postread = set() - self.postwritten = set() + self.prewritten = OrderedSet() + self.maybe_written = OrderedSet() + self.written = OrderedSet() + self.read = OrderedSet() + self.postread = OrderedSet() + self.postwritten = OrderedSet() self.host_function = True self.conditional = False def _read_variable(self, name, lineno): if self.start <= lineno <= self.end: if name not in self.written: - self.read.add(name) + if not self.conditional or name not in self.maybe_written: + self.read.add(name) if self.end < lineno: if name not in self.postwritten: self.postread.add(name) @@ -670,16 +673,27 @@ def _While(self, node): self._handle_conditional_node(node) def _For(self, node): - self._handle_conditional_node(node) + self.conditional = True + try: + # iter has to be checked before the target variables + ast.walk(node.iter, self) + ast.walk(node.target, self) + + for child in node.body: + ast.walk(child, self) + for child in node.orelse: + ast.walk(child, self) + finally: + self.conditional = False def _get_argnames(arguments): - result = [node.id for node in arguments.args - if isinstance(node, ast.Name)] + result = [pycompat.get_ast_arg_arg(node) for node in arguments.args + if isinstance(node, pycompat.ast_arg_type)] if arguments.vararg: - result.append(arguments.vararg) + result.append(pycompat.get_ast_arg_arg(arguments.vararg)) if arguments.kwarg: - result.append(arguments.kwarg) + result.append(pycompat.get_ast_arg_arg(arguments.kwarg)) return result @@ -744,7 +758,11 @@ def loop_encountered(self, node): ast.walk(child, self) self.loop_count -= 1 if node.orelse: - ast.walk(node.orelse, self) + if isinstance(node.orelse,(list,tuple)): + for node_ in node.orelse: + ast.walk(node_, self) + else: + ast.walk(node.orelse, self) def _Break(self, node): self.check_loop() diff --git a/pymode/libs2/rope/refactor/functionutils.py b/pymode/libs/rope/refactor/functionutils.py similarity index 100% rename 
from pymode/libs2/rope/refactor/functionutils.py rename to pymode/libs/rope/refactor/functionutils.py diff --git a/pymode/libs2/rope/refactor/importutils/__init__.py b/pymode/libs/rope/refactor/importutils/__init__.py similarity index 97% rename from pymode/libs2/rope/refactor/importutils/__init__.py rename to pymode/libs/rope/refactor/importutils/__init__.py index 4871faf3..6a44f01b 100644 --- a/pymode/libs2/rope/refactor/importutils/__init__.py +++ b/pymode/libs/rope/refactor/importutils/__init__.py @@ -278,6 +278,7 @@ def add_import(project, pymodule, module_name, name=None): imports = get_module_imports(project, pymodule) candidates = [] names = [] + selected_import = None # from mod import name if name is not None: from_import = FromImport(module_name, 0, [(name, None)]) @@ -286,7 +287,10 @@ def add_import(project, pymodule, module_name, name=None): # from pkg import mod if '.' in module_name: pkg, mod = module_name.rsplit('.', 1) - candidates.append(FromImport(pkg, 0, [(mod, None)])) + from_import = FromImport(pkg, 0, [(mod, None)]) + if project.prefs.get('prefer_module_from_imports'): + selected_import = from_import + candidates.append(from_import) if name: names.append(mod + '.' + name) else: @@ -301,7 +305,8 @@ def add_import(project, pymodule, module_name, name=None): candidates.append(normal_import) visitor = actions.AddingVisitor(project, candidates) - selected_import = normal_import + if selected_import is None: + selected_import = normal_import for import_statement in imports.imports: if import_statement.accept(visitor): selected_import = visitor.import_info diff --git a/pymode/libs2/rope/refactor/importutils/actions.py b/pymode/libs/rope/refactor/importutils/actions.py similarity index 100% rename from pymode/libs2/rope/refactor/importutils/actions.py rename to pymode/libs/rope/refactor/importutils/actions.py diff --git a/pymode/libs2/rope/refactor/importutils/importinfo.py b/pymode/libs/rope/refactor/importutils/importinfo.py similarity index 100% rename from pymode/libs2/rope/refactor/importutils/importinfo.py rename to pymode/libs/rope/refactor/importutils/importinfo.py diff --git a/pymode/libs2/rope/refactor/importutils/module_imports.py b/pymode/libs/rope/refactor/importutils/module_imports.py similarity index 89% rename from pymode/libs2/rope/refactor/importutils/module_imports.py rename to pymode/libs/rope/refactor/importutils/module_imports.py index b96eebc4..26059a49 100644 --- a/pymode/libs2/rope/refactor/importutils/module_imports.py +++ b/pymode/libs/rope/refactor/importutils/module_imports.py @@ -68,7 +68,7 @@ def get_changed_source(self): # Writing module docs result.extend(after_removing[first_non_blank:first_import]) # Writing imports - sorted_imports = sorted(imports, self._compare_import_locations) + sorted_imports = sorted(imports, key=self._get_location) for stmt in sorted_imports: if stmt != sorted_imports[0]: result.append('\n' * stmt.blank_lines) @@ -88,35 +88,31 @@ def _get_import_location(self, stmt): start = stmt.get_old_location()[0] return start - def _compare_import_locations(self, stmt1, stmt2): - def get_location(stmt): - if stmt.get_new_start() is not None: - return stmt.get_new_start() - else: - return stmt.get_old_location()[0] - return cmp(get_location(stmt1), get_location(stmt2)) + def _get_location(self, stmt): + if stmt.get_new_start() is not None: + return stmt.get_new_start() + else: + return stmt.get_old_location()[0] def _remove_imports(self, imports): lines = self.pymodule.source_code.splitlines(True) after_removing = [] + 
first_import_line = self._first_import_line() last_index = 0 for stmt in imports: start, end = stmt.get_old_location() - after_removing.extend(lines[last_index:start - 1]) + blank_lines = 0 + if start != first_import_line: + blank_lines = _count_blank_lines(lines.__getitem__, start - 2, + last_index - 1, -1) + after_removing.extend(lines[last_index:start - 1 - blank_lines]) last_index = end - 1 - for i in range(start, end): - after_removing.append('') after_removing.extend(lines[last_index:]) return after_removing def _first_non_blank_line(self, lines, lineno): - result = lineno - for line in lines[lineno:]: - if line.strip() == '': - result += 1 - else: - break - return result + return lineno + _count_blank_lines(lines.__getitem__, lineno, + len(lines)) def add_import(self, import_info): visitor = actions.AddingVisitor(self.project, [import_info]) @@ -166,7 +162,7 @@ def force_single_imports(self): """force a single import per statement""" for import_stmt in self.imports[:]: import_info = import_stmt.import_info - if import_info.is_empty(): + if import_info.is_empty() or import_stmt.readonly: continue if len(import_info.names_and_aliases) > 1: for name_and_alias in import_info.names_and_aliases: @@ -202,7 +198,7 @@ def sort_imports(self): if self.project.prefs.get("sort_imports_alphabetically"): sort_kwargs = dict(key=self._get_import_name) else: - sort_kwargs = dict(cmp=self._compare_imports) + sort_kwargs = dict(key=self._key_imports) # IDEA: Sort from import list visitor = actions.SortingVisitor(self.project, self._current_folder()) @@ -225,17 +221,16 @@ def _first_import_line(self): if self.pymodule.get_doc() is not None: lineno = 1 if len(nodes) > lineno: + if (isinstance(nodes[lineno], ast.Import) or + isinstance(nodes[lineno], ast.ImportFrom)): + return nodes[lineno].lineno lineno = self.pymodule.logical_lines.logical_line_in( nodes[lineno].lineno)[0] else: lineno = self.pymodule.lines.length() - while lineno > 1: - line = self.pymodule.lines.get_line(lineno - 1) - if line.strip() == '': - lineno -= 1 - else: - break - return lineno + + return lineno - _count_blank_lines(self.pymodule.lines.get_line, + lineno - 1, 1, -1) def _get_import_name(self, import_stmt): import_info = import_stmt.import_info @@ -245,14 +240,17 @@ def _get_import_name(self, import_stmt): else: return import_info.names_and_aliases[0][0] - def _compare_imports(self, stmt1, stmt2): - str1 = stmt1.get_import_statement() - str2 = stmt2.get_import_statement() - if str1.startswith('from ') and not str2.startswith('from '): - return 1 - if not str1.startswith('from ') and str2.startswith('from '): - return -1 - return cmp(str1, str2) + def _key_imports(self, stm1): + str1 = stm1.get_import_statement() + return str1.startswith("from "), str1 + + #str1 = stmt1.get_import_statement() + #str2 = stmt2.get_import_statement() + #if str1.startswith('from ') and not str2.startswith('from '): + # return 1 + #if not str1.startswith('from ') and str2.startswith('from '): + # return -1 + #return cmp(str1, str2) def _move_imports(self, imports, index, blank_lines): if imports: @@ -282,6 +280,16 @@ def remove_pyname(self, pyname): import_stmt.accept(visitor) +def _count_blank_lines(get_line, start, end, step=1): + count = 0 + for idx in range(start, end, step): + if get_line(idx).strip() == '': + count += 1 + else: + break + return count + + class _OneTimeSelector(object): def __init__(self, names): @@ -429,24 +437,11 @@ def visit_import(self, node, end_line): self.imports.append(import_statement) def 
_count_empty_lines_before(self, lineno): - result = 0 - for current in range(lineno - 1, 0, -1): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result + return _count_blank_lines(self.lines.get_line, lineno - 1, 0, -1) def _count_empty_lines_after(self, lineno): - result = 0 - for current in range(lineno + 1, self.lines.length()): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result + return _count_blank_lines(self.lines.get_line, lineno + 1, + self.lines.length()) def get_separating_line_count(self): if not self.imports: diff --git a/pymode/libs2/rope/refactor/inline.py b/pymode/libs/rope/refactor/inline.py similarity index 99% rename from pymode/libs2/rope/refactor/inline.py rename to pymode/libs/rope/refactor/inline.py index 0ae1f8f4..467edefa 100644 --- a/pymode/libs2/rope/refactor/inline.py +++ b/pymode/libs/rope/refactor/inline.py @@ -398,7 +398,7 @@ def _calculate_definition(self, primary, pyname, call, host_vars, returns): # inside the inlined function are renamed if len(set(all_names).intersection(set(host_vars))) > 0: - prefix = _DefinitionGenerator.unique_prefix.next() + prefix = next(_DefinitionGenerator.unique_prefix) guest = libutils.get_string_module(self.project, source, self.resource) diff --git a/pymode/libs2/rope/refactor/introduce_factory.py b/pymode/libs/rope/refactor/introduce_factory.py similarity index 100% rename from pymode/libs2/rope/refactor/introduce_factory.py rename to pymode/libs/rope/refactor/introduce_factory.py diff --git a/pymode/libs2/rope/refactor/introduce_parameter.py b/pymode/libs/rope/refactor/introduce_parameter.py similarity index 100% rename from pymode/libs2/rope/refactor/introduce_parameter.py rename to pymode/libs/rope/refactor/introduce_parameter.py diff --git a/pymode/libs2/rope/refactor/localtofield.py b/pymode/libs/rope/refactor/localtofield.py similarity index 100% rename from pymode/libs2/rope/refactor/localtofield.py rename to pymode/libs/rope/refactor/localtofield.py diff --git a/pymode/libs2/rope/refactor/method_object.py b/pymode/libs/rope/refactor/method_object.py similarity index 100% rename from pymode/libs2/rope/refactor/method_object.py rename to pymode/libs/rope/refactor/method_object.py diff --git a/pymode/libs2/rope/refactor/move.py b/pymode/libs/rope/refactor/move.py similarity index 85% rename from pymode/libs2/rope/refactor/move.py rename to pymode/libs/rope/refactor/move.py index 60df493e..ce618277 100644 --- a/pymode/libs2/rope/refactor/move.py +++ b/pymode/libs/rope/refactor/move.py @@ -22,21 +22,21 @@ def create_move(project, resource, offset=None): return MoveModule(project, resource) this_pymodule = project.get_pymodule(resource) pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError( - 'Move only works on classes, functions, modules and methods.') - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.PyModule) or \ - isinstance(pyobject, pyobjects.PyPackage): - return MoveModule(project, pyobject.get_resource()) - if isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - return MoveMethod(project, resource, offset) - if isinstance(pyobject, pyobjects.PyDefinedObject) and \ - isinstance(pyobject.parent, pyobjects.PyModule): - return MoveGlobal(project, resource, offset) + if pyname is not None: + pyobject = pyname.get_object() + if isinstance(pyobject, pyobjects.PyModule) or \ + 
isinstance(pyobject, pyobjects.PyPackage): + return MoveModule(project, pyobject.get_resource()) + if isinstance(pyobject, pyobjects.PyFunction) and \ + isinstance(pyobject.parent, pyobjects.PyClass): + return MoveMethod(project, resource, offset) + if isinstance(pyobject, pyobjects.PyDefinedObject) and \ + isinstance(pyobject.parent, pyobjects.PyModule) or \ + isinstance(pyname, pynames.AssignedName): + return MoveGlobal(project, resource, offset) raise exceptions.RefactoringError( - 'Move only works on global classes/functions, modules and methods.') + 'Move only works on global classes/functions/variables, modules and ' + 'methods.') class MoveMethod(object): @@ -203,29 +203,63 @@ def __init__(self, project, resource, offset): self.project = project this_pymodule = self.project.get_pymodule(resource) self.old_pyname = evaluate.eval_location(this_pymodule, offset) - self.old_name = self.old_pyname.get_object().get_name() - pymodule = self.old_pyname.get_object().get_module() + if self.old_pyname is None: + raise exceptions.RefactoringError( + 'Move refactoring should be performed on a ' + 'class/function/variable.') + if self._is_variable(self.old_pyname): + self.old_name = worder.get_name_at(resource, offset) + pymodule = this_pymodule + else: + self.old_name = self.old_pyname.get_object().get_name() + pymodule = self.old_pyname.get_object().get_module() + self._check_exceptional_conditions() self.source = pymodule.get_resource() self.tools = _MoveTools(self.project, self.source, self.old_pyname, self.old_name) self.import_tools = self.tools.import_tools - self._check_exceptional_conditions() + + def _import_filter(self, stmt): + module_name = libutils.modname(self.source) + + if isinstance(stmt.import_info, importutils.NormalImport): + # Affect any statement that imports the source module + return any(module_name == name + for name, alias in stmt.import_info.names_and_aliases) + elif isinstance(stmt.import_info, importutils.FromImport): + # Affect statements importing from the source package + if '.' 
in module_name: + package_name, basename = module_name.rsplit('.', 1) + if (stmt.import_info.module_name == package_name and + any(basename == name + for name, alias in stmt.import_info.names_and_aliases)): + return True + return stmt.import_info.module_name == module_name + return False def _check_exceptional_conditions(self): - if self.old_pyname is None or \ - not isinstance(self.old_pyname.get_object(), - pyobjects.PyDefinedObject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a class/function.') - moving_pyobject = self.old_pyname.get_object() - if not self._is_global(moving_pyobject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed ' + - 'on a global class/function.') + if self._is_variable(self.old_pyname): + pymodule = self.old_pyname.get_definition_location()[0] + try: + pymodule.get_scope().get_name(self.old_name) + except exceptions.NameNotFoundError: + self._raise_refactoring_error() + elif not (isinstance(self.old_pyname.get_object(), + pyobjects.PyDefinedObject) and + self._is_global(self.old_pyname.get_object())): + self._raise_refactoring_error() + + def _raise_refactoring_error(self): + raise exceptions.RefactoringError( + 'Move refactoring should be performed on a global class, function ' + 'or variable.') def _is_global(self, pyobject): return pyobject.get_scope().parent == pyobject.get_module().get_scope() + def _is_variable(self, pyname): + return isinstance(pyname, pynames.AssignedName) + def get_changes(self, dest, resources=None, task_handle=taskhandle.NullTaskHandle()): if resources is None: @@ -262,7 +296,8 @@ def _calculate_changes(self, dest, resources, task_handle): should_import = source is not None # Removing out of date imports pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) + source = self.import_tools.organize_imports( + pymodule, sort=False, import_filter=self._import_filter) # Adding new import if should_import: pymodule = self.tools.new_pymodule(pymodule, source) @@ -285,6 +320,8 @@ def _source_module_changes(self, dest): renamer = ModuleSkipRenamer(occurrence_finder, self.source, handle, start, end) source = renamer.get_changed_module() + pymodule = libutils.get_string_module(self.project, source, self.source) + source = self.import_tools.organize_imports(pymodule, sort=False) if handle.occurred: pymodule = libutils.get_string_module( self.project, source, self.source) @@ -304,8 +341,6 @@ def _dest_module_changes(self, dest): pymodule = self.tools.new_pymodule(pymodule, source) moving, imports = self._get_moving_element_with_imports() - source = self.tools.remove_old_imports(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) pymodule, has_changed = self._add_imports2(pymodule, imports) module_with_imports = self.import_tools.module_imports(pymodule) @@ -329,6 +364,11 @@ def _dest_module_changes(self, dest): pymodule = libutils.get_string_module(self.project, source, dest) source = self.import_tools.organize_imports(pymodule, sort=False, unused=False) + # Remove unused imports of the old module + pymodule = libutils.get_string_module(self.project, source, dest) + source = self.import_tools.organize_imports( + pymodule, sort=False, selfs=False, unused=True, + import_filter=self._import_filter) return ChangeContents(dest, source) def _get_moving_element_with_imports(self): @@ -348,9 +388,23 @@ def _get_moving_element(self): def _get_moving_region(self): pymodule = self.project.get_pymodule(self.source) lines = 
pymodule.lines - scope = self.old_pyname.get_object().get_scope() - start = lines.get_line_start(scope.get_start()) - end_line = scope.get_end() + if self._is_variable(self.old_pyname): + logical_lines = pymodule.logical_lines + lineno = logical_lines.logical_line_in( + self.old_pyname.get_definition_location()[1])[0] + start = lines.get_line_start(lineno) + end_line = logical_lines.logical_line_in(lineno)[1] + else: + scope = self.old_pyname.get_object().get_scope() + start = lines.get_line_start(scope.get_start()) + end_line = scope.get_end() + + # Include comment lines before the definition + start_line = lines.get_line_number(start) + while start_line > 1 and lines.get_line(start_line - 1).startswith('#'): + start_line -= 1 + start = lines.get_line_start(start_line) + while end_line < lines.length() and \ lines.get_line(end_line + 1).strip() == '': end_line += 1 @@ -446,7 +500,6 @@ def _change_occurrences_in_module(self, dest, pymodule=None, new_name = self._new_modname(dest) module_imports = importutils.get_module_imports(self.project, pymodule) changed = False - source = None if libutils.modname(dest): changed = self._change_import_statements(dest, new_name, @@ -472,7 +525,6 @@ def _change_occurrences_in_module(self, dest, pymodule=None, return source return None - def _change_import_statements(self, dest, new_name, module_imports): moving_module = self.source parent_module = moving_module.parent @@ -605,7 +657,8 @@ def occurs_in_module(self, pymodule=None, resource=None, imports=True): def _create_finder(self, imports): return occurrences.create_finder(self.project, self.old_name, - self.old_pyname, imports=imports) + self.old_pyname, imports=imports, + keywords=False) def new_pymodule(self, pymodule, source): if source is not None: @@ -632,6 +685,17 @@ def _add_imports_to_module(import_tools, pymodule, new_imports): def moving_code_with_imports(project, resource, source): import_tools = importutils.ImportTools(project) pymodule = libutils.get_string_module(project, source, resource) + + # Strip comment prefix, if any. These need to stay before the moving + # section, but imports would be added between them. 
+ lines = codeanalyze.SourceLinesAdapter(source) + start = 1 + while start < lines.length() and lines.get_line(start).startswith('#'): + start += 1 + moving_prefix = source[:lines.get_line_start(start)] + pymodule = libutils.get_string_module( + project, source[lines.get_line_start(start):], resource) + origin = project.get_pymodule(resource) imports = [] @@ -662,7 +726,9 @@ def moving_code_with_imports(project, resource, source): lines = codeanalyze.SourceLinesAdapter(source) while start < lines.length() and not lines.get_line(start).strip(): start += 1 - moving = source[lines.get_line_start(start):] + + # Reinsert the prefix which was removed at the beginning + moving = moving_prefix + source[lines.get_line_start(start):] return moving, imports diff --git a/pymode/libs2/rope/refactor/multiproject.py b/pymode/libs/rope/refactor/multiproject.py similarity index 100% rename from pymode/libs2/rope/refactor/multiproject.py rename to pymode/libs/rope/refactor/multiproject.py diff --git a/pymode/libs2/rope/refactor/occurrences.py b/pymode/libs/rope/refactor/occurrences.py similarity index 95% rename from pymode/libs2/rope/refactor/occurrences.py rename to pymode/libs/rope/refactor/occurrences.py index 14a2d7de..dfc2d685 100644 --- a/pymode/libs2/rope/refactor/occurrences.py +++ b/pymode/libs/rope/refactor/occurrences.py @@ -30,6 +30,9 @@ * `instance`: Used only when you want implicit interfaces to be considered. + + * `keywords`: If False, don't return instances that are the names of keyword + arguments """ import re @@ -81,7 +84,8 @@ def find_occurrences(self, resource=None, pymodule=None): def create_finder(project, name, pyname, only_calls=False, imports=True, - unsure=None, docs=False, instance=None, in_hierarchy=False): + unsure=None, docs=False, instance=None, in_hierarchy=False, + keywords=True): """A factory for `Finder` Based on the arguments it creates a list of filters. 
`instance` @@ -95,6 +99,8 @@ def create_finder(project, name, pyname, only_calls=False, imports=True, filters.append(CallsFilter()) if not imports: filters.append(NoImportsFilter()) + if not keywords: + filters.append(NoKeywordsFilter()) if isinstance(instance, pynames.ParameterName): for pyobject in instance.get_objects(): try: @@ -163,6 +169,10 @@ def is_written(self): def is_unsure(self): return unsure_pyname(self.get_pyname()) + def is_function_keyword_parameter(self): + return self.tools.word_finder.is_function_keyword_parameter( + self.offset) + @property @utils.saveit def lineno(self): @@ -274,6 +284,14 @@ def __call__(self, occurrence): return False +class NoKeywordsFilter(object): + """Filter out keyword parameters.""" + + def __call__(self, occurrence): + if occurrence.is_function_keyword_parameter(): + return False + + class _TextualFinder(object): def __init__(self, name, docs=False): diff --git a/pymode/libs2/rope/refactor/patchedast.py b/pymode/libs/rope/refactor/patchedast.py similarity index 87% rename from pymode/libs2/rope/refactor/patchedast.py rename to pymode/libs/rope/refactor/patchedast.py index 28d36d5a..10f0a05c 100644 --- a/pymode/libs2/rope/refactor/patchedast.py +++ b/pymode/libs/rope/refactor/patchedast.py @@ -3,6 +3,12 @@ import warnings from rope.base import ast, codeanalyze, exceptions +from rope.base.utils import pycompat + +try: + basestring +except NameError: + basestring = (str, bytes) def get_patched_ast(source, sorted_children=False): @@ -265,11 +271,11 @@ def _Call(self, node): children = [node.func, '('] args = list(node.args) + node.keywords children.extend(self._child_nodes(args, ',')) - if node.starargs is not None: + if getattr(node, 'starargs', None): if args: children.append(',') children.extend(['*', node.starargs]) - if node.kwargs is not None: + if getattr(node, 'kwargs', None): if args or node.starargs is not None: children.append(',') children.extend(['**', node.kwargs]) @@ -396,11 +402,11 @@ def _arguments(self, node): if node.vararg is not None: if args: children.append(',') - children.extend(['*', node.vararg]) + children.extend(['*', pycompat.get_ast_arg_arg(node.vararg)]) if node.kwarg is not None: if args or node.vararg is not None: children.append(',') - children.extend(['**', node.kwarg]) + children.extend(['**', pycompat.get_ast_arg_arg(node.kwarg)]) self._handle(node, children) def _add_args_to_children(self, children, arg, default): @@ -475,7 +481,12 @@ def _Import(self, node): self._handle(node, children) def _keyword(self, node): - self._handle(node, [node.arg, '=', node.value]) + children = [] + if node.arg is None: + children.append(node.value) + else: + children.extend([node.arg, '=', node.value]) + self._handle(node, children) def _Lambda(self, node): self._handle(node, ['lambda', node.args, ':', node.body]) @@ -489,12 +500,41 @@ def _ListComp(self, node): children.append(']') self._handle(node, children) + def _Set(self, node): + if node.elts: + self._handle(node, + ['{'] + self._child_nodes(node.elts, ',') + ['}']) + return + # Python doesn't have empty set literals + warnings.warn('Tried to handle empty literal; please report!', + RuntimeWarning) + self._handle(node, ['set(', ')']) + + def _SetComp(self, node): + children = ['{', node.elt] + children.extend(node.generators) + children.append('}') + self._handle(node, children) + + def _DictComp(self, node): + children = ['{'] + children.extend([node.key, ':', node.value]) + children.extend(node.generators) + children.append('}') + self._handle(node, children) + def 
_Module(self, node): self._handle(node, list(node.body), eat_spaces=True) def _Name(self, node): self._handle(node, [node.id]) + def _NameConstant(self, node): + self._handle(node, [str(node.value)]) + + def _arg(self, node): + self._handle(node, [node.arg]) + def _Pass(self, node): self._handle(node, ['pass']) @@ -510,15 +550,30 @@ def _Print(self, node): self._handle(node, children) def _Raise(self, node): - children = ['raise'] - if node.type: - children.append(node.type) - if node.inst: - children.append(',') - children.append(node.inst) - if node.tback: - children.append(',') - children.append(node.tback) + + def get_python3_raise_children(node): + children = ['raise'] + if node.exc: + children.append(node.exc) + if node.cause: + children.append(node.cause) + return children + + def get_python2_raise_children(node): + children = ['raise'] + if node.type: + children.append(node.type) + if node.inst: + children.append(',') + children.append(node.inst) + if node.tback: + children.append(',') + children.append(node.tback) + return children + if pycompat.PY2: + children = get_python2_raise_children(node) + else: + children = get_python3_raise_children(node) self._handle(node, children) def _Return(self, node): @@ -555,10 +610,25 @@ def _Slice(self, node): self._handle(node, children) def _TryFinally(self, node): + # @todo fixme + is_there_except_handler = False + not_empty_body = True + if len(node.finalbody) == 1: + if pycompat.PY2: + is_there_except_handler = isinstance(node.body[0], ast.TryExcept) + not_empty_body = not bool(len(node.body)) + elif pycompat.PY3: + try: + is_there_except_handler = isinstance(node.handlers[0], ast.ExceptHandler) + not_empty_body = True + except IndexError: + pass children = [] - if len(node.body) != 1 or not isinstance(node.body[0], ast.TryExcept): + if not_empty_body or not is_there_except_handler: children.extend(['try', ':']) children.extend(node.body) + if pycompat.PY3: + children.extend(node.handlers) children.extend(['finally', ':']) children.extend(node.finalbody) self._handle(node, children) @@ -572,6 +642,12 @@ def _TryExcept(self, node): children.extend(node.orelse) self._handle(node, children) + def _Try(self, node): + if len(node.finalbody): + self._TryFinally(node) + else: + self._TryExcept(node) + def _ExceptHandler(self, node): self._excepthandler(node) @@ -615,9 +691,11 @@ def _While(self, node): self._handle(node, children) def _With(self, node): - children = ['with', node.context_expr] - if node.optional_vars: - children.extend(['as', node.optional_vars]) + children = [] + for item in pycompat.get_ast_with_items(node): + children.extend(['with', item.context_expr]) + if item.optional_vars: + children.extend(['as', item.optional_vars]) children.append(':') children.extend(node.body) self._handle(node, children) @@ -630,6 +708,8 @@ def _child_nodes(self, nodes, separator): children.append(separator) return children + def _Starred(self, node): + self._handle(node, [node.value]) class _Source(object): @@ -741,8 +821,8 @@ def __getslice__(self, i, j): def _get_number_pattern(self): # HACK: It is merely an approaximation and does the job - integer = r'(0|0x)?[\da-fA-F]+[lL]?' - return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d*)?[jJ]?' % integer + integer = r'\-?(0x[\da-fA-F]+|\d+)[lL]?' + return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d+)?[jJ]?' 
% integer _string_pattern = None _number_pattern = None diff --git a/pymode/libs2/rope/refactor/rename.py b/pymode/libs/rope/refactor/rename.py similarity index 100% rename from pymode/libs2/rope/refactor/rename.py rename to pymode/libs/rope/refactor/rename.py diff --git a/pymode/libs2/rope/refactor/restructure.py b/pymode/libs/rope/refactor/restructure.py similarity index 100% rename from pymode/libs2/rope/refactor/restructure.py rename to pymode/libs/rope/refactor/restructure.py diff --git a/pymode/libs2/rope/refactor/similarfinder.py b/pymode/libs/rope/refactor/similarfinder.py similarity index 99% rename from pymode/libs2/rope/refactor/similarfinder.py rename to pymode/libs/rope/refactor/similarfinder.py index f1a7d42d..425f9ed9 100644 --- a/pymode/libs2/rope/refactor/similarfinder.py +++ b/pymode/libs/rope/refactor/similarfinder.py @@ -28,7 +28,7 @@ def __init__(self, pymodule, wildcards=None): self.raw_finder = RawSimilarFinder( pymodule.source_code, pymodule.get_ast(), self._does_match) except MismatchedTokenError: - print "in file %s" % pymodule.resource.path + print("in file %s" % pymodule.resource.path) raise self.pymodule = pymodule if wildcards is None: diff --git a/pymode/libs2/rope/refactor/sourceutils.py b/pymode/libs/rope/refactor/sourceutils.py similarity index 100% rename from pymode/libs2/rope/refactor/sourceutils.py rename to pymode/libs/rope/refactor/sourceutils.py diff --git a/pymode/libs2/rope/refactor/suites.py b/pymode/libs/rope/refactor/suites.py similarity index 86% rename from pymode/libs2/rope/refactor/suites.py rename to pymode/libs/rope/refactor/suites.py index 4f9a8c71..68785080 100644 --- a/pymode/libs2/rope/refactor/suites.py +++ b/pymode/libs/rope/refactor/suites.py @@ -1,4 +1,5 @@ from rope.base import ast +from rope.base.utils import pycompat def find_visible(node, lines): @@ -115,13 +116,27 @@ def _With(self, node): self.suites.append(Suite(node.body, node.lineno, self.suite)) def _TryFinally(self, node): - if len(node.finalbody) == 1 and \ - isinstance(node.body[0], ast.TryExcept): - self._TryExcept(node.body[0]) + proceed_to_except_handler = False + if len(node.finalbody) == 1: + if pycompat.PY2: + proceed_to_except_handler = isinstance(node.body[0], ast.TryExcept) + elif pycompat.PY3: + try: + proceed_to_except_handler = isinstance(node.handlers[0], ast.ExceptHandler) + except IndexError: + pass + if proceed_to_except_handler: + self._TryExcept(node if pycompat.PY3 else node.body[0]) else: self.suites.append(Suite(node.body, node.lineno, self.suite)) self.suites.append(Suite(node.finalbody, node.lineno, self.suite)) + def _Try(self, node): + if len(node.finalbody) == 1: + self._TryFinally(node) + else: + self._TryExcept(node) + def _TryExcept(self, node): self.suites.append(Suite(node.body, node.lineno, self.suite)) for handler in node.handlers: diff --git a/pymode/libs2/rope/refactor/topackage.py b/pymode/libs/rope/refactor/topackage.py similarity index 100% rename from pymode/libs2/rope/refactor/topackage.py rename to pymode/libs/rope/refactor/topackage.py diff --git a/pymode/libs2/rope/refactor/usefunction.py b/pymode/libs/rope/refactor/usefunction.py similarity index 100% rename from pymode/libs2/rope/refactor/usefunction.py rename to pymode/libs/rope/refactor/usefunction.py diff --git a/pymode/libs2/rope/refactor/wildcards.py b/pymode/libs/rope/refactor/wildcards.py similarity index 100% rename from pymode/libs2/rope/refactor/wildcards.py rename to pymode/libs/rope/refactor/wildcards.py diff --git a/pymode/libs2/rope/base/astutils.py 
b/pymode/libs2/rope/base/astutils.py deleted file mode 100644 index 8ace1a92..00000000 --- a/pymode/libs2/rope/base/astutils.py +++ /dev/null @@ -1,61 +0,0 @@ -from rope.base import ast - - -def get_name_levels(node): - """Return a list of ``(name, level)`` tuples for assigned names - - The `level` is `None` for simple assignments and is a list of - numbers for tuple assignments for example in:: - - a, (b, c) = x - - The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for - `c` is ``[1, 1]``. - - """ - visitor = _NodeNameCollector() - ast.walk(node, visitor) - return visitor.names - - -class _NodeNameCollector(object): - - def __init__(self, levels=None): - self.names = [] - self.levels = levels - self.index = 0 - - def _add_node(self, node): - new_levels = [] - if self.levels is not None: - new_levels = list(self.levels) - new_levels.append(self.index) - self.index += 1 - self._added(node, new_levels) - - def _added(self, node, levels): - if hasattr(node, 'id'): - self.names.append((node.id, levels)) - - def _Name(self, node): - self._add_node(node) - - def _Tuple(self, node): - new_levels = [] - if self.levels is not None: - new_levels = list(self.levels) - new_levels.append(self.index) - self.index += 1 - visitor = _NodeNameCollector(new_levels) - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - self.names.extend(visitor.names) - - def _Subscript(self, node): - self._add_node(node) - - def _Attribute(self, node): - self._add_node(node) - - def _Slice(self, node): - self._add_node(node) diff --git a/pymode/libs3/rope/__init__.py b/pymode/libs3/rope/__init__.py deleted file mode 100644 index a936fe29..00000000 --- a/pymode/libs3/rope/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -"""rope, a python refactoring library""" - -INFO = __doc__ -VERSION = '0.9.4-1' -COPYRIGHT = """\ -Copyright (C) 2006-2010 Ali Gholami Rudi -Copyright (C) 2009-2010 Anton Gritsay -Copyright (C) 2011 Dmitriy Zhukov - -This program is free software; you can redistribute it and/or modify it -under the terms of GNU General Public License as published by the -Free Software Foundation; either version 2 of the license, or (at your -opinion) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details.""" diff --git a/pymode/libs3/rope/base/__init__.py b/pymode/libs3/rope/base/__init__.py deleted file mode 100644 index ff5f8c63..00000000 --- a/pymode/libs3/rope/base/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Base rope package - -This package contains rope core modules that are used by other modules -and packages. - -""" - -__all__ = ['project', 'libutils', 'exceptions'] diff --git a/pymode/libs3/rope/base/arguments.py b/pymode/libs3/rope/base/arguments.py deleted file mode 100644 index 342e2ae5..00000000 --- a/pymode/libs3/rope/base/arguments.py +++ /dev/null @@ -1,109 +0,0 @@ -import rope.base.evaluate -from rope.base import ast - - -class Arguments(object): - """A class for evaluating parameters passed to a function - - You can use the `create_arguments` factory. It handles implicit - first arguments. 
- - """ - - def __init__(self, args, scope): - self.args = args - self.scope = scope - self.instance = None - - def get_arguments(self, parameters): - result = [] - for pyname in self.get_pynames(parameters): - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_pynames(self, parameters): - result = [None] * max(len(parameters), len(self.args)) - for index, arg in enumerate(self.args): - if isinstance(arg, ast.keyword) and arg.arg in parameters: - result[parameters.index(arg.arg)] = self._evaluate(arg.value) - else: - result[index] = self._evaluate(arg) - return result - - def get_instance_pyname(self): - if self.args: - return self._evaluate(self.args[0]) - - def _evaluate(self, ast_node): - return rope.base.evaluate.eval_node(self.scope, ast_node) - - -def create_arguments(primary, pyfunction, call_node, scope): - """A factory for creating `Arguments`""" - args = list(call_node.args) - args.extend(call_node.keywords) - called = call_node.func - # XXX: Handle constructors - if _is_method_call(primary, pyfunction) and \ - isinstance(called, ast.Attribute): - args.insert(0, called.value) - return Arguments(args, scope) - - -class ObjectArguments(object): - - def __init__(self, pynames): - self.pynames = pynames - - def get_arguments(self, parameters): - result = [] - for pyname in self.pynames: - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_pynames(self, parameters): - return self.pynames - - def get_instance_pyname(self): - return self.pynames[0] -class MixedArguments(object): - - def __init__(self, pyname, arguments, scope): - """`argumens` is an instance of `Arguments`""" - self.pyname = pyname - self.args = arguments - - def get_pynames(self, parameters): - return [self.pyname] + self.args.get_pynames(parameters[1:]) - - def get_arguments(self, parameters): - result = [] - for pyname in self.get_pynames(parameters): - if pyname is None: - result.append(None) - else: - result.append(pyname.get_object()) - return result - - def get_instance_pyname(self): - return self.pyname - - -def _is_method_call(primary, pyfunction): - if primary is None: - return False - pyobject = primary.get_object() - if isinstance(pyobject.get_type(), rope.base.pyobjects.PyClass) and \ - isinstance(pyfunction, rope.base.pyobjects.PyFunction) and \ - isinstance(pyfunction.parent, rope.base.pyobjects.PyClass): - return True - if isinstance(pyobject.get_type(), rope.base.pyobjects.AbstractClass) and \ - isinstance(pyfunction, rope.base.builtins.BuiltinFunction): - return True - return False diff --git a/pymode/libs3/rope/base/ast.py b/pymode/libs3/rope/base/ast.py deleted file mode 100644 index 680a4ba3..00000000 --- a/pymode/libs3/rope/base/ast.py +++ /dev/null @@ -1,68 +0,0 @@ -import _ast -from _ast import * - -from rope.base import fscommands - - -def parse(source, filename=''): - # NOTE: the raw string should be given to `compile` function - if isinstance(source, str): - source = fscommands.unicode_to_file_data(source) - source = source.decode() - if '\r' in source: - source = source.replace('\r\n', '\n').replace('\r', '\n') - if not source.endswith('\n'): - source += '\n' - try: - return compile(source.encode(), filename, 'exec', _ast.PyCF_ONLY_AST) - except (TypeError, ValueError) as e: - error = SyntaxError() - error.lineno = 1 - error.filename = filename - error.msg = str(e) - raise error - - -def walk(node, walker): - """Walk the syntax tree""" - method_name = '_' + 
node.__class__.__name__ - method = getattr(walker, method_name, None) - if method is not None: - return method(node) - for child in get_child_nodes(node): - walk(child, walker) - - -def get_child_nodes(node): - if isinstance(node, _ast.Module): - return node.body - result = [] - if node._fields is not None: - for name in node._fields: - child = getattr(node, name) - if isinstance(child, list): - for entry in child: - if isinstance(entry, _ast.AST): - result.append(entry) - if isinstance(child, _ast.AST): - result.append(child) - return result - - -def call_for_nodes(node, callback, recursive=False): - """If callback returns `True` the child nodes are skipped""" - result = callback(node) - if recursive and not result: - for child in get_child_nodes(node): - call_for_nodes(child, callback, recursive) - - -def get_children(node): - result = [] - if node._fields is not None: - for name in node._fields: - if name in ['lineno', 'col_offset']: - continue - child = getattr(node, name) - result.append(child) - return result diff --git a/pymode/libs3/rope/base/builtins.py b/pymode/libs3/rope/base/builtins.py deleted file mode 100644 index 3101631a..00000000 --- a/pymode/libs3/rope/base/builtins.py +++ /dev/null @@ -1,782 +0,0 @@ -"""This module trys to support builtin types and functions.""" -import inspect - -import rope.base.evaluate -from rope.base import pynames, pyobjects, arguments, utils, ast - - -class BuiltinModule(pyobjects.AbstractModule): - - def __init__(self, name, pycore=None, initial={}): - super(BuiltinModule, self).__init__() - self.name = name - self.pycore = pycore - self.initial = initial - - parent = None - - def get_attributes(self): - return self.attributes - - def get_doc(self): - if self.module: - return self.module.__doc__ - - def get_name(self): - return self.name.split('.')[-1] - - @property - @utils.saveit - def attributes(self): - result = _object_attributes(self.module, self) - result.update(self.initial) - if self.pycore is not None: - submodules = self.pycore._builtin_submodules(self.name) - for name, module in submodules.items(): - result[name] = rope.base.builtins.BuiltinName(module) - return result - - @property - @utils.saveit - def module(self): - try: - result = __import__(self.name) - for token in self.name.split('.')[1:]: - result = getattr(result, token, None) - return result - except ImportError: - return - - -class _BuiltinElement(object): - - def __init__(self, builtin, parent=None): - self.builtin = builtin - self._parent = parent - - def get_doc(self): - if self.builtin: - return getattr(self.builtin, '__doc__', None) - - def get_name(self): - if self.builtin: - return getattr(self.builtin, '__name__', None) - - @property - def parent(self): - if self._parent is None: - return builtins - return self._parent - - -class BuiltinClass(_BuiltinElement, pyobjects.AbstractClass): - - def __init__(self, builtin, attributes, parent=None): - _BuiltinElement.__init__(self, builtin, parent) - pyobjects.AbstractClass.__init__(self) - self.initial = attributes - - @utils.saveit - def get_attributes(self): - result = _object_attributes(self.builtin, self) - result.update(self.initial) - return result - - -class BuiltinFunction(_BuiltinElement, pyobjects.AbstractFunction): - - def __init__(self, returned=None, function=None, builtin=None, - argnames=[], parent=None): - _BuiltinElement.__init__(self, builtin, parent) - pyobjects.AbstractFunction.__init__(self) - self.argnames = argnames - self.returned = returned - self.function = function - - def 
get_returned_object(self, args): - if self.function is not None: - return self.function(_CallContext(self.argnames, args)) - else: - return self.returned - - def get_param_names(self, special_args=True): - return self.argnames - - @utils.saveit - def get_attributes(self): - result = _object_attributes(self.builtin.__class__, self) - return result - - -class BuiltinUnknown(_BuiltinElement, pyobjects.PyObject): - - def __init__(self, builtin): - super(BuiltinUnknown, self).__init__(pyobjects.get_unknown()) - self.builtin = builtin - self.type = pyobjects.get_unknown() - - def get_name(self): - return getattr(type(self.builtin), '__name__', None) - - @utils.saveit - def get_attributes(self): - return _object_attributes(self.builtin, self) - - -def _object_attributes(obj, parent): - attributes = {} - for name in dir(obj): - if name == 'None': - continue - try: - child = getattr(obj, name) - except AttributeError: - # descriptors are allowed to raise AttributeError - # even if they are in dir() - continue - pyobject = None - if inspect.isclass(child): - pyobject = BuiltinClass(child, {}, parent=parent) - elif inspect.isroutine(child): - if inspect.ismethoddescriptor(child) and "__weakref__" in dir(obj): - try: - weak = child.__get__(obj.__weakref__.__objclass__()) - except: - weak = child - pyobject = BuiltinFunction(builtin=weak, parent=parent) - else: - pyobject = BuiltinFunction(builtin=child, parent=parent) - else: - pyobject = BuiltinUnknown(builtin=child) - attributes[name] = BuiltinName(pyobject) - return attributes - - -def _create_builtin_type_getter(cls): - def _get_builtin(*args): - if not hasattr(cls, '_generated'): - cls._generated = {} - if args not in cls._generated: - cls._generated[args] = cls(*args) - return cls._generated[args] - return _get_builtin - -def _create_builtin_getter(cls): - type_getter = _create_builtin_type_getter(cls) - def _get_builtin(*args): - return pyobjects.PyObject(type_getter(*args)) - return _get_builtin - - -class _CallContext(object): - - def __init__(self, argnames, args): - self.argnames = argnames - self.args = args - - def _get_scope_and_pyname(self, pyname): - if pyname is not None and isinstance(pyname, pynames.AssignedName): - pymodule, lineno = pyname.get_definition_location() - if pymodule is None: - return None, None - if lineno is None: - lineno = 1 - scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - name = None - while name is None and scope is not None: - for current in scope.get_names(): - if scope[current] is pyname: - name = current - break - else: - scope = scope.parent - return scope, name - return None, None - - def get_argument(self, name): - if self.args: - args = self.args.get_arguments(self.argnames) - return args[self.argnames.index(name)] - - def get_pyname(self, name): - if self.args: - args = self.args.get_pynames(self.argnames) - if name in self.argnames: - return args[self.argnames.index(name)] - - def get_arguments(self, argnames): - if self.args: - return self.args.get_arguments(argnames) - - def get_pynames(self, argnames): - if self.args: - return self.args.get_pynames(argnames) - - def get_per_name(self): - if self.args is None: - return None - pyname = self.args.get_instance_pyname() - scope, name = self._get_scope_and_pyname(pyname) - if name is not None: - pymodule = pyname.get_definition_location()[0] - return pymodule.pycore.object_info.get_per_name(scope, name) - return None - - def save_per_name(self, value): - if self.args is None: - return None - pyname = self.args.get_instance_pyname() - 
scope, name = self._get_scope_and_pyname(pyname) - if name is not None: - pymodule = pyname.get_definition_location()[0] - pymodule.pycore.object_info.save_per_name(scope, name, value) - - -class _AttributeCollector(object): - - def __init__(self, type): - self.attributes = {} - self.type = type - - def __call__(self, name, returned=None, function=None, - argnames=['self'], check_existence=True): - try: - builtin = getattr(self.type, name) - except AttributeError: - if check_existence: - raise - builtin=None - self.attributes[name] = BuiltinName( - BuiltinFunction(returned=returned, function=function, - argnames=argnames, builtin=builtin)) - - def __setitem__(self, name, value): - self.attributes[name] = value - - -class List(BuiltinClass): - - def __init__(self, holding=None): - self.holding = holding - collector = _AttributeCollector(list) - - collector('__iter__', function=self._iterator_get) - collector('__new__', function=self._new_list) - - # Adding methods - collector('append', function=self._list_add, argnames=['self', 'value']) - collector('__setitem__', function=self._list_add, - argnames=['self', 'index', 'value']) - collector('insert', function=self._list_add, - argnames=['self', 'index', 'value']) - collector('extend', function=self._self_set, - argnames=['self', 'iterable']) - - # Getting methods - collector('__getitem__', function=self._list_get) - collector('pop', function=self._list_get) - - super(List, self).__init__(list, collector.attributes) - - def _new_list(self, args): - return _create_builtin(args, get_list) - - def _list_add(self, context): - if self.holding is not None: - return - holding = context.get_argument('value') - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _self_set(self, context): - if self.holding is not None: - return - iterable = context.get_pyname('iterable') - holding = _infer_sequence_for_pyname(iterable) - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _list_get(self, context): - if self.holding is not None: - args = context.get_arguments(['self', 'key']) - if len(args) > 1 and args[1] is not None \ - and args[1].get_type() == builtins['slice'].get_object(): - return get_list(self.holding) - return self.holding - return context.get_per_name() - - def _iterator_get(self, context): - return get_iterator(self._list_get(context)) - - def _self_get(self, context): - return get_list(self._list_get(context)) - - -get_list = _create_builtin_getter(List) -get_list_type = _create_builtin_type_getter(List) - - -class Dict(BuiltinClass): - - def __init__(self, keys=None, values=None): - self.keys = keys - self.values = values - item = get_tuple(self.keys, self.values) - collector = _AttributeCollector(dict) - collector('__new__', function=self._new_dict) - collector('__setitem__', function=self._dict_add) - collector('popitem', function=self._item_get) - collector('pop', function=self._value_get) - collector('get', function=self._key_get) - collector('keys', function=self._key_list) - collector('values', function=self._value_list) - collector('items', function=self._item_list) - collector('copy', function=self._self_get) - collector('__getitem__', function=self._value_get) - collector('__iter__', function=self._key_iter) - collector('update', function=self._self_set) - super(Dict, self).__init__(dict, collector.attributes) - - def _new_dict(self, args): - def do_create(holding=None): - if holding is None: - return get_dict() - type = 
holding.get_type() - if isinstance(type, Tuple) and len(type.get_holding_objects()) == 2: - return get_dict(*type.get_holding_objects()) - return _create_builtin(args, do_create) - - def _dict_add(self, context): - if self.keys is not None: - return - key, value = context.get_arguments(['self', 'key', 'value'])[1:] - if key is not None and key != pyobjects.get_unknown(): - context.save_per_name(get_tuple(key, value)) - - def _item_get(self, context): - if self.keys is not None: - return get_tuple(self.keys, self.values) - item = context.get_per_name() - if item is None or not isinstance(item.get_type(), Tuple): - return get_tuple(self.keys, self.values) - return item - - def _value_get(self, context): - item = self._item_get(context).get_type() - return item.get_holding_objects()[1] - - def _key_get(self, context): - item = self._item_get(context).get_type() - return item.get_holding_objects()[0] - - def _value_list(self, context): - return get_list(self._value_get(context)) - - def _key_list(self, context): - return get_list(self._key_get(context)) - - def _item_list(self, context): - return get_list(self._item_get(context)) - - def _value_iter(self, context): - return get_iterator(self._value_get(context)) - - def _key_iter(self, context): - return get_iterator(self._key_get(context)) - - def _item_iter(self, context): - return get_iterator(self._item_get(context)) - - def _self_get(self, context): - item = self._item_get(context).get_type() - key, value = item.get_holding_objects()[:2] - return get_dict(key, value) - - def _self_set(self, context): - if self.keys is not None: - return - new_dict = context.get_pynames(['self', 'd'])[1] - if new_dict and isinstance(new_dict.get_object().get_type(), Dict): - args = arguments.ObjectArguments([new_dict]) - items = new_dict.get_object()['popitem'].\ - get_object().get_returned_object(args) - context.save_per_name(items) - else: - holding = _infer_sequence_for_pyname(new_dict) - if holding is not None and isinstance(holding.get_type(), Tuple): - context.save_per_name(holding) - - -get_dict = _create_builtin_getter(Dict) -get_dict_type = _create_builtin_type_getter(Dict) - - -class Tuple(BuiltinClass): - - def __init__(self, *objects): - self.objects = objects - first = None - if objects: - first = objects[0] - attributes = { - '__getitem__': BuiltinName(BuiltinFunction(first)), - '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))), - '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)), - '__iter__': BuiltinName(BuiltinFunction(get_iterator(first)))} - super(Tuple, self).__init__(tuple, attributes) - - def get_holding_objects(self): - return self.objects - - def _new_tuple(self, args): - return _create_builtin(args, get_tuple) - - -get_tuple = _create_builtin_getter(Tuple) -get_tuple_type = _create_builtin_type_getter(Tuple) - - -class Set(BuiltinClass): - - def __init__(self, holding=None): - self.holding = holding - collector = _AttributeCollector(set) - collector('__new__', function=self._new_set) - - self_methods = ['copy', 'difference', 'intersection', - 'symmetric_difference', 'union'] - for method in self_methods: - collector(method, function=self._self_get) - collector('add', function=self._set_add) - collector('update', function=self._self_set) - collector('update', function=self._self_set) - collector('symmetric_difference_update', function=self._self_set) - collector('difference_update', function=self._self_set) - - collector('pop', function=self._set_get) - collector('__iter__', 
function=self._iterator_get) - super(Set, self).__init__(set, collector.attributes) - - def _new_set(self, args): - return _create_builtin(args, get_set) - - def _set_add(self, context): - if self.holding is not None: - return - holding = context.get_arguments(['self', 'value'])[1] - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _self_set(self, context): - if self.holding is not None: - return - iterable = context.get_pyname('iterable') - holding = _infer_sequence_for_pyname(iterable) - if holding is not None and holding != pyobjects.get_unknown(): - context.save_per_name(holding) - - def _set_get(self, context): - if self.holding is not None: - return self.holding - return context.get_per_name() - - def _iterator_get(self, context): - return get_iterator(self._set_get(context)) - - def _self_get(self, context): - return get_list(self._set_get(context)) - - -get_set = _create_builtin_getter(Set) -get_set_type = _create_builtin_type_getter(Set) - - -class Str(BuiltinClass): - - def __init__(self): - self_object = pyobjects.PyObject(self) - collector = _AttributeCollector(str) - collector('__iter__', get_iterator(self_object), check_existence=False) - - self_methods = ['__getitem__', 'capitalize', 'center', - 'encode', 'expandtabs', 'join', 'ljust', - 'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip', - 'swapcase', 'title', 'translate', 'upper', 'zfill'] - for method in self_methods: - collector(method, self_object) - - for method in ['rsplit', 'split', 'splitlines']: - collector(method, get_list(self_object)) - - super(Str, self).__init__(str, collector.attributes) - - def get_doc(self): - return str.__doc__ - - -get_str = _create_builtin_getter(Str) -get_str_type = _create_builtin_type_getter(Str) - - -class BuiltinName(pynames.PyName): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (None, None) - -class Iterator(pyobjects.AbstractClass): - - def __init__(self, holding=None): - super(Iterator, self).__init__() - self.holding = holding - self.attributes = { - 'next': BuiltinName(BuiltinFunction(self.holding)), - '__iter__': BuiltinName(BuiltinFunction(self))} - - def get_attributes(self): - return self.attributes - - def get_returned_object(self, args): - return self.holding - -get_iterator = _create_builtin_getter(Iterator) - - -class Generator(pyobjects.AbstractClass): - - def __init__(self, holding=None): - super(Generator, self).__init__() - self.holding = holding - self.attributes = { - 'next': BuiltinName(BuiltinFunction(self.holding)), - '__iter__': BuiltinName(BuiltinFunction(get_iterator(self.holding))), - 'close': BuiltinName(BuiltinFunction()), - 'send': BuiltinName(BuiltinFunction()), - 'throw': BuiltinName(BuiltinFunction())} - - def get_attributes(self): - return self.attributes - - def get_returned_object(self, args): - return self.holding - -get_generator = _create_builtin_getter(Generator) - - -class File(BuiltinClass): - - def __init__(self): - self_object = pyobjects.PyObject(self) - str_object = get_str() - str_list = get_list(get_str()) - attributes = {} - def add(name, returned=None, function=None): - builtin = getattr(open, name, None) - attributes[name] = BuiltinName( - BuiltinFunction(returned=returned, function=function, - builtin=builtin)) - add('__iter__', get_iterator(str_object)) - for method in ['next', 'read', 'readline', 'readlines']: - add(method, str_list) - for method in ['close', 
'flush', 'lineno', 'isatty', 'seek', 'tell', - 'truncate', 'write', 'writelines']: - add(method) - super(File, self).__init__(open, attributes) - - -get_file = _create_builtin_getter(File) -get_file_type = _create_builtin_type_getter(File) - - -class Property(BuiltinClass): - - def __init__(self, fget=None, fset=None, fdel=None, fdoc=None): - self._fget = fget - self._fdoc = fdoc - attributes = { - 'fget': BuiltinName(BuiltinFunction()), - 'fset': BuiltinName(pynames.UnboundName()), - 'fdel': BuiltinName(pynames.UnboundName()), - '__new__': BuiltinName(BuiltinFunction(function=_property_function))} - super(Property, self).__init__(property, attributes) - - def get_property_object(self, args): - if isinstance(self._fget, pyobjects.AbstractFunction): - return self._fget.get_returned_object(args) - - -def _property_function(args): - parameters = args.get_arguments(['fget', 'fset', 'fdel', 'fdoc']) - return pyobjects.PyObject(Property(parameters[0])) - - -class Lambda(pyobjects.AbstractFunction): - - def __init__(self, node, scope): - super(Lambda, self).__init__() - self.node = node - self.arguments = node.args - self.scope = scope - - def get_returned_object(self, args): - result = rope.base.evaluate.eval_node(self.scope, self.node.body) - if result is not None: - return result.get_object() - else: - return pyobjects.get_unknown() - - def get_module(self): - return self.parent.get_module() - - def get_scope(self): - return self.scope - - def get_kind(self): - return 'lambda' - - def get_ast(self): - return self.node - - def get_attributes(self): - return {} - - def get_name(self): - return 'lambda' - - def get_param_names(self, special_args=True): - result = [node.arg for node in self.arguments.args - if isinstance(node, ast.arg)] - if self.arguments.vararg: - result.append('*' + self.arguments.vararg) - if self.arguments.kwarg: - result.append('**' + self.arguments.kwarg) - return result - - @property - def parent(self): - return self.scope.pyobject - - -class BuiltinObject(BuiltinClass): - - def __init__(self): - super(BuiltinObject, self).__init__(object, {}) - - -class BuiltinType(BuiltinClass): - - def __init__(self): - super(BuiltinType, self).__init__(type, {}) - - -def _infer_sequence_for_pyname(pyname): - if pyname is None: - return None - seq = pyname.get_object() - args = arguments.ObjectArguments([pyname]) - if '__iter__' in seq: - obj = seq['__iter__'].get_object() - if not isinstance(obj, pyobjects.AbstractFunction): - return None - iter = obj.get_returned_object(args) - if iter is not None and 'next' in iter: - holding = iter['next'].get_object().\ - get_returned_object(args) - return holding - - -def _create_builtin(args, creator): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - if holding is not None: - return creator(holding) - else: - return creator() - - -def _range_function(args): - return get_list() - -def _reversed_function(args): - return _create_builtin(args, get_iterator) - -def _sorted_function(args): - return _create_builtin(args, get_list) - -def _super_function(args): - passed_class, passed_self = args.get_arguments(['type', 'self']) - if passed_self is None: - return passed_class - else: - #pyclass = passed_self.get_type() - pyclass = passed_class - if isinstance(pyclass, pyobjects.AbstractClass): - supers = pyclass.get_superclasses() - if supers: - return pyobjects.PyObject(supers[0]) - return passed_self - -def _zip_function(args): - args = 
args.get_pynames(['sequence']) - objects = [] - for seq in args: - if seq is None: - holding = None - else: - holding = _infer_sequence_for_pyname(seq) - objects.append(holding) - tuple = get_tuple(*objects) - return get_list(tuple) - -def _enumerate_function(args): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - tuple = get_tuple(None, holding) - return get_iterator(tuple) - -def _iter_function(args): - passed = args.get_pynames(['sequence'])[0] - if passed is None: - holding = None - else: - holding = _infer_sequence_for_pyname(passed) - return get_iterator(holding) - -def _input_function(args): - return get_str() - - -_initial_builtins = { - 'list': BuiltinName(get_list_type()), - 'dict': BuiltinName(get_dict_type()), - 'tuple': BuiltinName(get_tuple_type()), - 'set': BuiltinName(get_set_type()), - 'str': BuiltinName(get_str_type()), - 'file': BuiltinName(get_file_type()), - 'open': BuiltinName(get_file_type()), - 'unicode': BuiltinName(get_str_type()), - 'range': BuiltinName(BuiltinFunction(function=_range_function, builtin=range)), - 'reversed': BuiltinName(BuiltinFunction(function=_reversed_function, builtin=reversed)), - 'sorted': BuiltinName(BuiltinFunction(function=_sorted_function, builtin=sorted)), - 'super': BuiltinName(BuiltinFunction(function=_super_function, builtin=super)), - 'property': BuiltinName(BuiltinFunction(function=_property_function, builtin=property)), - 'zip': BuiltinName(BuiltinFunction(function=_zip_function, builtin=zip)), - 'enumerate': BuiltinName(BuiltinFunction(function=_enumerate_function, builtin=enumerate)), - 'object': BuiltinName(BuiltinObject()), - 'type': BuiltinName(BuiltinType()), - 'iter': BuiltinName(BuiltinFunction(function=_iter_function, builtin=iter)), - 'input': BuiltinName(BuiltinFunction(function=_input_function, builtin=input)), - } - -builtins = BuiltinModule('builtins', initial=_initial_builtins) diff --git a/pymode/libs3/rope/base/change.py b/pymode/libs3/rope/base/change.py deleted file mode 100644 index 3b0d8a14..00000000 --- a/pymode/libs3/rope/base/change.py +++ /dev/null @@ -1,448 +0,0 @@ -import datetime -import difflib -import os -import time -import warnings - -import rope.base.fscommands -from rope.base import taskhandle, exceptions, utils - - -class Change(object): - """The base class for changes - - Rope refactorings return `Change` objects. They can be previewed, - committed or undone. - """ - - def do(self, job_set=None): - """Perform the change - - .. note:: Do use this directly. Use `Project.do()` instead. - """ - - def undo(self, job_set=None): - """Perform the change - - .. note:: Do use this directly. Use `History.undo()` instead. - """ - - def get_description(self): - """Return the description of this change - - This can be used for previewing the changes. - """ - return str(self) - - def get_changed_resources(self): - """Return the list of resources that will be changed""" - return [] - - @property - @utils.saveit - def _operations(self): - return _ResourceOperations(self.resource.project) - - -class ChangeSet(Change): - """A collection of `Change` objects - - This class holds a collection of changes. 
This class provides - these fields: - - * `changes`: the list of changes - * `description`: the goal of these changes - """ - - def __init__(self, description, timestamp=None): - self.changes = [] - self.description = description - self.time = timestamp - - def do(self, job_set=taskhandle.NullJobSet()): - try: - done = [] - for change in self.changes: - change.do(job_set) - done.append(change) - self.time = time.time() - except Exception: - for change in done: - change.undo() - raise - - def undo(self, job_set=taskhandle.NullJobSet()): - try: - done = [] - for change in reversed(self.changes): - change.undo(job_set) - done.append(change) - except Exception: - for change in done: - change.do() - raise - - def add_change(self, change): - self.changes.append(change) - - def get_description(self): - result = [str(self) + ':\n\n\n'] - for change in self.changes: - result.append(change.get_description()) - result.append('\n') - return ''.join(result) - - def __str__(self): - if self.time is not None: - date = datetime.datetime.fromtimestamp(self.time) - if date.date() == datetime.date.today(): - string_date = 'today' - elif date.date() == (datetime.date.today() - datetime.timedelta(1)): - string_date = 'yesterday' - elif date.year == datetime.date.today().year: - string_date = date.strftime('%b %d') - else: - string_date = date.strftime('%d %b, %Y') - string_time = date.strftime('%H:%M:%S') - string_time = '%s %s ' % (string_date, string_time) - return self.description + ' - ' + string_time - return self.description - - def get_changed_resources(self): - result = set() - for change in self.changes: - result.update(change.get_changed_resources()) - return result - - -def _handle_job_set(function): - """A decorator for handling `taskhandle.JobSet`\s - - A decorator for handling `taskhandle.JobSet`\s for `do` and `undo` - methods of `Change`\s. 
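A minimal usage sketch (not part of the patch) of the ChangeSet API above: several Change objects are composed into one set, previewed via get_description(), and applied through Project.do() so they are recorded as a single undoable unit. The project path and greeting.py are hypothetical and assumed to exist already.

    from rope.base.change import ChangeContents, ChangeSet
    from rope.base.project import Project

    project = Project('.')                       # assumed existing rope project
    changes = ChangeSet('rewrite greeting')
    changes.add_change(ChangeContents(project.get_file('greeting.py'),
                                      'def hello():\n    return "hi"\n'))
    print(changes.get_description())             # unified diff preview of the set
    project.do(changes)                          # applied as one undoable unit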
- """ - def call(self, job_set=taskhandle.NullJobSet()): - job_set.started_job(str(self)) - function(self) - job_set.finished_job() - return call - - -class ChangeContents(Change): - """A class to change the contents of a file - - Fields: - - * `resource`: The `rope.base.resources.File` to change - * `new_contents`: What to write in the file - """ - - def __init__(self, resource, new_contents, old_contents=None): - self.resource = resource - # IDEA: Only saving diffs; possible problems when undo/redoing - self.new_contents = new_contents - self.old_contents = old_contents - - @_handle_job_set - def do(self): - if self.old_contents is None: - self.old_contents = self.resource.read() - self._operations.write_file(self.resource, self.new_contents) - - @_handle_job_set - def undo(self): - if self.old_contents is None: - raise exceptions.HistoryError( - 'Undoing a change that is not performed yet!') - self._operations.write_file(self.resource, self.old_contents) - - def __str__(self): - return 'Change <%s>' % self.resource.path - - def get_description(self): - new = self.new_contents - old = self.old_contents - if old is None: - if self.resource.exists(): - old = self.resource.read() - else: - old = '' - result = difflib.unified_diff( - old.splitlines(True), new.splitlines(True), - 'a/' + self.resource.path, 'b/' + self.resource.path) - return ''.join(list(result)) - - def get_changed_resources(self): - return [self.resource] - - -class MoveResource(Change): - """Move a resource to a new location - - Fields: - - * `resource`: The `rope.base.resources.Resource` to move - * `new_resource`: The destination for move; It is the moved - resource not the folder containing that resource. - """ - - def __init__(self, resource, new_location, exact=False): - self.project = resource.project - self.resource = resource - if not exact: - new_location = _get_destination_for_move(resource, new_location) - if resource.is_folder(): - self.new_resource = self.project.get_folder(new_location) - else: - self.new_resource = self.project.get_file(new_location) - - @_handle_job_set - def do(self): - self._operations.move(self.resource, self.new_resource) - - @_handle_job_set - def undo(self): - self._operations.move(self.new_resource, self.resource) - - def __str__(self): - return 'Move <%s>' % self.resource.path - - def get_description(self): - return 'rename from %s\nrename to %s' % (self.resource.path, - self.new_resource.path) - - def get_changed_resources(self): - return [self.resource, self.new_resource] - - -class CreateResource(Change): - """A class to create a resource - - Fields: - - * `resource`: The resource to create - """ - - def __init__(self, resource): - self.resource = resource - - @_handle_job_set - def do(self): - self._operations.create(self.resource) - - @_handle_job_set - def undo(self): - self._operations.remove(self.resource) - - def __str__(self): - return 'Create Resource <%s>' % (self.resource.path) - - def get_description(self): - return 'new file %s' % (self.resource.path) - - def get_changed_resources(self): - return [self.resource] - - def _get_child_path(self, parent, name): - if parent.path == '': - return name - else: - return parent.path + '/' + name - - -class CreateFolder(CreateResource): - """A class to create a folder - - See docs for `CreateResource`. 
- """ - - def __init__(self, parent, name): - resource = parent.project.get_folder(self._get_child_path(parent, name)) - super(CreateFolder, self).__init__(resource) - - -class CreateFile(CreateResource): - """A class to create a file - - See docs for `CreateResource`. - """ - - def __init__(self, parent, name): - resource = parent.project.get_file(self._get_child_path(parent, name)) - super(CreateFile, self).__init__(resource) - - -class RemoveResource(Change): - """A class to remove a resource - - Fields: - - * `resource`: The resource to be removed - """ - - def __init__(self, resource): - self.resource = resource - - @_handle_job_set - def do(self): - self._operations.remove(self.resource) - - # TODO: Undoing remove operations - @_handle_job_set - def undo(self): - raise NotImplementedError( - 'Undoing `RemoveResource` is not implemented yet.') - - def __str__(self): - return 'Remove <%s>' % (self.resource.path) - - def get_changed_resources(self): - return [self.resource] - - -def count_changes(change): - """Counts the number of basic changes a `Change` will make""" - if isinstance(change, ChangeSet): - result = 0 - for child in change.changes: - result += count_changes(child) - return result - return 1 - -def create_job_set(task_handle, change): - return task_handle.create_jobset(str(change), count_changes(change)) - - -class _ResourceOperations(object): - - def __init__(self, project): - self.project = project - self.fscommands = project.fscommands - self.direct_commands = rope.base.fscommands.FileSystemCommands() - - def _get_fscommands(self, resource): - if self.project.is_ignored(resource): - return self.direct_commands - return self.fscommands - - def write_file(self, resource, contents): - data = rope.base.fscommands.unicode_to_file_data(contents) - fscommands = self._get_fscommands(resource) - fscommands.write(resource.real_path, data) - for observer in list(self.project.observers): - observer.resource_changed(resource) - - def move(self, resource, new_resource): - fscommands = self._get_fscommands(resource) - fscommands.move(resource.real_path, new_resource.real_path) - for observer in list(self.project.observers): - observer.resource_moved(resource, new_resource) - - def create(self, resource): - if resource.is_folder(): - self._create_resource(resource.path, kind='folder') - else: - self._create_resource(resource.path) - for observer in list(self.project.observers): - observer.resource_created(resource) - - def remove(self, resource): - fscommands = self._get_fscommands(resource) - fscommands.remove(resource.real_path) - for observer in list(self.project.observers): - observer.resource_removed(resource) - - def _create_resource(self, file_name, kind='file'): - resource_path = self.project._get_resource_path(file_name) - if os.path.exists(resource_path): - raise exceptions.RopeError('Resource <%s> already exists' - % resource_path) - resource = self.project.get_file(file_name) - if not resource.parent.exists(): - raise exceptions.ResourceNotFoundError( - 'Parent folder of <%s> does not exist' % resource.path) - fscommands = self._get_fscommands(resource) - try: - if kind == 'file': - fscommands.create_file(resource_path) - else: - fscommands.create_folder(resource_path) - except IOError as e: - raise exceptions.RopeError(e) - - -def _get_destination_for_move(resource, destination): - dest_path = resource.project._get_resource_path(destination) - if os.path.isdir(dest_path): - if destination != '': - return destination + '/' + resource.name - else: - return resource.name - 
return destination - - -class ChangeToData(object): - - def convertChangeSet(self, change): - description = change.description - changes = [] - for child in change.changes: - changes.append(self(child)) - return (description, changes, change.time) - - def convertChangeContents(self, change): - return (change.resource.path, change.new_contents, change.old_contents) - - def convertMoveResource(self, change): - return (change.resource.path, change.new_resource.path) - - def convertCreateResource(self, change): - return (change.resource.path, change.resource.is_folder()) - - def convertRemoveResource(self, change): - return (change.resource.path, change.resource.is_folder()) - - def __call__(self, change): - change_type = type(change) - if change_type in (CreateFolder, CreateFile): - change_type = CreateResource - method = getattr(self, 'convert' + change_type.__name__) - return (change_type.__name__, method(change)) - - -class DataToChange(object): - - def __init__(self, project): - self.project = project - - def makeChangeSet(self, description, changes, time=None): - result = ChangeSet(description, time) - for child in changes: - result.add_change(self(child)) - return result - - def makeChangeContents(self, path, new_contents, old_contents): - resource = self.project.get_file(path) - return ChangeContents(resource, new_contents, old_contents) - - def makeMoveResource(self, old_path, new_path): - resource = self.project.get_file(old_path) - return MoveResource(resource, new_path, exact=True) - - def makeCreateResource(self, path, is_folder): - if is_folder: - resource = self.project.get_folder(path) - else: - resource = self.project.get_file(path) - return CreateResource(resource) - - def makeRemoveResource(self, path, is_folder): - if is_folder: - resource = self.project.get_folder(path) - else: - resource = self.project.get_file(path) - return RemoveResource(resource) - - def __call__(self, data): - method = getattr(self, 'make' + data[0]) - return method(*data[1]) diff --git a/pymode/libs3/rope/base/codeanalyze.py b/pymode/libs3/rope/base/codeanalyze.py deleted file mode 100644 index 843f477d..00000000 --- a/pymode/libs3/rope/base/codeanalyze.py +++ /dev/null @@ -1,358 +0,0 @@ -import bisect -import re -import token -import tokenize - - -class ChangeCollector(object): - - def __init__(self, text): - self.text = text - self.changes = [] - - def add_change(self, start, end, new_text=None): - if new_text is None: - new_text = self.text[start:end] - self.changes.append((start, end, new_text)) - - def get_changed(self): - if not self.changes: - return None - def compare_changes(change1, change2): - return cmp(change1[:2], change2[:2]) - self.changes.sort(key=lambda change: change[:2]) - pieces = [] - last_changed = 0 - for change in self.changes: - start, end, text = change - pieces.append(self.text[last_changed:start] + text) - last_changed = end - if last_changed < len(self.text): - pieces.append(self.text[last_changed:]) - result = ''.join(pieces) - if result != self.text: - return result - - -class SourceLinesAdapter(object): - """Adapts source to Lines interface - - Note: The creation of this class is expensive. 
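A short sketch of how the ChangeCollector above is meant to be used: edits are registered as (start, end, replacement) offset ranges against the original text, and get_changed() splices them back together in offset order. The import path mirrors where this class lives in rope proper; the offsets below are for this exact sample string.

    from rope.base.codeanalyze import ChangeCollector

    source = 'def f(x):\n    return x\n'
    collector = ChangeCollector(source)
    collector.add_change(4, 5, 'g')        # rename the function f -> g
    collector.add_change(6, 7, 'value')    # rename the parameter in the header
    collector.add_change(21, 22, 'value')  # ...and its use in the body
    print(collector.get_changed())         # 'def g(value):\n    return value\n'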
- """ - - def __init__(self, source_code): - self.code = source_code - self.starts = None - self._initialize_line_starts() - - def _initialize_line_starts(self): - self.starts = [] - self.starts.append(0) - try: - i = 0 - while True: - i = self.code.index('\n', i) + 1 - self.starts.append(i) - except ValueError: - pass - self.starts.append(len(self.code) + 1) - - def get_line(self, lineno): - return self.code[self.starts[lineno - 1]: - self.starts[lineno] - 1] - - def length(self): - return len(self.starts) - 1 - - def get_line_number(self, offset): - return bisect.bisect(self.starts, offset) - - def get_line_start(self, lineno): - return self.starts[lineno - 1] - - def get_line_end(self, lineno): - return self.starts[lineno] - 1 - - -class ArrayLinesAdapter(object): - - def __init__(self, lines): - self.lines = lines - - def get_line(self, line_number): - return self.lines[line_number - 1] - - def length(self): - return len(self.lines) - - -class LinesToReadline(object): - - def __init__(self, lines, start): - self.lines = lines - self.current = start - - def readline(self): - if self.current <= self.lines.length(): - self.current += 1 - return self.lines.get_line(self.current - 1) + '\n' - return '' - - def __call__(self): - return self.readline() - - -class _CustomGenerator(object): - - def __init__(self, lines): - self.lines = lines - self.in_string = '' - self.open_count = 0 - self.continuation = False - - def __call__(self): - size = self.lines.length() - result = [] - i = 1 - while i <= size: - while i <= size and not self.lines.get_line(i).strip(): - i += 1 - if i <= size: - start = i - while True: - line = self.lines.get_line(i) - self._analyze_line(line) - if not (self.continuation or self.open_count or - self.in_string) or i == size: - break - i += 1 - result.append((start, i)) - i += 1 - return result - - _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]') - def _analyze_line(self, line): - char = None - for match in self._main_chars.finditer(line): - char = match.group() - i = match.start() - if char in '\'"': - if not self.in_string: - self.in_string = char - if char * 3 == line[i:i + 3]: - self.in_string = char * 3 - elif self.in_string == line[i:i + len(self.in_string)] and \ - not (i > 0 and line[i - 1] == '\\' and - not (i > 1 and line[i - 2] == '\\')): - self.in_string = '' - if self.in_string: - continue - if char == '#': - break - if char in '([{': - self.open_count += 1 - elif char in ')]}': - self.open_count -= 1 - if line and char != '#' and line.endswith('\\'): - self.continuation = True - else: - self.continuation = False - -def custom_generator(lines): - return _CustomGenerator(lines)() - - -class LogicalLineFinder(object): - - def __init__(self, lines): - self.lines = lines - - def logical_line_in(self, line_number): - indents = count_line_indents(self.lines.get_line(line_number)) - tries = 0 - while True: - block_start = get_block_start(self.lines, line_number, indents) - try: - return self._block_logical_line(block_start, line_number) - except IndentationError as e: - tries += 1 - if tries == 5: - raise e - lineno = e.lineno + block_start - 1 - indents = count_line_indents(self.lines.get_line(lineno)) - - def generate_starts(self, start_line=1, end_line=None): - for start, end in self.generate_regions(start_line, end_line): - yield start - - def generate_regions(self, start_line=1, end_line=None): - # XXX: `block_start` should be at a better position! 
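A sketch of what the logical-line machinery above computes: LogicalLineFinder watches tokenize's NEWLINE tokens, so a statement wrapped across physical lines is reported as one region. ArrayLinesAdapter wraps a plain list of lines for testing; both names are taken from this module.

    from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder

    lines = ['x = (1 +', '     2)', 'print(x)']
    finder = LogicalLineFinder(ArrayLinesAdapter(lines))
    print(list(finder.generate_regions()))   # [(1, 2), (3, 3)]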
- block_start = 1 - readline = LinesToReadline(self.lines, block_start) - shifted = start_line - block_start + 1 - try: - for start, end in self._logical_lines(readline): - real_start = start + block_start - 1 - real_start = self._first_non_blank(real_start) - if end_line is not None and real_start >= end_line: - break - real_end = end + block_start - 1 - if real_start >= start_line: - yield (real_start, real_end) - except tokenize.TokenError as e: - pass - - def _block_logical_line(self, block_start, line_number): - readline = LinesToReadline(self.lines, block_start) - shifted = line_number - block_start + 1 - region = self._calculate_logical(readline, shifted) - start = self._first_non_blank(region[0] + block_start - 1) - if region[1] is None: - end = self.lines.length() - else: - end = region[1] + block_start - 1 - return start, end - - def _calculate_logical(self, readline, line_number): - last_end = 1 - try: - for start, end in self._logical_lines(readline): - if line_number <= end: - return (start, end) - last_end = end + 1 - except tokenize.TokenError as e: - current = e.args[1][0] - return (last_end, max(last_end, current - 1)) - return (last_end, None) - - def _logical_lines(self, readline): - last_end = 1 - for current_token in tokenize.generate_tokens(readline): - current = current_token[2][0] - if current_token[0] == token.NEWLINE: - yield (last_end, current) - last_end = current + 1 - - def _first_non_blank(self, line_number): - current = line_number - while current < self.lines.length(): - line = self.lines.get_line(current).strip() - if line and not line.startswith('#'): - return current - current += 1 - return current - - -def tokenizer_generator(lines): - return LogicalLineFinder(lines).generate_regions() - - -class CachingLogicalLineFinder(object): - - def __init__(self, lines, generate=custom_generator): - self.lines = lines - self._generate = generate - - _starts = None - @property - def starts(self): - if self._starts is None: - self._init_logicals() - return self._starts - - _ends = None - @property - def ends(self): - if self._ends is None: - self._init_logicals() - return self._ends - - def _init_logicals(self): - """Should initialize _starts and _ends attributes""" - size = self.lines.length() + 1 - self._starts = [None] * size - self._ends = [None] * size - for start, end in self._generate(self.lines): - self._starts[start] = True - self._ends[end] = True - - def logical_line_in(self, line_number): - start = line_number - while start > 0 and not self.starts[start]: - start -= 1 - if start == 0: - try: - start = self.starts.index(True, line_number) - except ValueError: - return (line_number, line_number) - return (start, self.ends.index(True, start)) - - def generate_starts(self, start_line=1, end_line=None): - if end_line is None: - end_line = self.lines.length() - for index in range(start_line, end_line): - if self.starts[index]: - yield index - - -def get_block_start(lines, lineno, maximum_indents=80): - """Approximate block start""" - pattern = get_block_start_patterns() - for i in range(lineno, 0, -1): - match = pattern.search(lines.get_line(i)) - if match is not None and \ - count_line_indents(lines.get_line(i)) <= maximum_indents: - striped = match.string.lstrip() - # Maybe we're in a list comprehension or generator expression - if i > 1 and striped.startswith('if') or striped.startswith('for'): - bracs = 0 - for j in range(i, min(i + 5, lines.length() + 1)): - for c in lines.get_line(j): - if c == '#': - break - if c in '[(': - bracs += 1 - if c in ')]': - 
bracs -= 1 - if bracs < 0: - break - if bracs < 0: - break - if bracs < 0: - continue - return i - return 1 - - -_block_start_pattern = None - -def get_block_start_patterns(): - global _block_start_pattern - if not _block_start_pattern: - pattern = '^\\s*(((def|class|if|elif|except|for|while|with)\\s)|'\ - '((try|else|finally|except)\\s*:))' - _block_start_pattern = re.compile(pattern, re.M) - return _block_start_pattern - - -def count_line_indents(line): - indents = 0 - for char in line: - if char == ' ': - indents += 1 - elif char == '\t': - indents += 8 - else: - return indents - return 0 - - -def get_string_pattern(): - start = r'(\b[uU]?[rR]?)?' - longstr = r'%s"""(\\.|"(?!"")|\\\n|[^"\\])*"""' % start - shortstr = r'%s"(\\.|[^"\\\n])*"' % start - return '|'.join([longstr, longstr.replace('"', "'"), - shortstr, shortstr.replace('"', "'")]) - -def get_comment_pattern(): - return r'#[^\n]*' diff --git a/pymode/libs3/rope/base/default_config.py b/pymode/libs3/rope/base/default_config.py deleted file mode 100644 index 126cf7bf..00000000 --- a/pymode/libs3/rope/base/default_config.py +++ /dev/null @@ -1,86 +0,0 @@ -# The default ``config.py`` - - -def set_prefs(prefs): - """This function is called before opening the project""" - - # Specify which files and folders to ignore in the project. - # Changes to ignored resources are not added to the history and - # VCSs. Also they are not returned in `Project.get_files()`. - # Note that ``?`` and ``*`` match all characters but slashes. - # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' - # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' - # '.svn': matches 'pkg/.svn' and all of its children - # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' - # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' - prefs['ignored_resources'] = [ - '*.pyc', '*~', '.ropeproject', '.hg', '.svn', '_svn', '.git', - '__pycache__', '.tox', '.env', 'node_modules', 'bower_components'] - - # Specifies which files should be considered python files. It is - # useful when you have scripts inside your project. Only files - # ending with ``.py`` are considered to be python files by - # default. - #prefs['python_files'] = ['*.py'] - - # Custom source folders: By default rope searches the project - # for finding source folders (folders that should be searched - # for finding modules). You can add paths to that list. Note - # that rope guesses project source folders correctly most of the - # time; use this if you have any problems. - # The folders should be relative to project root and use '/' for - # separating folders regardless of the platform rope is running on. - # 'src/my_source_folder' for instance. - #prefs.add('source_folders', 'src') - - # You can extend python path for looking up modules - #prefs.add('python_path', '~/python/') - - # Should rope save object information or not. - prefs['save_objectdb'] = True - prefs['compress_objectdb'] = False - - # If `True`, rope analyzes each module when it is being saved. - prefs['automatic_soa'] = True - # The depth of calls to follow in static object analysis - prefs['soa_followed_calls'] = 0 - - # If `False` when running modules or unit tests "dynamic object - # analysis" is turned off. This makes them much faster. - prefs['perform_doa'] = True - - # Rope can check the validity of its object DB when running. - prefs['validate_objectdb'] = True - - # How many undos to hold? - prefs['max_history_items'] = 32 - - # Shows whether to save history across sessions. 
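These preferences end up on project.prefs and are read back elsewhere with project.prefs.get(); the History class later in this patch, for example, consults 'save_history' and 'max_history_items'. A sketch of overriding them per project, assuming rope's Project accepts preference keyword arguments (that signature is not shown in this patch):

    from rope.base.project import Project

    project = Project('.', save_history=False, ignore_syntax_errors=True)
    print(project.prefs.get('save_history', False))   # effective value after overrides
    project.close()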
- prefs['save_history'] = True - prefs['compress_history'] = False - - # Set the number spaces used for indenting. According to - # :PEP:`8`, it is best to use 4 spaces. Since most of rope's - # unit-tests use 4 spaces it is more reliable, too. - prefs['indent_size'] = 4 - - # Builtin and c-extension modules that are allowed to be imported - # and inspected by rope. - prefs['extension_modules'] = [] - - # Add all standard c-extensions to extension_modules list. - prefs['import_dynload_stdmods'] = True - - # If `True` modules with syntax errors are considered to be empty. - # The default value is `False`; When `False` syntax errors raise - # `rope.base.exceptions.ModuleSyntaxError` exception. - prefs['ignore_syntax_errors'] = False - - # If `True`, rope ignores unresolvable imports. Otherwise, they - # appear in the importing namespace. - prefs['ignore_bad_imports'] = False - - -def project_opened(project): - """This function is called after opening the project""" - # Do whatever you like here! diff --git a/pymode/libs3/rope/base/evaluate.py b/pymode/libs3/rope/base/evaluate.py deleted file mode 100644 index 659646c0..00000000 --- a/pymode/libs3/rope/base/evaluate.py +++ /dev/null @@ -1,325 +0,0 @@ -import rope.base.builtins -import rope.base.pynames -import rope.base.pyobjects -from rope.base import ast, astutils, exceptions, pyobjects, arguments, worder - - -BadIdentifierError = exceptions.BadIdentifierError - -def eval_location(pymodule, offset): - """Find the pyname at the offset""" - return eval_location2(pymodule, offset)[1] - - -def eval_location2(pymodule, offset): - """Find the primary and pyname at offset""" - pyname_finder = ScopeNameFinder(pymodule) - return pyname_finder.get_primary_and_pyname_at(offset) - - -def eval_node(scope, node): - """Evaluate a `ast.AST` node and return a PyName - - Return `None` if the expression cannot be evaluated. 
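A sketch of the evaluation entry points above: resolve the name under an offset back to its definition. PyCore.get_string_module() is assumed here to build a throwaway PyModule; it is not shown in this patch.

    from rope.base import evaluate
    from rope.base.project import Project

    project = Project('.')
    source = 'greeting = "hello"\nprint(greeting)\n'
    pymodule = project.pycore.get_string_module(source)   # assumed helper
    offset = source.rindex('greeting')                    # the use inside print()
    pyname = evaluate.eval_location(pymodule, offset)
    print(pyname.get_object())                            # should be rope's str object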
- """ - return eval_node2(scope, node)[1] - - -def eval_node2(scope, node): - evaluator = StatementEvaluator(scope) - ast.walk(node, evaluator) - return evaluator.old_result, evaluator.result - - -def eval_str(holding_scope, name): - return eval_str2(holding_scope, name)[1] - - -def eval_str2(holding_scope, name): - try: - # parenthesizing for handling cases like 'a_var.\nattr' - node = ast.parse('(%s)' % name) - except SyntaxError: - raise BadIdentifierError('Not a resolvable python identifier selected.') - return eval_node2(holding_scope, node) - - -class ScopeNameFinder(object): - - def __init__(self, pymodule): - self.module_scope = pymodule.get_scope() - self.lines = pymodule.lines - self.worder = worder.Worder(pymodule.source_code, True) - - def _is_defined_in_class_body(self, holding_scope, offset, lineno): - if lineno == holding_scope.get_start() and \ - holding_scope.parent is not None and \ - holding_scope.parent.get_kind() == 'Class' and \ - self.worder.is_a_class_or_function_name_in_header(offset): - return True - if lineno != holding_scope.get_start() and \ - holding_scope.get_kind() == 'Class' and \ - self.worder.is_name_assigned_in_class_body(offset): - return True - return False - - def _is_function_name_in_function_header(self, scope, offset, lineno): - if scope.get_start() <= lineno <= scope.get_body_start() and \ - scope.get_kind() == 'Function' and \ - self.worder.is_a_class_or_function_name_in_header(offset): - return True - return False - - def get_pyname_at(self, offset): - return self.get_primary_and_pyname_at(offset)[1] - - def get_primary_and_pyname_at(self, offset): - lineno = self.lines.get_line_number(offset) - holding_scope = self.module_scope.get_inner_scope_for_line(lineno) - # function keyword parameter - if self.worder.is_function_keyword_parameter(offset): - keyword_name = self.worder.get_word_at(offset) - pyobject = self.get_enclosing_function(offset) - if isinstance(pyobject, pyobjects.PyFunction): - return (None, pyobject.get_parameters().get(keyword_name, None)) - # class body - if self._is_defined_in_class_body(holding_scope, offset, lineno): - class_scope = holding_scope - if lineno == holding_scope.get_start(): - class_scope = holding_scope.parent - name = self.worder.get_primary_at(offset).strip() - try: - return (None, class_scope.pyobject[name]) - except rope.base.exceptions.AttributeNotFoundError: - return (None, None) - # function header - if self._is_function_name_in_function_header(holding_scope, offset, lineno): - name = self.worder.get_primary_at(offset).strip() - return (None, holding_scope.parent[name]) - # from statement module - if self.worder.is_from_statement_module(offset): - module = self.worder.get_primary_at(offset) - module_pyname = self._find_module(module) - return (None, module_pyname) - if self.worder.is_from_aliased(offset): - name = self.worder.get_from_aliased(offset) - else: - name = self.worder.get_primary_at(offset) - return eval_str2(holding_scope, name) - - def get_enclosing_function(self, offset): - function_parens = self.worder.find_parens_start_from_inside(offset) - try: - function_pyname = self.get_pyname_at(function_parens - 1) - except BadIdentifierError: - function_pyname = None - if function_pyname is not None: - pyobject = function_pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return pyobject - elif isinstance(pyobject, pyobjects.AbstractClass) and \ - '__init__' in pyobject: - return pyobject['__init__'].get_object() - elif '__call__' in pyobject: - return 
pyobject['__call__'].get_object() - return None - - def _find_module(self, module_name): - dots = 0 - while module_name[dots] == '.': - dots += 1 - return rope.base.pynames.ImportedModule( - self.module_scope.pyobject, module_name[dots:], dots) - - -class StatementEvaluator(object): - - def __init__(self, scope): - self.scope = scope - self.result = None - self.old_result = None - - def _Name(self, node): - self.result = self.scope.lookup(node.id) - - def _Attribute(self, node): - pyname = eval_node(self.scope, node.value) - if pyname is None: - pyname = rope.base.pynames.UnboundName() - self.old_result = pyname - if pyname.get_object() != rope.base.pyobjects.get_unknown(): - try: - self.result = pyname.get_object()[node.attr] - except exceptions.AttributeNotFoundError: - self.result = None - - def _Call(self, node): - primary, pyobject = self._get_primary_and_object_for_node(node.func) - if pyobject is None: - return - def _get_returned(pyobject): - args = arguments.create_arguments(primary, pyobject, - node, self.scope) - return pyobject.get_returned_object(args) - if isinstance(pyobject, rope.base.pyobjects.AbstractClass): - result = None - if '__new__' in pyobject: - new_function = pyobject['__new__'].get_object() - result = _get_returned(new_function) - if result is None or \ - result == rope.base.pyobjects.get_unknown(): - result = rope.base.pyobjects.PyObject(pyobject) - self.result = rope.base.pynames.UnboundName(pyobject=result) - return - - pyfunction = None - if isinstance(pyobject, rope.base.pyobjects.AbstractFunction): - pyfunction = pyobject - elif '__call__' in pyobject: - pyfunction = pyobject['__call__'].get_object() - if pyfunction is not None: - self.result = rope.base.pynames.UnboundName( - pyobject=_get_returned(pyfunction)) - - def _Str(self, node): - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_str()) - - def _Num(self, node): - type_name = type(node.n).__name__ - self.result = self._get_builtin_name(type_name) - - def _get_builtin_name(self, type_name): - pytype = rope.base.builtins.builtins[type_name].get_object() - return rope.base.pynames.UnboundName( - rope.base.pyobjects.PyObject(pytype)) - - def _BinOp(self, node): - self.result = rope.base.pynames.UnboundName( - self._get_object_for_node(node.left)) - - def _BoolOp(self, node): - pyobject = self._get_object_for_node(node.values[0]) - if pyobject is None: - pyobject = self._get_object_for_node(node.values[1]) - self.result = rope.base.pynames.UnboundName(pyobject) - - def _Repr(self, node): - self.result = self._get_builtin_name('str') - - def _UnaryOp(self, node): - self.result = rope.base.pynames.UnboundName( - self._get_object_for_node(node.operand)) - - def _Compare(self, node): - self.result = self._get_builtin_name('bool') - - def _Dict(self, node): - keys = None - values = None - if node.keys: - keys = self._get_object_for_node(node.keys[0]) - values = self._get_object_for_node(node.values[0]) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_dict(keys, values)) - - def _List(self, node): - holding = None - if node.elts: - holding = self._get_object_for_node(node.elts[0]) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_list(holding)) - - def _ListComp(self, node): - pyobject = self._what_does_comprehension_hold(node) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_list(pyobject)) - - def _GeneratorExp(self, node): - pyobject = self._what_does_comprehension_hold(node) - 
self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_iterator(pyobject)) - - def _what_does_comprehension_hold(self, node): - scope = self._make_comprehension_scope(node) - pyname = eval_node(scope, node.elt) - return pyname.get_object() if pyname is not None else None - - def _make_comprehension_scope(self, node): - scope = self.scope - module = scope.pyobject.get_module() - names = {} - for comp in node.generators: - new_names = _get_evaluated_names(comp.target, comp.iter, module, - '.__iter__().next()', node.lineno) - names.update(new_names) - return rope.base.pyscopes.TemporaryScope(scope.pycore, scope, names) - - def _Tuple(self, node): - objects = [] - if len(node.elts) < 4: - for stmt in node.elts: - pyobject = self._get_object_for_node(stmt) - objects.append(pyobject) - else: - objects.append(self._get_object_for_node(node.elts[0])) - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.get_tuple(*objects)) - - def _get_object_for_node(self, stmt): - pyname = eval_node(self.scope, stmt) - pyobject = None - if pyname is not None: - pyobject = pyname.get_object() - return pyobject - - def _get_primary_and_object_for_node(self, stmt): - primary, pyname = eval_node2(self.scope, stmt) - pyobject = None - if pyname is not None: - pyobject = pyname.get_object() - return primary, pyobject - - def _Subscript(self, node): - if isinstance(node.slice, ast.Index): - self._call_function(node.value, '__getitem__', - [node.slice.value]) - elif isinstance(node.slice, ast.Slice): - self._call_function(node.value, '__getitem__', - [node.slice]) - - def _Slice(self, node): - self.result = self._get_builtin_name('slice') - - def _call_function(self, node, function_name, other_args=None): - pyname = eval_node(self.scope, node) - if pyname is not None: - pyobject = pyname.get_object() - else: - return - if function_name in pyobject: - called = pyobject[function_name].get_object() - if not called or not isinstance(called, pyobjects.AbstractFunction): - return - args = [node] - if other_args: - args += other_args - arguments_ = arguments.Arguments(args, self.scope) - self.result = rope.base.pynames.UnboundName( - pyobject=called.get_returned_object(arguments_)) - - def _Lambda(self, node): - self.result = rope.base.pynames.UnboundName( - pyobject=rope.base.builtins.Lambda(node, self.scope)) - - -def _get_evaluated_names(targets, assigned, module, evaluation, lineno): - result = {} - for name, levels in astutils.get_name_levels(targets): - assignment = rope.base.pynames.AssignmentValue(assigned, levels, - evaluation) - # XXX: this module should not access `rope.base.pynamesdef`! 
- pyname = rope.base.pynamesdef.AssignedName(lineno, module) - pyname.assignments.append(assignment) - result[name] = pyname - return result diff --git a/pymode/libs3/rope/base/exceptions.py b/pymode/libs3/rope/base/exceptions.py deleted file mode 100644 index d161c89e..00000000 --- a/pymode/libs3/rope/base/exceptions.py +++ /dev/null @@ -1,61 +0,0 @@ -class RopeError(Exception): - """Base exception for rope""" - - -class ResourceNotFoundError(RopeError): - """Resource not found exception""" - - -class RefactoringError(RopeError): - """Errors for performing a refactoring""" - - -class InterruptedTaskError(RopeError): - """The task has been interrupted""" - - -class HistoryError(RopeError): - """Errors for history undo/redo operations""" - - -class ModuleNotFoundError(RopeError): - """Module not found exception""" - - -class AttributeNotFoundError(RopeError): - """Attribute not found exception""" - - -class NameNotFoundError(RopeError): - """Name not found exception""" - - -class BadIdentifierError(RopeError): - """The name cannot be resolved""" - - -class ModuleSyntaxError(RopeError): - """Module has syntax errors - - The `filename` and `lineno` fields indicate where the error has - occurred. - - """ - - def __init__(self, filename, lineno, message): - self.filename = filename - self.lineno = lineno - self.message_ = message - super(ModuleSyntaxError, self).__init__( - 'Syntax error in file <%s> line <%s>: %s' % - (filename, lineno, message)) - - -class ModuleDecodeError(RopeError): - """Cannot decode module""" - - def __init__(self, filename, message): - self.filename = filename - self.message_ = message - super(ModuleDecodeError, self).__init__( - 'Cannot decode file <%s>: %s' % (filename, message)) diff --git a/pymode/libs3/rope/base/fscommands.py b/pymode/libs3/rope/base/fscommands.py deleted file mode 100644 index ea1a061a..00000000 --- a/pymode/libs3/rope/base/fscommands.py +++ /dev/null @@ -1,267 +0,0 @@ -"""Project file system commands. - -This modules implements file system operations used by rope. Different -version control systems can be supported by implementing the interface -provided by `FileSystemCommands` class. See `SubversionCommands` and -`MercurialCommands` for example. 
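A sketch of the interface this docstring describes: any object with these five methods can serve as a backend, and FileSystemCommands (defined just below) is the plain fallback used when no VCS directory is found. Handing such an object to rope, for instance through Project's fscommands argument, is an assumption based on rope's API and is not shown in this patch.

    from rope.base.fscommands import FileSystemCommands

    class LoggingCommands(object):
        """Log every operation, then delegate the real file work."""

        def __init__(self):
            self.inner = FileSystemCommands()

        def create_file(self, path):
            print('create file', path)
            self.inner.create_file(path)

        def create_folder(self, path):
            print('create folder', path)
            self.inner.create_folder(path)

        def move(self, path, new_location):
            print('move', path, '->', new_location)
            self.inner.move(path, new_location)

        def remove(self, path):
            print('remove', path)
            self.inner.remove(path)

        def write(self, path, data):
            print('write', len(data), 'bytes to', path)
            self.inner.write(path, data)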
- -""" -import os, re -import shutil -import subprocess - - -def create_fscommands(root): - dirlist = os.listdir(root) - commands = {'.hg': MercurialCommands, - '.svn': SubversionCommands, - '.git': GITCommands, - '_svn': SubversionCommands, - '_darcs': DarcsCommands} - for key in commands: - if key in dirlist: - try: - return commands[key](root) - except (ImportError, OSError): - pass - return FileSystemCommands() - - -class FileSystemCommands(object): - - def create_file(self, path): - open(path, 'w').close() - - def create_folder(self, path): - os.mkdir(path) - - def move(self, path, new_location): - shutil.move(path, new_location) - - def remove(self, path): - if os.path.isfile(path): - os.remove(path) - else: - shutil.rmtree(path) - - def write(self, path, data): - file_ = open(path, 'wb') - try: - file_.write(data) - finally: - file_.close() - - -class SubversionCommands(object): - - def __init__(self, *args): - self.normal_actions = FileSystemCommands() - import pysvn - self.client = pysvn.Client() - - def create_file(self, path): - self.normal_actions.create_file(path) - self.client.add(path, force=True) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - self.client.add(path, force=True) - - def move(self, path, new_location): - self.client.move(path, new_location, force=True) - - def remove(self, path): - self.client.remove(path, force=True) - - def write(self, path, data): - self.normal_actions.write(path, data) - - -class MercurialCommands(object): - - def __init__(self, root): - self.hg = self._import_mercurial() - self.normal_actions = FileSystemCommands() - try: - self.ui = self.hg.ui.ui( - verbose=False, debug=False, quiet=True, - interactive=False, traceback=False, report_untrusted=False) - except: - self.ui = self.hg.ui.ui() - self.ui.setconfig('ui', 'interactive', 'no') - self.ui.setconfig('ui', 'debug', 'no') - self.ui.setconfig('ui', 'traceback', 'no') - self.ui.setconfig('ui', 'verbose', 'no') - self.ui.setconfig('ui', 'report_untrusted', 'no') - self.ui.setconfig('ui', 'quiet', 'yes') - - self.repo = self.hg.hg.repository(self.ui, root) - - def _import_mercurial(self): - import mercurial.commands - import mercurial.hg - import mercurial.ui - return mercurial - - def create_file(self, path): - self.normal_actions.create_file(path) - self.hg.commands.add(self.ui, self.repo, path) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - - def move(self, path, new_location): - self.hg.commands.rename(self.ui, self.repo, path, - new_location, after=False) - - def remove(self, path): - self.hg.commands.remove(self.ui, self.repo, path) - - def write(self, path, data): - self.normal_actions.write(path, data) - - -class GITCommands(object): - - def __init__(self, root): - self.root = root - self._do(['version']) - self.normal_actions = FileSystemCommands() - - def create_file(self, path): - self.normal_actions.create_file(path) - self._do(['add', self._in_dir(path)]) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - - def move(self, path, new_location): - self._do(['mv', self._in_dir(path), self._in_dir(new_location)]) - - def remove(self, path): - self._do(['rm', self._in_dir(path)]) - - def write(self, path, data): - # XXX: should we use ``git add``? 
- self.normal_actions.write(path, data) - - def _do(self, args): - _execute(['git'] + args, cwd=self.root) - - def _in_dir(self, path): - if path.startswith(self.root): - return path[len(self.root) + 1:] - return self.root - - -class DarcsCommands(object): - - def __init__(self, root): - self.root = root - self.normal_actions = FileSystemCommands() - - def create_file(self, path): - self.normal_actions.create_file(path) - self._do(['add', path]) - - def create_folder(self, path): - self.normal_actions.create_folder(path) - self._do(['add', path]) - - def move(self, path, new_location): - self._do(['mv', path, new_location]) - - def remove(self, path): - self.normal_actions.remove(path) - - def write(self, path, data): - self.normal_actions.write(path, data) - - def _do(self, args): - _execute(['darcs'] + args, cwd=self.root) - - -def _execute(args, cwd=None): - process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE) - process.wait() - return process.returncode - - -def unicode_to_file_data(contents, encoding=None): - if not isinstance(contents, str): - return contents - if encoding is None: - encoding = read_str_coding(contents) - if encoding is not None: - return contents.encode(encoding) - try: - return contents.encode() - except UnicodeEncodeError: - return contents.encode('utf-8') - -def file_data_to_unicode(data, encoding=None): - result = _decode_data(data, encoding) - if '\r' in result: - result = result.replace('\r\n', '\n').replace('\r', '\n') - return result - -def _decode_data(data, encoding): - if isinstance(data, str): - return data - if encoding is None: - encoding = read_str_coding(data) - if encoding is None: - # there is no encoding tip, we need to guess. - # PEP263 says that "encoding not explicitly defined" means it is ascii, - # but we will use utf8 instead since utf8 fully covers ascii and btw is - # the only non-latin sane encoding. 
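A sketch of the round trip the helpers above implement: a PEP 263 coding cookie in the source selects the encoding on write, and the same cookie is honored when the bytes are decoded again; without a cookie the code falls back to UTF-8 as the comment explains.

    from rope.base.fscommands import file_data_to_unicode, unicode_to_file_data

    source = u'# -*- coding: latin-1 -*-\nname = "Se\xf1or"\n'
    data = unicode_to_file_data(source)            # encoded with latin-1
    assert isinstance(data, bytes)
    assert file_data_to_unicode(data) == source    # decoded via the same cookie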
- encoding = 'utf-8' - try: - return data.decode(encoding) - except (UnicodeError, LookupError): - # fallback to utf-8: it should never fail - return data.decode('utf-8') - - -def read_file_coding(path): - file = open(path, 'b') - count = 0 - result = [] - buffsize = 10 - while True: - current = file.read(10) - if not current: - break - count += current.count('\n') - result.append(current) - file.close() - return _find_coding(''.join(result)) - - -def read_str_coding(source): - if not isinstance(source, str): - source = source.decode("utf-8", "ignore") - #TODO: change it to precompiled version - mex = re.search("\-\*\-\s+coding:\s*(.*?)\s+\-\*\-", source) - if mex: - return mex.group(1) - return "utf-8" - -def _find_coding(text): - coding = 'coding' - try: - start = text.index(coding) + len(coding) - if text[start] not in '=:': - return - start += 1 - while start < len(text) and text[start].isspace(): - start += 1 - end = start - while end < len(text): - c = text[end] - if not c.isalnum() and c not in '-_': - break - end += 1 - return text[start:end] - except ValueError: - pass diff --git a/pymode/libs3/rope/base/history.py b/pymode/libs3/rope/base/history.py deleted file mode 100644 index d3c523d3..00000000 --- a/pymode/libs3/rope/base/history.py +++ /dev/null @@ -1,235 +0,0 @@ -from rope.base import exceptions, change, taskhandle - - -class History(object): - """A class that holds project history""" - - def __init__(self, project, maxundos=None): - self.project = project - self._undo_list = [] - self._redo_list = [] - self._maxundos = maxundos - self._load_history() - self.project.data_files.add_write_hook(self.write) - self.current_change = None - - def _load_history(self): - if self.save: - result = self.project.data_files.read_data( - 'history', compress=self.compress, import_=True) - if result is not None: - to_change = change.DataToChange(self.project) - for data in result[0]: - self._undo_list.append(to_change(data)) - for data in result[1]: - self._redo_list.append(to_change(data)) - - def do(self, changes, task_handle=taskhandle.NullTaskHandle()): - """Perform the change and add it to the `self.undo_list` - - Note that uninteresting changes (changes to ignored files) - will not be appended to `self.undo_list`. - - """ - try: - self.current_change = changes - changes.do(change.create_job_set(task_handle, changes)) - finally: - self.current_change = None - if self._is_change_interesting(changes): - self.undo_list.append(changes) - self._remove_extra_items() - del self.redo_list[:] - - def _remove_extra_items(self): - if len(self.undo_list) > self.max_undos: - del self.undo_list[0:len(self.undo_list) - self.max_undos] - - def _is_change_interesting(self, changes): - for resource in changes.get_changed_resources(): - if not self.project.is_ignored(resource): - return True - return False - - def undo(self, change=None, drop=False, - task_handle=taskhandle.NullTaskHandle()): - """Redo done changes from the history - - When `change` is `None`, the last done change will be undone. - If change is not `None` it should be an item from - `self.undo_list`; this change and all changes that depend on - it will be undone. In both cases the list of undone changes - will be returned. - - If `drop` is `True`, the undone change will not be appended to - the redo list. 
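A sketch of the normal flow through History: Project.do() records the performed change, after which it can be rolled back and reapplied. It assumes an existing project containing greeting.py and that the history is reachable as project.history, as in rope's Project.

    from rope.base.change import ChangeContents
    from rope.base.project import Project

    project = Project('.')
    change = ChangeContents(project.get_file('greeting.py'), 'print("hi")\n')
    project.do(change)                      # lands on project.history.undo_list
    project.history.undo()                  # roll the change back
    project.history.redo()                  # ...and apply it again
    print(project.history.tobe_undone)      # the change undo() would target next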
- - """ - if not self._undo_list: - raise exceptions.HistoryError('Undo list is empty') - if change is None: - change = self.undo_list[-1] - dependencies = self._find_dependencies(self.undo_list, change) - self._move_front(self.undo_list, dependencies) - self._perform_undos(len(dependencies), task_handle) - result = self.redo_list[-len(dependencies):] - if drop: - del self.redo_list[-len(dependencies):] - return result - - def redo(self, change=None, task_handle=taskhandle.NullTaskHandle()): - """Redo undone changes from the history - - When `change` is `None`, the last undone change will be - redone. If change is not `None` it should be an item from - `self.redo_list`; this change and all changes that depend on - it will be redone. In both cases the list of redone changes - will be returned. - - """ - if not self.redo_list: - raise exceptions.HistoryError('Redo list is empty') - if change is None: - change = self.redo_list[-1] - dependencies = self._find_dependencies(self.redo_list, change) - self._move_front(self.redo_list, dependencies) - self._perform_redos(len(dependencies), task_handle) - return self.undo_list[-len(dependencies):] - - def _move_front(self, change_list, changes): - for change in changes: - change_list.remove(change) - change_list.append(change) - - def _find_dependencies(self, change_list, change): - index = change_list.index(change) - return _FindChangeDependencies(change_list[index:])() - - def _perform_undos(self, count, task_handle): - for i in range(count): - self.current_change = self.undo_list[-1] - try: - job_set = change.create_job_set(task_handle, - self.current_change) - self.current_change.undo(job_set) - finally: - self.current_change = None - self.redo_list.append(self.undo_list.pop()) - - def _perform_redos(self, count, task_handle): - for i in range(count): - self.current_change = self.redo_list[-1] - try: - job_set = change.create_job_set(task_handle, - self.current_change) - self.current_change.do(job_set) - finally: - self.current_change = None - self.undo_list.append(self.redo_list.pop()) - - def contents_before_current_change(self, file): - if self.current_change is None: - return None - result = self._search_for_change_contents([self.current_change], file) - if result is not None: - return result - if file.exists() and not file.is_folder(): - return file.read() - else: - return None - - def _search_for_change_contents(self, change_list, file): - for change_ in reversed(change_list): - if isinstance(change_, change.ChangeSet): - result = self._search_for_change_contents(change_.changes, - file) - if result is not None: - return result - if isinstance(change_, change.ChangeContents) and \ - change_.resource == file: - return change_.old_contents - - def write(self): - if self.save: - data = [] - to_data = change.ChangeToData() - self._remove_extra_items() - data.append([to_data(change_) for change_ in self.undo_list]) - data.append([to_data(change_) for change_ in self.redo_list]) - self.project.data_files.write_data('history', data, - compress=self.compress) - - def get_file_undo_list(self, resource): - result = [] - for change in self.undo_list: - if resource in change.get_changed_resources(): - result.append(change) - return result - - def __str__(self): - return 'History holds %s changes in memory' % \ - (len(self.undo_list) + len(self.redo_list)) - - undo_list = property(lambda self: self._undo_list) - redo_list = property(lambda self: self._redo_list) - - @property - def tobe_undone(self): - """The last done change if available, `None` 
otherwise""" - if self.undo_list: - return self.undo_list[-1] - - @property - def tobe_redone(self): - """The last undone change if available, `None` otherwise""" - if self.redo_list: - return self.redo_list[-1] - - @property - def max_undos(self): - if self._maxundos is None: - return self.project.prefs.get('max_history_items', 100) - else: - return self._maxundos - - @property - def save(self): - return self.project.prefs.get('save_history', False) - - @property - def compress(self): - return self.project.prefs.get('compress_history', False) - - def clear(self): - """Forget all undo and redo information""" - del self.undo_list[:] - del self.redo_list[:] - - -class _FindChangeDependencies(object): - - def __init__(self, change_list): - self.change = change_list[0] - self.change_list = change_list - self.changed_resources = set(self.change.get_changed_resources()) - - def __call__(self): - result = [self.change] - for change in self.change_list[1:]: - if self._depends_on(change, result): - result.append(change) - self.changed_resources.update(change.get_changed_resources()) - return result - - def _depends_on(self, changes, result): - for resource in changes.get_changed_resources(): - if resource is None: - continue - if resource in self.changed_resources: - return True - for changed in self.changed_resources: - if resource.is_folder() and resource.contains(changed): - return True - if changed.is_folder() and changed.contains(resource): - return True - return False diff --git a/pymode/libs3/rope/base/libutils.py b/pymode/libs3/rope/base/libutils.py deleted file mode 100644 index cb9381e3..00000000 --- a/pymode/libs3/rope/base/libutils.py +++ /dev/null @@ -1,65 +0,0 @@ -"""A few useful functions for using rope as a library""" -import os.path - -import rope.base.project -import rope.base.pycore -from rope.base import taskhandle - - -def path_to_resource(project, path, type=None): - """Get the resource at path - - You only need to specify `type` if `path` does not exist. It can - be either 'file' or 'folder'. If the type is `None` it is assumed - that the resource already exists. - - Note that this function uses `Project.get_resource()`, - `Project.get_file()`, and `Project.get_folder()` methods. - - """ - project_path = relative(project.address, path) - if project_path is None: - project_path = rope.base.project._realpath(path) - project = rope.base.project.get_no_project() - if type is None: - return project.get_resource(project_path) - if type == 'file': - return project.get_file(project_path) - if type == 'folder': - return project.get_folder(project_path) - return None - -def relative(root, path): - root = rope.base.project._realpath(root).replace(os.path.sep, '/') - path = rope.base.project._realpath(path).replace(os.path.sep, '/') - if path == root: - return '' - if path.startswith(root + '/'): - return path[len(root) + 1:] - -def report_change(project, path, old_content): - """Report that the contents of file at `path` was changed - - The new contents of file is retrieved by reading the file. - - """ - resource = path_to_resource(project, path) - if resource is None: - return - for observer in list(project.observers): - observer.resource_changed(resource) - if project.pycore.automatic_soa: - rope.base.pycore.perform_soa_on_changed_scopes(project, resource, - old_content) - -def analyze_modules(project, task_handle=taskhandle.NullTaskHandle()): - """Perform static object analysis on all python files in the project - - Note that this might be really time consuming. 
- """ - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Analyzing Modules', len(resources)) - for resource in resources: - job_set.started_job(resource.path) - project.pycore.analyze_module(resource) - job_set.finished_job() diff --git a/pymode/libs3/rope/base/oi/__init__.py b/pymode/libs3/rope/base/oi/__init__.py deleted file mode 100644 index 0b1a1525..00000000 --- a/pymode/libs3/rope/base/oi/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Rope object analysis and inference package - -Rope makes some simplifying assumptions about a python program. It -assumes that a program only performs assignments and function calls. -Tracking assignments is simple and `PyName` objects handle that. The -main problem is function calls. Rope uses these two approaches for -obtaining call information: - -* Static object analysis: `rope.base.pycore.PyCore.analyze_module()` - - It can analyze modules to obtain information about functions. This - is done by analyzing function calls in a module or scope. Currently - SOA analyzes the scopes that are changed while saving or when the - user asks to analyze a module. That is mainly because static - analysis is time-consuming. - -* Dynamic object analysis: `rope.base.pycore.PyCore.run_module()` - - When you run a module or your testsuite, when DOA is enabled, it - collects information about parameters passed to and objects returned - from functions. The main problem with this approach is that it is - quite slow; Not when looking up the information but when collecting - them. - -An instance of `rope.base.oi.objectinfo.ObjectInfoManager` can be used -for accessing these information. It saves the data in a -`rope.base.oi.objectdb.ObjectDB` internally. - -Now if our objectdb does not know anything about a function and we -need the value returned by it, static object inference, SOI, comes -into play. It analyzes function body and tries to infer the object -that is returned from it (we usually need the returned value for the -given parameter objects). - -Rope might collect and store information for other `PyName`\s, too. -For instance rope stores the object builtin containers hold. 
- -""" diff --git a/pymode/libs3/rope/base/oi/doa.py b/pymode/libs3/rope/base/oi/doa.py deleted file mode 100644 index 1efb994c..00000000 --- a/pymode/libs3/rope/base/oi/doa.py +++ /dev/null @@ -1,162 +0,0 @@ -import pickle -import marshal -import os -import socket -import subprocess -import sys -import tempfile -import threading - - -class PythonFileRunner(object): - """A class for running python project files""" - - def __init__(self, pycore, file_, args=None, stdin=None, - stdout=None, analyze_data=None): - self.pycore = pycore - self.file = file_ - self.analyze_data = analyze_data - self.observers = [] - self.args = args - self.stdin = stdin - self.stdout = stdout - - def run(self): - """Execute the process""" - env = dict(os.environ) - file_path = self.file.real_path - path_folders = self.pycore.get_source_folders() + \ - self.pycore.get_python_path_folders() - env['PYTHONPATH'] = os.pathsep.join(folder.real_path - for folder in path_folders) - runmod_path = self.pycore.find_module('rope.base.oi.runmod').real_path - self.receiver = None - self._init_data_receiving() - send_info = '-' - if self.receiver: - send_info = self.receiver.get_send_info() - args = [sys.executable, runmod_path, send_info, - self.pycore.project.address, self.file.real_path] - if self.analyze_data is None: - del args[1:4] - if self.args is not None: - args.extend(self.args) - self.process = subprocess.Popen( - executable=sys.executable, args=args, env=env, - cwd=os.path.split(file_path)[0], stdin=self.stdin, - stdout=self.stdout, stderr=self.stdout, close_fds=os.name != 'nt') - - def _init_data_receiving(self): - if self.analyze_data is None: - return - # Disabling FIFO data transfer due to blocking when running - # unittests in the GUI. - # XXX: Handle FIFO data transfer for `rope.ui.testview` - if True or os.name == 'nt': - self.receiver = _SocketReceiver() - else: - self.receiver = _FIFOReceiver() - self.receiving_thread = threading.Thread(target=self._receive_information) - self.receiving_thread.setDaemon(True) - self.receiving_thread.start() - - def _receive_information(self): - #temp = open('/dev/shm/info', 'w') - for data in self.receiver.receive_data(): - self.analyze_data(data) - #temp.write(str(data) + '\n') - #temp.close() - for observer in self.observers: - observer() - - def wait_process(self): - """Wait for the process to finish""" - self.process.wait() - if self.analyze_data: - self.receiving_thread.join() - - def kill_process(self): - """Stop the process""" - if self.process.poll() is not None: - return - try: - if hasattr(self.process, 'terminate'): - self.process.terminate() - elif os.name != 'nt': - os.kill(self.process.pid, 9) - else: - import ctypes - handle = int(self.process._handle) - ctypes.windll.kernel32.TerminateProcess(handle, -1) - except OSError: - pass - - def add_finishing_observer(self, observer): - """Notify this observer when execution finishes""" - self.observers.append(observer) - - -class _MessageReceiver(object): - - def receive_data(self): - pass - - def get_send_info(self): - pass - - -class _SocketReceiver(_MessageReceiver): - - def __init__(self): - self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.data_port = 3037 - while self.data_port < 4000: - try: - self.server_socket.bind(('', self.data_port)) - break - except socket.error as e: - self.data_port += 1 - self.server_socket.listen(1) - - def get_send_info(self): - return str(self.data_port) - - def receive_data(self): - conn, addr = self.server_socket.accept() - self.server_socket.close() 
- my_file = conn.makefile('rb') - while True: - try: - yield pickle.load(my_file) - except EOFError: - break - my_file.close() - conn.close() - - -class _FIFOReceiver(_MessageReceiver): - - def __init__(self): - # XXX: this is insecure and might cause race conditions - self.file_name = self._get_file_name() - os.mkfifo(self.file_name) - - def _get_file_name(self): - prefix = tempfile.gettempdir() + '/__rope_' - i = 0 - while os.path.exists(prefix + str(i).rjust(4, '0')): - i += 1 - return prefix + str(i).rjust(4, '0') - - def get_send_info(self): - return self.file_name - - def receive_data(self): - my_file = open(self.file_name, 'rb') - while True: - try: - yield marshal.load(my_file) - except EOFError: - break - my_file.close() - os.remove(self.file_name) diff --git a/pymode/libs3/rope/base/oi/memorydb.py b/pymode/libs3/rope/base/oi/memorydb.py deleted file mode 100644 index e4c3a1c8..00000000 --- a/pymode/libs3/rope/base/oi/memorydb.py +++ /dev/null @@ -1,106 +0,0 @@ -from rope.base.oi import objectdb - - -class MemoryDB(objectdb.FileDict): - - def __init__(self, project, persist=None): - self.project = project - self._persist = persist - self.files = self - self._load_files() - self.project.data_files.add_write_hook(self.write) - - def _load_files(self): - self._files = {} - if self.persist: - result = self.project.data_files.read_data( - 'objectdb', compress=self.compress, import_=True) - if result is not None: - self._files = result - - def keys(self): - return list(self._files.keys()) - - def __contains__(self, key): - return key in self._files - - def __getitem__(self, key): - return FileInfo(self._files[key]) - - def create(self, path): - self._files[path] = {} - - def rename(self, file, newfile): - if file not in self._files: - return - self._files[newfile] = self._files[file] - del self[file] - - def __delitem__(self, file): - del self._files[file] - - def write(self): - if self.persist: - self.project.data_files.write_data('objectdb', self._files, - self.compress) - - @property - def compress(self): - return self.project.prefs.get('compress_objectdb', False) - - @property - def persist(self): - if self._persist is not None: - return self._persist - else: - return self.project.prefs.get('save_objectdb', False) - - -class FileInfo(objectdb.FileInfo): - - def __init__(self, scopes): - self.scopes = scopes - - def create_scope(self, key): - self.scopes[key] = ScopeInfo() - - def keys(self): - return list(self.scopes.keys()) - - def __contains__(self, key): - return key in self.scopes - - def __getitem__(self, key): - return self.scopes[key] - - def __delitem__(self, key): - del self.scopes[key] - - -class ScopeInfo(objectdb.ScopeInfo): - - def __init__(self): - self.call_info = {} - self.per_name = {} - - def get_per_name(self, name): - return self.per_name.get(name, None) - - def save_per_name(self, name, value): - self.per_name[name] = value - - def get_returned(self, parameters): - return self.call_info.get(parameters, None) - - def get_call_infos(self): - for args, returned in self.call_info.items(): - yield objectdb.CallInfo(args, returned) - - def add_call(self, parameters, returned): - self.call_info[parameters] = returned - - def __getstate__(self): - return (self.call_info, self.per_name) - - def __setstate__(self, data): - self.call_info, self.per_name = data diff --git a/pymode/libs3/rope/base/oi/objectdb.py b/pymode/libs3/rope/base/oi/objectdb.py deleted file mode 100644 index 97d4c2eb..00000000 --- a/pymode/libs3/rope/base/oi/objectdb.py +++ /dev/null @@ -1,192 +0,0 @@ 
-from collections import UserDict, MutableMapping - -class ObjectDB(object): - - def __init__(self, db, validation): - self.db = db - self.validation = validation - self.observers = [] - self.files = db.files - - def validate_files(self): - for file in list(self.files): - if not self.validation.is_file_valid(file): - del self.files[file] - self._file_removed(file) - - def validate_file(self, file): - if file not in self.files: - return - for key in list(self.files[file]): - if not self.validation.is_scope_valid(file, key): - del self.files[file][key] - - def file_moved(self, file, newfile): - if file not in self.files: - return - self.files.rename(file, newfile) - self._file_removed(file) - self._file_added(newfile) - - def get_files(self): - return list(self.files.keys()) - - def get_returned(self, path, key, args): - scope_info = self._get_scope_info(path, key, readonly=True) - result = scope_info.get_returned(args) - if self.validation.is_value_valid(result): - return result - - def get_pername(self, path, key, name): - scope_info = self._get_scope_info(path, key, readonly=True) - result = scope_info.get_per_name(name) - if self.validation.is_value_valid(result): - return result - - def get_callinfos(self, path, key): - scope_info = self._get_scope_info(path, key, readonly=True) - return scope_info.get_call_infos() - - def add_callinfo(self, path, key, args, returned): - scope_info = self._get_scope_info(path, key, readonly=False) - old_returned = scope_info.get_returned(args) - if self.validation.is_more_valid(returned, old_returned): - scope_info.add_call(args, returned) - - def add_pername(self, path, key, name, value): - scope_info = self._get_scope_info(path, key, readonly=False) - old_value = scope_info.get_per_name(name) - if self.validation.is_more_valid(value, old_value): - scope_info.save_per_name(name, value) - - def add_file_list_observer(self, observer): - self.observers.append(observer) - - def write(self): - self.db.write() - - def _get_scope_info(self, path, key, readonly=True): - if path not in self.files: - if readonly: - return _NullScopeInfo() - self.files.create(path) - self._file_added(path) - if key not in self.files[path]: - if readonly: - return _NullScopeInfo() - self.files[path].create_scope(key) - result = self.files[path][key] - if isinstance(result, dict): - print(self.files, self.files[path], self.files[path][key]) - return result - - def _file_removed(self, path): - for observer in self.observers: - observer.removed(path) - - def _file_added(self, path): - for observer in self.observers: - observer.added(path) - - def __str__(self): - scope_count = 0 - for file_dict in self.files.values(): - scope_count += len(file_dict) - return 'ObjectDB holds %s file and %s scope infos' % \ - (len(self.files), scope_count) - - -class _NullScopeInfo(object): - - def __init__(self, error_on_write=True): - self.error_on_write = error_on_write - - def get_per_name(self, name): - pass - - def save_per_name(self, name, value): - if self.error_on_write: - raise NotImplementedError() - - def get_returned(self, parameters): - pass - - def get_call_infos(self): - return [] - - def add_call(self, parameters, returned): - if self.error_on_write: - raise NotImplementedError() - - -class FileInfo(MutableMapping): - - def create_scope(self, key): - pass - - def __iter__(self): - for key in self.keys(): - yield key - - def __len__(self): - return len(self.keys()) - - def __setitem__(self, key, value): - self[key] = value - -class FileDict(MutableMapping): - - def create(self, key): - 
pass - - def rename(self, key, new_key): - pass - - def __iter__(self): - for key in self.keys(): - yield key - - def __len__(self): - return len(self.keys()) - - def __setitem__(self, key, value): - self[key] = value - -class ScopeInfo(object): - - def get_per_name(self, name): - pass - - def save_per_name(self, name, value): - pass - - def get_returned(self, parameters): - pass - - def get_call_infos(self): - pass - - def add_call(self, parameters, returned): - pass - - -class CallInfo(object): - - def __init__(self, args, returned): - self.args = args - self.returned = returned - - def get_parameters(self): - return self.args - - def get_returned(self): - return self.returned - - -class FileListObserver(object): - - def added(self, path): - pass - - def removed(self, path): - pass diff --git a/pymode/libs3/rope/base/oi/objectinfo.py b/pymode/libs3/rope/base/oi/objectinfo.py deleted file mode 100644 index f86d72e0..00000000 --- a/pymode/libs3/rope/base/oi/objectinfo.py +++ /dev/null @@ -1,232 +0,0 @@ -import warnings - -from rope.base import exceptions, resourceobserver -from rope.base.oi import objectdb, memorydb, transform - - -class ObjectInfoManager(object): - """Stores object information - - It uses an instance of `objectdb.ObjectDB` for storing - information. - - """ - - def __init__(self, project): - self.project = project - self.to_textual = transform.PyObjectToTextual(project) - self.to_pyobject = transform.TextualToPyObject(project) - self.doi_to_pyobject = transform.DOITextualToPyObject(project) - self._init_objectdb() - if project.prefs.get('validate_objectdb', False): - self._init_validation() - - def _init_objectdb(self): - dbtype = self.project.get_prefs().get('objectdb_type', None) - persist = None - if dbtype is not None: - warnings.warn( - '"objectdb_type" project config is deprecated;\n' - 'Use "save_objectdb" instead in your project ' - 'config file.\n(".ropeproject/config.py" by default)\n', - DeprecationWarning) - if dbtype != 'memory' and self.project.ropefolder is not None: - persist = True - self.validation = TextualValidation(self.to_pyobject) - db = memorydb.MemoryDB(self.project, persist=persist) - self.objectdb = objectdb.ObjectDB(db, self.validation) - - def _init_validation(self): - self.objectdb.validate_files() - observer = resourceobserver.ResourceObserver( - changed=self._resource_changed, moved=self._resource_moved, - removed=self._resource_moved) - files = [] - for path in self.objectdb.get_files(): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None and resource.project == self.project: - files.append(resource) - self.observer = resourceobserver.FilteredResourceObserver(observer, - files) - self.objectdb.add_file_list_observer(_FileListObserver(self)) - self.project.add_observer(self.observer) - - def _resource_changed(self, resource): - try: - self.objectdb.validate_file( - self.to_textual.resource_to_path(resource)) - except exceptions.ModuleSyntaxError: - pass - - def _resource_moved(self, resource, new_resource=None): - self.observer.remove_resource(resource) - if new_resource is not None: - old = self.to_textual.resource_to_path(resource) - new = self.to_textual.resource_to_path(new_resource) - self.objectdb.file_moved(old, new) - self.observer.add_resource(new_resource) - - def get_returned(self, pyobject, args): - result = self.get_exact_returned(pyobject, args) - if result is not None: - return result - path, key = self._get_scope(pyobject) - if path is None: - return None - for call_info in 
self.objectdb.get_callinfos(path, key): - returned = call_info.get_returned() - if returned and returned[0] not in ('unknown', 'none'): - result = returned - break - if result is None: - result = returned - if result is not None: - return self.to_pyobject(result) - - def get_exact_returned(self, pyobject, args): - path, key = self._get_scope(pyobject) - if path is not None: - returned = self.objectdb.get_returned( - path, key, self._args_to_textual(pyobject, args)) - if returned is not None: - return self.to_pyobject(returned) - - def _args_to_textual(self, pyfunction, args): - parameters = list(pyfunction.get_param_names(special_args=False)) - arguments = args.get_arguments(parameters)[:len(parameters)] - textual_args = tuple([self.to_textual(arg) - for arg in arguments]) - return textual_args - - def get_parameter_objects(self, pyobject): - path, key = self._get_scope(pyobject) - if path is None: - return None - arg_count = len(pyobject.get_param_names(special_args=False)) - unknowns = arg_count - parameters = [None] * arg_count - for call_info in self.objectdb.get_callinfos(path, key): - args = call_info.get_parameters() - for index, arg in enumerate(args[:arg_count]): - old = parameters[index] - if self.validation.is_more_valid(arg, old): - parameters[index] = arg - if self.validation.is_value_valid(arg): - unknowns -= 1 - if unknowns == 0: - break - if unknowns < arg_count: - return [self.to_pyobject(parameter) - for parameter in parameters] - - def get_passed_objects(self, pyfunction, parameter_index): - path, key = self._get_scope(pyfunction) - if path is None: - return [] - result = [] - for call_info in self.objectdb.get_callinfos(path, key): - args = call_info.get_parameters() - if len(args) > parameter_index: - parameter = self.to_pyobject(args[parameter_index]) - if parameter is not None: - result.append(parameter) - return result - - def doa_data_received(self, data): - def doi_to_normal(textual): - pyobject = self.doi_to_pyobject(textual) - return self.to_textual(pyobject) - function = doi_to_normal(data[0]) - args = tuple([doi_to_normal(textual) for textual in data[1]]) - returned = doi_to_normal(data[2]) - if function[0] == 'defined' and len(function) == 3: - self._save_data(function, args, returned) - - def function_called(self, pyfunction, params, returned=None): - function_text = self.to_textual(pyfunction) - params_text = tuple([self.to_textual(param) - for param in params]) - returned_text = ('unknown',) - if returned is not None: - returned_text = self.to_textual(returned) - self._save_data(function_text, params_text, returned_text) - - def save_per_name(self, scope, name, data): - path, key = self._get_scope(scope.pyobject) - if path is not None: - self.objectdb.add_pername(path, key, name, self.to_textual(data)) - - def get_per_name(self, scope, name): - path, key = self._get_scope(scope.pyobject) - if path is not None: - result = self.objectdb.get_pername(path, key, name) - if result is not None: - return self.to_pyobject(result) - - def _save_data(self, function, args, returned=('unknown',)): - self.objectdb.add_callinfo(function[1], function[2], args, returned) - - def _get_scope(self, pyobject): - resource = pyobject.get_module().get_resource() - if resource is None: - return None, None - textual = self.to_textual(pyobject) - if textual[0] == 'defined': - path = textual[1] - if len(textual) == 3: - key = textual[2] - else: - key = '' - return path, key - return None, None - - def sync(self): - self.objectdb.sync() - - def __str__(self): - return 
str(self.objectdb) - - -class TextualValidation(object): - - def __init__(self, to_pyobject): - self.to_pyobject = to_pyobject - - def is_value_valid(self, value): - # ???: Should none and unknown be considered valid? - if value is None or value[0] in ('none', 'unknown'): - return False - return self.to_pyobject(value) is not None - - def is_more_valid(self, new, old): - if old is None: - return True - return new[0] not in ('unknown', 'none') - - def is_file_valid(self, path): - return self.to_pyobject.path_to_resource(path) is not None - - def is_scope_valid(self, path, key): - if key == '': - textual = ('defined', path) - else: - textual = ('defined', path, key) - return self.to_pyobject(textual) is not None - - -class _FileListObserver(object): - - def __init__(self, object_info): - self.object_info = object_info - self.observer = self.object_info.observer - self.to_pyobject = self.object_info.to_pyobject - - def removed(self, path): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None: - self.observer.remove_resource(resource) - - def added(self, path): - resource = self.to_pyobject.path_to_resource(path) - if resource is not None: - self.observer.add_resource(resource) diff --git a/pymode/libs3/rope/base/oi/runmod.py b/pymode/libs3/rope/base/oi/runmod.py deleted file mode 100644 index 45b33fbc..00000000 --- a/pymode/libs3/rope/base/oi/runmod.py +++ /dev/null @@ -1,215 +0,0 @@ - -def __rope_start_everything(): - import os - import sys - import socket - import pickle - import marshal - import inspect - import types - import threading - - class _MessageSender(object): - - def send_data(self, data): - pass - - class _SocketSender(_MessageSender): - - def __init__(self, port): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect(('127.0.0.1', port)) - self.my_file = s.makefile('wb') - - def send_data(self, data): - if not self.my_file.closed: - pickle.dump(data, self.my_file) - - def close(self): - self.my_file.close() - - class _FileSender(_MessageSender): - - def __init__(self, file_name): - self.my_file = open(file_name, 'wb') - - def send_data(self, data): - if not self.my_file.closed: - marshal.dump(data, self.my_file) - - def close(self): - self.my_file.close() - - - def _cached(func): - cache = {} - def newfunc(self, arg): - if arg in cache: - return cache[arg] - result = func(self, arg) - cache[arg] = result - return result - return newfunc - - class _FunctionCallDataSender(object): - - def __init__(self, send_info, project_root): - self.project_root = project_root - if send_info.isdigit(): - self.sender = _SocketSender(int(send_info)) - else: - self.sender = _FileSender(send_info) - - def global_trace(frame, event, arg): - # HACK: Ignoring out->in calls - # This might lose some information - if self._is_an_interesting_call(frame): - return self.on_function_call - sys.settrace(global_trace) - threading.settrace(global_trace) - - def on_function_call(self, frame, event, arg): - if event != 'return': - return - args = [] - returned = ('unknown',) - code = frame.f_code - for argname in code.co_varnames[:code.co_argcount]: - try: - args.append(self._object_to_persisted_form(frame.f_locals[argname])) - except (TypeError, AttributeError): - args.append(('unknown',)) - try: - returned = self._object_to_persisted_form(arg) - except (TypeError, AttributeError): - pass - try: - data = (self._object_to_persisted_form(frame.f_code), - tuple(args), returned) - self.sender.send_data(data) - except (TypeError): - pass - return self.on_function_call - - def 
_is_an_interesting_call(self, frame): - #if frame.f_code.co_name in ['?', '']: - # return False - #return not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code) - - if not self._is_code_inside_project(frame.f_code) and \ - (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)): - return False - return True - - def _is_code_inside_project(self, code): - source = self._path(code.co_filename) - return source is not None and os.path.exists(source) and \ - _realpath(source).startswith(self.project_root) - - @_cached - def _get_persisted_code(self, object_): - source = self._path(object_.co_filename) - if not os.path.exists(source): - raise TypeError('no source') - return ('defined', _realpath(source), str(object_.co_firstlineno)) - - @_cached - def _get_persisted_class(self, object_): - try: - return ('defined', _realpath(inspect.getsourcefile(object_)), - object_.__name__) - except (TypeError, AttributeError): - return ('unknown',) - - def _get_persisted_builtin(self, object_): - if isinstance(object_, str): - return ('builtin', 'str') - if isinstance(object_, list): - holding = None - if len(object_) > 0: - holding = object_[0] - return ('builtin', 'list', self._object_to_persisted_form(holding)) - if isinstance(object_, dict): - keys = None - values = None - if len(object_) > 0: - keys = list(object_.keys())[0] - values = object_[keys] - if values == object_ and len(object_) > 1: - keys = list(object_.keys())[1] - values = object_[keys] - return ('builtin', 'dict', - self._object_to_persisted_form(keys), - self._object_to_persisted_form(values)) - if isinstance(object_, tuple): - objects = [] - if len(object_) < 3: - for holding in object_: - objects.append(self._object_to_persisted_form(holding)) - else: - objects.append(self._object_to_persisted_form(object_[0])) - return tuple(['builtin', 'tuple'] + objects) - if isinstance(object_, set): - holding = None - if len(object_) > 0: - for o in object_: - holding = o - break - return ('builtin', 'set', self._object_to_persisted_form(holding)) - return ('unknown',) - - def _object_to_persisted_form(self, object_): - if object_ is None: - return ('none',) - if isinstance(object_, types.CodeType): - return self._get_persisted_code(object_) - if isinstance(object_, types.FunctionType): - return self._get_persisted_code(object_.__code__) - if isinstance(object_, types.MethodType): - return self._get_persisted_code(object_.__func__.__code__) - if isinstance(object_, types.ModuleType): - return self._get_persisted_module(object_) - if isinstance(object_, (str, list, dict, tuple, set)): - return self._get_persisted_builtin(object_) - if isinstance(object_, type): - return self._get_persisted_class(object_) - return ('instance', self._get_persisted_class(type(object_))) - - @_cached - def _get_persisted_module(self, object_): - path = self._path(object_.__file__) - if path and os.path.exists(path): - return ('defined', _realpath(path)) - return ('unknown',) - - def _path(self, path): - if path.endswith('.pyc'): - path = path[:-1] - if path.endswith('.py'): - return path - - def close(self): - self.sender.close() - sys.settrace(None) - - def _realpath(path): - return os.path.realpath(os.path.abspath(os.path.expanduser(path))) - - send_info = sys.argv[1] - project_root = sys.argv[2] - file_to_run = sys.argv[3] - run_globals = globals() - run_globals.update({'__name__': '__main__', - 'builtins': __builtins__, - '__file__': file_to_run}) - if send_info != '-': - data_sender = _FunctionCallDataSender(send_info, 
project_root) - del sys.argv[1:4] - with open(file_to_run) as file: - exec(compile(file.read(), file_to_run, 'exec'), run_globals) - if send_info != '-': - data_sender.close() - - -if __name__ == '__main__': - __rope_start_everything() diff --git a/pymode/libs3/rope/base/oi/soa.py b/pymode/libs3/rope/base/oi/soa.py deleted file mode 100644 index 38cd5c9d..00000000 --- a/pymode/libs3/rope/base/oi/soa.py +++ /dev/null @@ -1,136 +0,0 @@ -import rope.base.ast -import rope.base.oi.soi -import rope.base.pynames -from rope.base import pyobjects, evaluate, astutils, arguments - - -def analyze_module(pycore, pymodule, should_analyze, - search_subscopes, followed_calls): - """Analyze `pymodule` for static object inference - - Analyzes scopes for collecting object information. The analysis - starts from inner scopes. - - """ - _analyze_node(pycore, pymodule, should_analyze, - search_subscopes, followed_calls) - - -def _analyze_node(pycore, pydefined, should_analyze, - search_subscopes, followed_calls): - if search_subscopes(pydefined): - for scope in pydefined.get_scope().get_scopes(): - _analyze_node(pycore, scope.pyobject, should_analyze, - search_subscopes, followed_calls) - if should_analyze(pydefined): - new_followed_calls = max(0, followed_calls - 1) - return_true = lambda pydefined: True - return_false = lambda pydefined: False - def _follow(pyfunction): - _analyze_node(pycore, pyfunction, return_true, - return_false, new_followed_calls) - if not followed_calls: - _follow = None - visitor = SOAVisitor(pycore, pydefined, _follow) - for child in rope.base.ast.get_child_nodes(pydefined.get_ast()): - rope.base.ast.walk(child, visitor) - - -class SOAVisitor(object): - - def __init__(self, pycore, pydefined, follow_callback=None): - self.pycore = pycore - self.pymodule = pydefined.get_module() - self.scope = pydefined.get_scope() - self.follow = follow_callback - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def _Call(self, node): - for child in rope.base.ast.get_child_nodes(node): - rope.base.ast.walk(child, self) - primary, pyname = evaluate.eval_node2(self.scope, node.func) - if pyname is None: - return - pyfunction = pyname.get_object() - if isinstance(pyfunction, pyobjects.AbstractFunction): - args = arguments.create_arguments(primary, pyfunction, - node, self.scope) - elif isinstance(pyfunction, pyobjects.PyClass): - pyclass = pyfunction - if '__init__' in pyfunction: - pyfunction = pyfunction['__init__'].get_object() - pyname = rope.base.pynames.UnboundName(pyobjects.PyObject(pyclass)) - args = self._args_with_self(primary, pyname, pyfunction, node) - elif '__call__' in pyfunction: - pyfunction = pyfunction['__call__'].get_object() - args = self._args_with_self(primary, pyname, pyfunction, node) - else: - return - self._call(pyfunction, args) - - def _args_with_self(self, primary, self_pyname, pyfunction, node): - base_args = arguments.create_arguments(primary, pyfunction, - node, self.scope) - return arguments.MixedArguments(self_pyname, base_args, self.scope) - - def _call(self, pyfunction, args): - if isinstance(pyfunction, pyobjects.PyFunction): - if self.follow is not None: - before = self._parameter_objects(pyfunction) - self.pycore.object_info.function_called( - pyfunction, args.get_arguments(pyfunction.get_param_names())) - pyfunction._set_parameter_pyobjects(None) - if self.follow is not None: - after = self._parameter_objects(pyfunction) - if after != before: - self.follow(pyfunction) - # XXX: Maybe we should not call every builtin function - if 
isinstance(pyfunction, rope.base.builtins.BuiltinFunction): - pyfunction.get_returned_object(args) - - def _parameter_objects(self, pyfunction): - result = [] - for i in range(len(pyfunction.get_param_names(False))): - result.append(pyfunction.get_parameter(i)) - return result - - def _Assign(self, node): - for child in rope.base.ast.get_child_nodes(node): - rope.base.ast.walk(child, self) - visitor = _SOAAssignVisitor() - nodes = [] - for child in node.targets: - rope.base.ast.walk(child, visitor) - nodes.extend(visitor.nodes) - for subscript, levels in nodes: - instance = evaluate.eval_node(self.scope, subscript.value) - args_pynames = [] - args_pynames.append(evaluate.eval_node(self.scope, - subscript.slice.value)) - value = rope.base.oi.soi._infer_assignment( - rope.base.pynames.AssignmentValue(node.value, levels), self.pymodule) - args_pynames.append(rope.base.pynames.UnboundName(value)) - if instance is not None and value is not None: - pyobject = instance.get_object() - if '__setitem__' in pyobject: - pyfunction = pyobject['__setitem__'].get_object() - args = arguments.ObjectArguments([instance] + args_pynames) - self._call(pyfunction, args) - # IDEA: handle `__setslice__`, too - - -class _SOAAssignVisitor(astutils._NodeNameCollector): - - def __init__(self): - super(_SOAAssignVisitor, self).__init__() - self.nodes = [] - - def _added(self, node, levels): - if isinstance(node, rope.base.ast.Subscript) and \ - isinstance(node.slice, rope.base.ast.Index): - self.nodes.append((node, levels)) diff --git a/pymode/libs3/rope/base/oi/soi.py b/pymode/libs3/rope/base/oi/soi.py deleted file mode 100644 index bf40af90..00000000 --- a/pymode/libs3/rope/base/oi/soi.py +++ /dev/null @@ -1,186 +0,0 @@ -"""A module for inferring objects - -For more information see the documentation in `rope.base.oi` -package. 
- -""" -import rope.base.builtins -import rope.base.pynames -import rope.base.pyobjects -from rope.base import evaluate, utils, arguments - - -_ignore_inferred = utils.ignore_exception( - rope.base.pyobjects.IsBeingInferredError) - - -@_ignore_inferred -def infer_returned_object(pyfunction, args): - """Infer the `PyObject` this `PyFunction` returns after calling""" - object_info = pyfunction.pycore.object_info - result = object_info.get_exact_returned(pyfunction, args) - if result is not None: - return result - result = _infer_returned(pyfunction, args) - if result is not None: - if args and pyfunction.get_module().get_resource() is not None: - params = args.get_arguments( - pyfunction.get_param_names(special_args=False)) - object_info.function_called(pyfunction, params, result) - return result - return object_info.get_returned(pyfunction, args) - -@_ignore_inferred -def infer_parameter_objects(pyfunction): - """Infer the `PyObject`\s of parameters of this `PyFunction`""" - object_info = pyfunction.pycore.object_info - result = object_info.get_parameter_objects(pyfunction) - if result is None: - result = _parameter_objects(pyfunction) - _handle_first_parameter(pyfunction, result) - return result - -def _handle_first_parameter(pyobject, parameters): - kind = pyobject.get_kind() - if parameters is None or kind not in ['method', 'classmethod']: - pass - if not parameters: - if not pyobject.get_param_names(special_args=False): - return - parameters.append(rope.base.pyobjects.get_unknown()) - if kind == 'method': - parameters[0] = rope.base.pyobjects.PyObject(pyobject.parent) - if kind == 'classmethod': - parameters[0] = pyobject.parent - -@_ignore_inferred -def infer_assigned_object(pyname): - if not pyname.assignments: - return - for assignment in reversed(pyname.assignments): - result = _infer_assignment(assignment, pyname.module) - if result is not None: - return result - -def get_passed_objects(pyfunction, parameter_index): - object_info = pyfunction.pycore.object_info - result = object_info.get_passed_objects(pyfunction, - parameter_index) - if not result: - statically_inferred = _parameter_objects(pyfunction) - if len(statically_inferred) > parameter_index: - result.append(statically_inferred[parameter_index]) - return result - -def _infer_returned(pyobject, args): - if args: - # HACK: Setting parameter objects manually - # This is not thread safe and might cause problems if `args` - # does not come from a good call site - pyobject.get_scope().invalidate_data() - pyobject._set_parameter_pyobjects( - args.get_arguments(pyobject.get_param_names(special_args=False))) - scope = pyobject.get_scope() - if not scope._get_returned_asts(): - return - maxtries = 3 - for returned_node in reversed(scope._get_returned_asts()[-maxtries:]): - try: - resulting_pyname = evaluate.eval_node(scope, returned_node) - if resulting_pyname is None: - continue - pyobject = resulting_pyname.get_object() - if pyobject == rope.base.pyobjects.get_unknown(): - continue - if not scope._is_generator(): - return pyobject - else: - return rope.base.builtins.get_generator(pyobject) - except rope.base.pyobjects.IsBeingInferredError: - pass - -def _parameter_objects(pyobject): - params = pyobject.get_param_names(special_args=False) - return [rope.base.pyobjects.get_unknown()] * len(params) - -# handling `rope.base.pynames.AssignmentValue` - -@_ignore_inferred -def _infer_assignment(assignment, pymodule): - result = _follow_pyname(assignment, pymodule) - if result is None: - return None - pyname, pyobject = result - pyobject 
= _follow_evaluations(assignment, pyname, pyobject) - if pyobject is None: - return None - return _follow_levels(assignment, pyobject) - -def _follow_levels(assignment, pyobject): - for index in assignment.levels: - if isinstance(pyobject.get_type(), rope.base.builtins.Tuple): - holdings = pyobject.get_type().get_holding_objects() - if holdings: - pyobject = holdings[min(len(holdings) - 1, index)] - else: - pyobject = None - elif isinstance(pyobject.get_type(), rope.base.builtins.List): - pyobject = pyobject.get_type().holding - else: - pyobject = None - if pyobject is None: - break - return pyobject - -@_ignore_inferred -def _follow_pyname(assignment, pymodule, lineno=None): - assign_node = assignment.ast_node - if lineno is None: - lineno = _get_lineno_for_node(assign_node) - holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - pyname = evaluate.eval_node(holding_scope, assign_node) - if pyname is not None: - result = pyname.get_object() - if isinstance(result.get_type(), rope.base.builtins.Property) and \ - holding_scope.get_kind() == 'Class': - arg = rope.base.pynames.UnboundName( - rope.base.pyobjects.PyObject(holding_scope.pyobject)) - return pyname, result.get_type().get_property_object( - arguments.ObjectArguments([arg])) - return pyname, result - -@_ignore_inferred -def _follow_evaluations(assignment, pyname, pyobject): - new_pyname = pyname - tokens = assignment.evaluation.split('.') - for token in tokens: - call = token.endswith('()') - if call: - token = token[:-2] - if token: - pyname = new_pyname - new_pyname = _get_attribute(pyobject, token) - if new_pyname is not None: - pyobject = new_pyname.get_object() - if pyobject is not None and call: - if isinstance(pyobject, rope.base.pyobjects.AbstractFunction): - args = arguments.ObjectArguments([pyname]) - pyobject = pyobject.get_returned_object(args) - else: - pyobject = None - if pyobject is None: - break - if pyobject is not None and assignment.assign_type: - return rope.base.pyobjects.PyObject(pyobject) - return pyobject - - -def _get_lineno_for_node(assign_node): - if hasattr(assign_node, 'lineno') and \ - assign_node.lineno is not None: - return assign_node.lineno - return 1 - -def _get_attribute(pyobject, name): - if pyobject is not None and name in pyobject: - return pyobject[name] diff --git a/pymode/libs3/rope/base/oi/transform.py b/pymode/libs3/rope/base/oi/transform.py deleted file mode 100644 index 5a9d600e..00000000 --- a/pymode/libs3/rope/base/oi/transform.py +++ /dev/null @@ -1,285 +0,0 @@ -"""Provides classes for persisting `PyObject`\s""" -import os -import re - -import rope.base.builtins -from rope.base import exceptions - - -class PyObjectToTextual(object): - """For transforming `PyObject` to textual form - - This can be used for storing `PyObjects` in files. Use - `TextualToPyObject` for converting back. 
- - """ - - def __init__(self, project): - self.project = project - - def transform(self, pyobject): - """Transform a `PyObject` to textual form""" - if pyobject is None: - return ('none',) - object_type = type(pyobject) - try: - method = getattr(self, object_type.__name__ + '_to_textual') - return method(pyobject) - except AttributeError: - return ('unknown',) - - def __call__(self, pyobject): - return self.transform(pyobject) - - def PyObject_to_textual(self, pyobject): - if isinstance(pyobject.get_type(), rope.base.pyobjects.AbstractClass): - result = self.transform(pyobject.get_type()) - if result[0] == 'defined': - return ('instance', result) - return result - return ('unknown',) - - def PyFunction_to_textual(self, pyobject): - return self._defined_to_textual(pyobject) - - def PyClass_to_textual(self, pyobject): - return self._defined_to_textual(pyobject) - - def _defined_to_textual(self, pyobject): - address = [] - while pyobject.parent is not None: - address.insert(0, pyobject.get_name()) - pyobject = pyobject.parent - return ('defined', self._get_pymodule_path(pyobject.get_module()), - '.'.join(address)) - - def PyModule_to_textual(self, pyobject): - return ('defined', self._get_pymodule_path(pyobject)) - - def PyPackage_to_textual(self, pyobject): - return ('defined', self._get_pymodule_path(pyobject)) - - def List_to_textual(self, pyobject): - return ('builtin', 'list', self.transform(pyobject.holding)) - - def Dict_to_textual(self, pyobject): - return ('builtin', 'dict', self.transform(pyobject.keys), - self.transform(pyobject.values)) - - def Tuple_to_textual(self, pyobject): - objects = [self.transform(holding) - for holding in pyobject.get_holding_objects()] - return tuple(['builtin', 'tuple'] + objects) - - def Set_to_textual(self, pyobject): - return ('builtin', 'set', self.transform(pyobject.holding)) - - def Iterator_to_textual(self, pyobject): - return ('builtin', 'iter', self.transform(pyobject.holding)) - - def Generator_to_textual(self, pyobject): - return ('builtin', 'generator', self.transform(pyobject.holding)) - - def Str_to_textual(self, pyobject): - return ('builtin', 'str') - - def File_to_textual(self, pyobject): - return ('builtin', 'file') - - def BuiltinFunction_to_textual(self, pyobject): - return ('builtin', 'function', pyobject.get_name()) - - def _get_pymodule_path(self, pymodule): - return self.resource_to_path(pymodule.get_resource()) - - def resource_to_path(self, resource): - if resource.project == self.project: - return resource.path - else: - return resource.real_path - - -class TextualToPyObject(object): - """For transforming textual form to `PyObject`""" - - def __init__(self, project, allow_in_project_absolutes=False): - self.project = project - - def __call__(self, textual): - return self.transform(textual) - - def transform(self, textual): - """Transform an object from textual form to `PyObject`""" - if textual is None: - return None - type = textual[0] - try: - method = getattr(self, type + '_to_pyobject') - return method(textual) - except AttributeError: - return None - - def builtin_to_pyobject(self, textual): - name = textual[1] - method = getattr(self, 'builtin_%s_to_pyobject' % textual[1], None) - if method is not None: - return method(textual) - - def builtin_str_to_pyobject(self, textual): - return rope.base.builtins.get_str() - - def builtin_list_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_list(holding) - - def builtin_dict_to_pyobject(self, textual): - keys = 
self.transform(textual[2]) - values = self.transform(textual[3]) - return rope.base.builtins.get_dict(keys, values) - - def builtin_tuple_to_pyobject(self, textual): - objects = [] - for holding in textual[2:]: - objects.append(self.transform(holding)) - return rope.base.builtins.get_tuple(*objects) - - def builtin_set_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_set(holding) - - def builtin_iter_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_iterator(holding) - - def builtin_generator_to_pyobject(self, textual): - holding = self.transform(textual[2]) - return rope.base.builtins.get_generator(holding) - - def builtin_file_to_pyobject(self, textual): - return rope.base.builtins.get_file() - - def builtin_function_to_pyobject(self, textual): - if textual[2] in rope.base.builtins.builtins: - return rope.base.builtins.builtins[textual[2]].get_object() - - def unknown_to_pyobject(self, textual): - return None - - def none_to_pyobject(self, textual): - return None - - def _module_to_pyobject(self, textual): - path = textual[1] - return self._get_pymodule(path) - - def _hierarchical_defined_to_pyobject(self, textual): - path = textual[1] - names = textual[2].split('.') - pymodule = self._get_pymodule(path) - pyobject = pymodule - for name in names: - if pyobject is None: - return None - if isinstance(pyobject, rope.base.pyobjects.PyDefinedObject): - try: - pyobject = pyobject.get_scope()[name].get_object() - except exceptions.NameNotFoundError: - return None - else: - return None - return pyobject - - def defined_to_pyobject(self, textual): - if len(textual) == 2 or textual[2] == '': - return self._module_to_pyobject(textual) - else: - return self._hierarchical_defined_to_pyobject(textual) - - def instance_to_pyobject(self, textual): - type = self.transform(textual[1]) - if type is not None: - return rope.base.pyobjects.PyObject(type) - - def _get_pymodule(self, path): - resource = self.path_to_resource(path) - if resource is not None: - return self.project.pycore.resource_to_pyobject(resource) - - def path_to_resource(self, path): - try: - root = self.project.address - if not os.path.isabs(path): - return self.project.get_resource(path) - if path == root or path.startswith(root + os.sep): - # INFO: This is a project file; should not be absolute - return None - import rope.base.project - return rope.base.project.get_no_project().get_resource(path) - except exceptions.ResourceNotFoundError: - return None - - -class DOITextualToPyObject(TextualToPyObject): - """For transforming textual form to `PyObject` - - The textual form DOI uses is different from rope's standard - textual form. The reason is that we cannot find the needed - information by analyzing live objects. This class can be - used to transform DOI textual form to `PyObject` and later - we can convert it to standard textual form using - `TextualToPyObject` class. 
- - """ - - def _function_to_pyobject(self, textual): - path = textual[1] - lineno = int(textual[2]) - pymodule = self._get_pymodule(path) - if pymodule is not None: - scope = pymodule.get_scope() - inner_scope = scope.get_inner_scope_for_line(lineno) - return inner_scope.pyobject - - def _class_to_pyobject(self, textual): - path, name = textual[1:] - pymodule = self._get_pymodule(path) - if pymodule is None: - return None - module_scope = pymodule.get_scope() - suspected = None - if name in module_scope.get_names(): - suspected = module_scope[name].get_object() - if suspected is not None and \ - isinstance(suspected, rope.base.pyobjects.PyClass): - return suspected - else: - lineno = self._find_occurrence(name, pymodule.get_resource().read()) - if lineno is not None: - inner_scope = module_scope.get_inner_scope_for_line(lineno) - return inner_scope.pyobject - - def defined_to_pyobject(self, textual): - if len(textual) == 2: - return self._module_to_pyobject(textual) - else: - if textual[2].isdigit(): - result = self._function_to_pyobject(textual) - else: - result = self._class_to_pyobject(textual) - if not isinstance(result, rope.base.pyobjects.PyModule): - return result - - def _find_occurrence(self, name, source): - pattern = re.compile(r'^\s*class\s*' + name + r'\b') - lines = source.split('\n') - for i in range(len(lines)): - if pattern.match(lines[i]): - return i + 1 - - def path_to_resource(self, path): - import rope.base.libutils - root = self.project.address - relpath = rope.base.libutils.relative(root, path) - if relpath is not None: - path = relpath - return super(DOITextualToPyObject, self).path_to_resource(path) diff --git a/pymode/libs3/rope/base/prefs.py b/pymode/libs3/rope/base/prefs.py deleted file mode 100644 index 674a58ec..00000000 --- a/pymode/libs3/rope/base/prefs.py +++ /dev/null @@ -1,41 +0,0 @@ -class Prefs(object): - - def __init__(self): - self.prefs = {} - self.callbacks = {} - - def set(self, key, value): - """Set the value of `key` preference to `value`.""" - if key in self.callbacks: - self.callbacks[key](value) - else: - self.prefs[key] = value - - def add(self, key, value): - """Add an entry to a list preference - - Add `value` to the list of entries for the `key` preference. - - """ - if not key in self.prefs: - self.prefs[key] = [] - self.prefs[key].append(value) - - def get(self, key, default=None): - """Get the value of the key preference""" - return self.prefs.get(key, default) - - def add_callback(self, key, callback): - """Add `key` preference with `callback` function - - Whenever `key` is set the callback is called with the - given `value` as parameter. - - """ - self.callbacks[key] = callback - - def __setitem__(self, key, value): - self.set(key, value) - - def __getitem__(self, key): - return self.get(key) diff --git a/pymode/libs3/rope/base/project.py b/pymode/libs3/rope/base/project.py deleted file mode 100644 index 0c9952ba..00000000 --- a/pymode/libs3/rope/base/project.py +++ /dev/null @@ -1,375 +0,0 @@ -import pickle -import os -import shutil -import sys -import warnings - -import rope.base.fscommands -from rope.base import exceptions, taskhandle, prefs, history, pycore, utils -from rope.base.resourceobserver import * -from rope.base.resources import File, Folder, _ResourceMatcher - - -class _Project(object): - - def __init__(self, fscommands): - self.observers = [] - self.fscommands = fscommands - self.prefs = prefs.Prefs() - self.data_files = _DataFiles(self) - - def get_resource(self, resource_name): - """Get a resource in a project. 
- - `resource_name` is the path of a resource in a project. It is - the path of a resource relative to project root. Project root - folder address is an empty string. If the resource does not - exist a `exceptions.ResourceNotFound` exception would be - raised. Use `get_file()` and `get_folder()` when you need to - get nonexistent `Resource`\s. - - """ - path = self._get_resource_path(resource_name) - if not os.path.exists(path): - raise exceptions.ResourceNotFoundError( - 'Resource <%s> does not exist' % resource_name) - elif os.path.isfile(path): - return File(self, resource_name) - elif os.path.isdir(path): - return Folder(self, resource_name) - else: - raise exceptions.ResourceNotFoundError('Unknown resource ' - + resource_name) - - def validate(self, folder): - """Validate files and folders contained in this folder - - It validates all of the files and folders contained in this - folder if some observers are interested in them. - - """ - for observer in list(self.observers): - observer.validate(folder) - - def add_observer(self, observer): - """Register a `ResourceObserver` - - See `FilteredResourceObserver`. - """ - self.observers.append(observer) - - def remove_observer(self, observer): - """Remove a registered `ResourceObserver`""" - if observer in self.observers: - self.observers.remove(observer) - - def do(self, changes, task_handle=taskhandle.NullTaskHandle()): - """Apply the changes in a `ChangeSet` - - Most of the time you call this function for committing the - changes for a refactoring. - """ - self.history.do(changes, task_handle=task_handle) - - def get_pycore(self): - return self.pycore - - def get_file(self, path): - """Get the file with `path` (it may not exist)""" - return File(self, path) - - def get_folder(self, path): - """Get the folder with `path` (it may not exist)""" - return Folder(self, path) - - def is_ignored(self, resource): - return False - - def get_prefs(self): - return self.prefs - - def _get_resource_path(self, name): - pass - - @property - @utils.saveit - def history(self): - return history.History(self) - - @property - @utils.saveit - def pycore(self): - return pycore.PyCore(self) - - def close(self): - warnings.warn('Cannot close a NoProject', - DeprecationWarning, stacklevel=2) - - ropefolder = None - - -class Project(_Project): - """A Project containing files and folders""" - - def __init__(self, projectroot, fscommands=None, - ropefolder='.ropeproject', **prefs): - """A rope project - - :parameters: - - `projectroot`: The address of the root folder of the project - - `fscommands`: Implements the file system operations used - by rope; have a look at `rope.base.fscommands` - - `ropefolder`: The name of the folder in which rope stores - project configurations and data. Pass `None` for not using - such a folder at all. - - `prefs`: Specify project preferences. These values - overwrite config file preferences. 
- - """ - if projectroot != '/': - projectroot = _realpath(projectroot).rstrip('/\\') - self._address = projectroot - self._ropefolder_name = ropefolder - if not os.path.exists(self._address): - os.mkdir(self._address) - elif not os.path.isdir(self._address): - raise exceptions.RopeError('Project root exists and' - ' is not a directory') - if fscommands is None: - fscommands = rope.base.fscommands.create_fscommands(self._address) - super(Project, self).__init__(fscommands) - self.ignored = _ResourceMatcher() - self.file_list = _FileListCacher(self) - self.prefs.add_callback('ignored_resources', self.ignored.set_patterns) - if ropefolder is not None: - self.prefs['ignored_resources'] = [ropefolder] - self._init_prefs(prefs) - - def get_files(self): - return self.file_list.get_files() - - def _get_resource_path(self, name): - return os.path.join(self._address, *name.split('/')) - - def _init_ropefolder(self): - if self.ropefolder is not None: - if not self.ropefolder.exists(): - self._create_recursively(self.ropefolder) - if not self.ropefolder.has_child('config.py'): - config = self.ropefolder.create_file('config.py') - config.write(self._default_config()) - - def _create_recursively(self, folder): - if folder.parent != self.root and not folder.parent.exists(): - self._create_recursively(folder.parent) - folder.create() - - def _init_prefs(self, prefs): - run_globals = {} - if self.ropefolder is not None: - config = self.get_file(self.ropefolder.path + '/config.py') - run_globals.update({'__name__': '__main__', - '__builtins__': __builtins__, - '__file__': config.real_path}) - if config.exists(): - config = self.ropefolder.get_child('config.py') - exec(config.read(), run_globals) - else: - exec(self._default_config(), run_globals) - if 'set_prefs' in run_globals: - run_globals['set_prefs'](self.prefs) - for key, value in prefs.items(): - self.prefs[key] = value - self._init_other_parts() - self._init_ropefolder() - if 'project_opened' in run_globals: - run_globals['project_opened'](self) - - def _default_config(self): - import rope.base.default_config - import inspect - return inspect.getsource(rope.base.default_config) - - def _init_other_parts(self): - # Forcing the creation of `self.pycore` to register observers - self.pycore - - def is_ignored(self, resource): - return self.ignored.does_match(resource) - - def sync(self): - """Closes project open resources""" - self.close() - - def close(self): - """Closes project open resources""" - self.data_files.write() - - def set(self, key, value): - """Set the `key` preference to `value`""" - self.prefs.set(key, value) - - @property - def ropefolder(self): - if self._ropefolder_name is not None: - return self.get_folder(self._ropefolder_name) - - def validate(self, folder=None): - if folder is None: - folder = self.root - super(Project, self).validate(folder) - - root = property(lambda self: self.get_resource('')) - address = property(lambda self: self._address) - - -class NoProject(_Project): - """A null object for holding out of project files. 
- - This class is singleton use `get_no_project` global function - """ - - def __init__(self): - fscommands = rope.base.fscommands.FileSystemCommands() - super(NoProject, self).__init__(fscommands) - - def _get_resource_path(self, name): - real_name = name.replace('/', os.path.sep) - return _realpath(real_name) - - def get_resource(self, name): - universal_name = _realpath(name).replace(os.path.sep, '/') - return super(NoProject, self).get_resource(universal_name) - - def get_files(self): - return [] - - _no_project = None - - -def get_no_project(): - if NoProject._no_project is None: - NoProject._no_project = NoProject() - return NoProject._no_project - - -class _FileListCacher(object): - - def __init__(self, project): - self.project = project - self.files = None - rawobserver = ResourceObserver( - self._changed, self._invalid, self._invalid, - self._invalid, self._invalid) - self.project.add_observer(rawobserver) - - def get_files(self): - if self.files is None: - self.files = set() - self._add_files(self.project.root) - return self.files - - def _add_files(self, folder): - for child in folder.get_children(): - if child.is_folder(): - self._add_files(child) - elif not self.project.is_ignored(child): - self.files.add(child) - - def _changed(self, resource): - if resource.is_folder(): - self.files = None - - def _invalid(self, resource, new_resource=None): - self.files = None - - -class _DataFiles(object): - - def __init__(self, project): - self.project = project - self.hooks = [] - - def read_data(self, name, compress=False, import_=False): - if self.project.ropefolder is None: - return None - compress = compress and self._can_compress() - opener = self._get_opener(compress) - file = self._get_file(name, compress) - if not compress and import_: - self._import_old_files(name) - if file.exists(): - input = opener(file.real_path, 'rb') - try: - result = [] - try: - while True: - result.append(pickle.load(input)) - except EOFError: - pass - if len(result) == 1: - return result[0] - if len(result) > 1: - return result - finally: - input.close() - - def write_data(self, name, data, compress=False): - if self.project.ropefolder is not None: - compress = compress and self._can_compress() - file = self._get_file(name, compress) - opener = self._get_opener(compress) - output = opener(file.real_path, 'wb') - try: - pickle.dump(data, output, 2) - finally: - output.close() - - def add_write_hook(self, hook): - self.hooks.append(hook) - - def write(self): - for hook in self.hooks: - hook() - - def _can_compress(self): - try: - import gzip - return True - except ImportError: - return False - - def _import_old_files(self, name): - old = self._get_file(name + '.pickle', False) - new = self._get_file(name, False) - if old.exists() and not new.exists(): - shutil.move(old.real_path, new.real_path) - - def _get_opener(self, compress): - if compress: - try: - import gzip - return gzip.open - except ImportError: - pass - return open - - def _get_file(self, name, compress): - path = self.project.ropefolder.path + '/' + name - if compress: - path += '.gz' - return self.project.get_file(path) - - -def _realpath(path): - """Return the real path of `path` - - Is equivalent to ``realpath(abspath(expanduser(path)))``. 
- - """ - # there is a bug in cygwin for os.path.abspath() for abs paths - if sys.platform == 'cygwin': - if path[1:3] == ':\\': - return path - return os.path.abspath(os.path.expanduser(path)) - return os.path.realpath(os.path.abspath(os.path.expanduser(path))) diff --git a/pymode/libs3/rope/base/pycore.py b/pymode/libs3/rope/base/pycore.py deleted file mode 100644 index 700fcde6..00000000 --- a/pymode/libs3/rope/base/pycore.py +++ /dev/null @@ -1,410 +0,0 @@ -import bisect -import difflib -import sys -import warnings - -import rope.base.oi.doa -import rope.base.oi.objectinfo -import rope.base.oi.soa -from rope.base import ast, exceptions, taskhandle, utils, stdmods -from rope.base.exceptions import ModuleNotFoundError -from rope.base.pyobjectsdef import PyModule, PyPackage, PyClass -import rope.base.resources -import rope.base.resourceobserver -from rope.base import builtins - - -class PyCore(object): - - def __init__(self, project): - self.project = project - self._init_resource_observer() - self.cache_observers = [] - self.module_cache = _ModuleCache(self) - self.extension_cache = _ExtensionCache(self) - self.object_info = rope.base.oi.objectinfo.ObjectInfoManager(project) - self._init_python_files() - self._init_automatic_soa() - self._init_source_folders() - - def _init_python_files(self): - self.python_matcher = None - patterns = self.project.prefs.get('python_files', None) - if patterns is not None: - self.python_matcher = rope.base.resources._ResourceMatcher() - self.python_matcher.set_patterns(patterns) - - def _init_resource_observer(self): - callback = self._invalidate_resource_cache - observer = rope.base.resourceobserver.ResourceObserver( - changed=callback, moved=callback, removed=callback) - self.observer = rope.base.resourceobserver.FilteredResourceObserver(observer) - self.project.add_observer(self.observer) - - def _init_source_folders(self): - self._custom_source_folders = [] - for path in self.project.prefs.get('source_folders', []): - folder = self.project.get_resource(path) - self._custom_source_folders.append(folder) - - def _init_automatic_soa(self): - if not self.automatic_soa: - return - callback = self._file_changed_for_soa - observer = rope.base.resourceobserver.ResourceObserver( - changed=callback, moved=callback, removed=callback) - self.project.add_observer(observer) - - @property - def automatic_soa(self): - auto_soa = self.project.prefs.get('automatic_soi', None) - return self.project.prefs.get('automatic_soa', auto_soa) - - def _file_changed_for_soa(self, resource, new_resource=None): - old_contents = self.project.history.\ - contents_before_current_change(resource) - if old_contents is not None: - perform_soa_on_changed_scopes(self.project, resource, old_contents) - - def is_python_file(self, resource): - if resource.is_folder(): - return False - if self.python_matcher is None: - return resource.name.endswith('.py') - return self.python_matcher.does_match(resource) - - def get_module(self, name, folder=None): - """Returns a `PyObject` if the module was found.""" - # check if this is a builtin module - pymod = self._builtin_module(name) - if pymod is not None: - return pymod - module = self.find_module(name, folder) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) - - def _builtin_submodules(self, modname): - result = {} - for extension in self.extension_modules: - if extension.startswith(modname + '.'): - name = extension[len(modname) + 1:] - if '.' 
not in name: - result[name] = self._builtin_module(extension) - return result - - def _builtin_module(self, name): - return self.extension_cache.get_pymodule(name) - - def get_relative_module(self, name, folder, level): - module = self.find_relative_module(name, folder, level) - if module is None: - raise ModuleNotFoundError('Module %s not found' % name) - return self.resource_to_pyobject(module) - - def get_string_module(self, code, resource=None, force_errors=False): - """Returns a `PyObject` object for the given code - - If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is - raised if module has syntax errors. This overrides - ``ignore_syntax_errors`` project config. - - """ - return PyModule(self, code, resource, force_errors=force_errors) - - def get_string_scope(self, code, resource=None): - """Returns a `Scope` object for the given code""" - return self.get_string_module(code, resource).get_scope() - - def _invalidate_resource_cache(self, resource, new_resource=None): - for observer in self.cache_observers: - observer(resource) - - def _find_module_in_folder(self, folder, modname): - module = folder - packages = modname.split('.') - for pkg in packages[:-1]: - if module.is_folder() and module.has_child(pkg): - module = module.get_child(pkg) - else: - return None - if module.is_folder(): - if module.has_child(packages[-1]) and \ - module.get_child(packages[-1]).is_folder(): - return module.get_child(packages[-1]) - elif module.has_child(packages[-1] + '.py') and \ - not module.get_child(packages[-1] + '.py').is_folder(): - return module.get_child(packages[-1] + '.py') - - def get_python_path_folders(self): - import rope.base.project - result = [] - for src in self.project.prefs.get('python_path', []) + sys.path: - try: - src_folder = rope.base.project.get_no_project().get_resource(src) - result.append(src_folder) - except rope.base.exceptions.ResourceNotFoundError: - pass - return result - - def find_module(self, modname, folder=None): - """Returns a resource corresponding to the given module - - returns None if it can not be found - """ - return self._find_module(modname, folder) - - def find_relative_module(self, modname, folder, level): - for i in range(level - 1): - folder = folder.parent - if modname == '': - return folder - else: - return self._find_module_in_folder(folder, modname) - - def _find_module(self, modname, folder=None): - """Return `modname` module resource""" - for src in self.get_source_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - for src in self.get_python_path_folders(): - module = self._find_module_in_folder(src, modname) - if module is not None: - return module - if folder is not None: - module = self._find_module_in_folder(folder, modname) - if module is not None: - return module - return None - - # INFO: It was decided not to cache source folders, since: - # - Does not take much time when the root folder contains - # packages, that is most of the time - # - We need a separate resource observer; `self.observer` - # does not get notified about module and folder creations - def get_source_folders(self): - """Returns project source folders""" - if self.project.root is None: - return [] - result = list(self._custom_source_folders) - result.extend(self._find_source_folders(self.project.root)) - return result - - def resource_to_pyobject(self, resource, force_errors=False): - return self.module_cache.get_pymodule(resource, force_errors) - - def get_python_files(self): - """Returns all python 
files available in the project""" - return [resource for resource in self.project.get_files() - if self.is_python_file(resource)] - - def _is_package(self, folder): - if folder.has_child('__init__.py') and \ - not folder.get_child('__init__.py').is_folder(): - return True - else: - return False - - def _find_source_folders(self, folder): - for resource in folder.get_folders(): - if self._is_package(resource): - return [folder] - result = [] - for resource in folder.get_files(): - if resource.name.endswith('.py'): - result.append(folder) - break - for resource in folder.get_folders(): - result.extend(self._find_source_folders(resource)) - return result - - def run_module(self, resource, args=None, stdin=None, stdout=None): - """Run `resource` module - - Returns a `rope.base.oi.doa.PythonFileRunner` object for - controlling the process. - - """ - perform_doa = self.project.prefs.get('perform_doi', True) - perform_doa = self.project.prefs.get('perform_doa', perform_doa) - receiver = self.object_info.doa_data_received - if not perform_doa: - receiver = None - runner = rope.base.oi.doa.PythonFileRunner( - self, resource, args, stdin, stdout, receiver) - runner.add_finishing_observer(self.module_cache.forget_all_data) - runner.run() - return runner - - def analyze_module(self, resource, should_analyze=lambda py: True, - search_subscopes=lambda py: True, followed_calls=None): - """Analyze `resource` module for static object inference - - This function forces rope to analyze this module to collect - information about function calls. `should_analyze` is a - function that is called with a `PyDefinedObject` argument. If - it returns `True` the element is analyzed. If it is `None` or - returns `False` the element is not analyzed. - - `search_subscopes` is like `should_analyze`; The difference is - that if it returns `False` the sub-scopes are all ignored. - That is it is assumed that `should_analyze` returns `False` - for all of its subscopes. - - `followed_calls` override the value of ``soa_followed_calls`` - project config. - """ - if followed_calls is None: - followed_calls = self.project.prefs.get('soa_followed_calls', 0) - pymodule = self.resource_to_pyobject(resource) - self.module_cache.forget_all_data() - rope.base.oi.soa.analyze_module( - self, pymodule, should_analyze, search_subscopes, followed_calls) - - def get_classes(self, task_handle=taskhandle.NullTaskHandle()): - warnings.warn('`PyCore.get_classes()` is deprecated', - DeprecationWarning, stacklevel=2) - return [] - - def __str__(self): - return str(self.module_cache) + str(self.object_info) - - def modname(self, resource): - if resource.is_folder(): - module_name = resource.name - source_folder = resource.parent - elif resource.name == '__init__.py': - module_name = resource.parent.name - source_folder = resource.parent.parent - else: - module_name = resource.name[:-3] - source_folder = resource.parent - - while source_folder != source_folder.parent and \ - source_folder.has_child('__init__.py'): - module_name = source_folder.name + '.' 
+ module_name - source_folder = source_folder.parent - return module_name - - @property - @utils.cacheit - def extension_modules(self): - result = set(self.project.prefs.get('extension_modules', [])) - if self.project.prefs.get('import_dynload_stdmods', False): - result.update(stdmods.dynload_modules()) - return result - - -class _ModuleCache(object): - - def __init__(self, pycore): - self.pycore = pycore - self.module_map = {} - self.pycore.cache_observers.append(self._invalidate_resource) - self.observer = self.pycore.observer - - def _invalidate_resource(self, resource): - if resource in self.module_map: - self.forget_all_data() - self.observer.remove_resource(resource) - del self.module_map[resource] - - def get_pymodule(self, resource, force_errors=False): - if resource in self.module_map: - return self.module_map[resource] - if resource.is_folder(): - result = PyPackage(self.pycore, resource, - force_errors=force_errors) - else: - result = PyModule(self.pycore, resource=resource, - force_errors=force_errors) - if result.has_errors: - return result - self.module_map[resource] = result - self.observer.add_resource(resource) - return result - - def forget_all_data(self): - for pymodule in self.module_map.values(): - pymodule._forget_concluded_data() - - def __str__(self): - return 'PyCore caches %d PyModules\n' % len(self.module_map) - - -class _ExtensionCache(object): - - def __init__(self, pycore): - self.pycore = pycore - self.extensions = {} - - def get_pymodule(self, name): - if name == '__builtin__': - return builtins.builtins - allowed = self.pycore.extension_modules - if name not in self.extensions and name in allowed: - self.extensions[name] = builtins.BuiltinModule(name, self.pycore) - return self.extensions.get(name) - - -def perform_soa_on_changed_scopes(project, resource, old_contents): - pycore = project.pycore - if resource.exists() and pycore.is_python_file(resource): - try: - new_contents = resource.read() - # detecting changes in new_contents relative to old_contents - detector = _TextChangeDetector(new_contents, old_contents) - def search_subscopes(pydefined): - scope = pydefined.get_scope() - return detector.is_changed(scope.get_start(), scope.get_end()) - def should_analyze(pydefined): - scope = pydefined.get_scope() - start = scope.get_start() - end = scope.get_end() - return detector.consume_changes(start, end) - pycore.analyze_module(resource, should_analyze, search_subscopes) - except exceptions.ModuleSyntaxError: - pass - - -class _TextChangeDetector(object): - - def __init__(self, old, new): - self.old = old - self.new = new - self._set_diffs() - - def _set_diffs(self): - differ = difflib.Differ() - self.lines = [] - lineno = 0 - for line in differ.compare(self.old.splitlines(True), - self.new.splitlines(True)): - if line.startswith(' '): - lineno += 1 - elif line.startswith('-'): - lineno += 1 - self.lines.append(lineno) - - def is_changed(self, start, end): - """Tell whether any of start till end lines have changed - - The end points are inclusive and indices start from 1. 
- """ - left, right = self._get_changed(start, end) - if left < right: - return True - return False - - def consume_changes(self, start, end): - """Clear the changed status of lines from start till end""" - left, right = self._get_changed(start, end) - if left < right: - del self.lines[left:right] - return left < right - - def _get_changed(self, start, end): - left = bisect.bisect_left(self.lines, start) - right = bisect.bisect_right(self.lines, end) - return left, right diff --git a/pymode/libs3/rope/base/pynames.py b/pymode/libs3/rope/base/pynames.py deleted file mode 100644 index 79bba156..00000000 --- a/pymode/libs3/rope/base/pynames.py +++ /dev/null @@ -1,199 +0,0 @@ -import rope.base.pyobjects -from rope.base import exceptions, utils - - -class PyName(object): - """References to `PyObject`\s inside python programs""" - - def get_object(self): - """Return the `PyObject` object referenced by this `PyName`""" - - def get_definition_location(self): - """Return a (module, lineno) tuple""" - - -class DefinedName(PyName): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (self.pyobject.get_module(), self.pyobject.get_ast().lineno) - - -class AssignedName(PyName): - """Only a placeholder""" - - -class UnboundName(PyName): - - def __init__(self, pyobject=None): - self.pyobject = pyobject - if self.pyobject is None: - self.pyobject = rope.base.pyobjects.get_unknown() - - def get_object(self): - return self.pyobject - - def get_definition_location(self): - return (None, None) - - -class AssignmentValue(object): - """An assigned expression""" - - def __init__(self, ast_node, levels=None, evaluation='', - assign_type=False): - """The `level` is `None` for simple assignments and is - a list of numbers for tuple assignments for example in:: - - a, (b, c) = x - - The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for - `c` is ``[1, 1]``. 
- - """ - self.ast_node = ast_node - if levels == None: - self.levels = [] - else: - self.levels = levels - self.evaluation = evaluation - self.assign_type = assign_type - - def get_lineno(self): - return self.ast_node.lineno - - -class EvaluatedName(PyName): - """A name whose object will be evaluated later""" - - def __init__(self, callback, module=None, lineno=None): - self.module = module - self.lineno = lineno - self.callback = callback - self.pyobject = _Inferred(callback, _get_concluded_data(module)) - - def get_object(self): - return self.pyobject.get() - - def get_definition_location(self): - return (self.module, self.lineno) - - def invalidate(self): - """Forget the `PyObject` this `PyName` holds""" - self.pyobject.set(None) - - -class ParameterName(PyName): - """Only a placeholder""" - - -class ImportedModule(PyName): - - def __init__(self, importing_module, module_name=None, - level=0, resource=None): - self.importing_module = importing_module - self.module_name = module_name - self.level = level - self.resource = resource - self.pymodule = _get_concluded_data(self.importing_module) - - def _current_folder(self): - resource = self.importing_module.get_module().get_resource() - if resource is None: - return None - return resource.parent - - def _get_pymodule(self): - if self.pymodule.get() is None: - pycore = self.importing_module.pycore - if self.resource is not None: - self.pymodule.set(pycore.resource_to_pyobject(self.resource)) - elif self.module_name is not None: - try: - if self.level == 0: - pymodule = pycore.get_module(self.module_name, - self._current_folder()) - else: - pymodule = pycore.get_relative_module( - self.module_name, self._current_folder(), self.level) - self.pymodule.set(pymodule) - except exceptions.ModuleNotFoundError: - pass - return self.pymodule.get() - - def get_object(self): - if self._get_pymodule() is None: - return rope.base.pyobjects.get_unknown() - return self._get_pymodule() - - def get_definition_location(self): - pymodule = self._get_pymodule() - if not isinstance(pymodule, rope.base.pyobjects.PyDefinedObject): - return (None, None) - return (pymodule.get_module(), 1) - - -class ImportedName(PyName): - - def __init__(self, imported_module, imported_name): - self.imported_module = imported_module - self.imported_name = imported_name - - def _get_imported_pyname(self): - try: - result = self.imported_module.get_object()[self.imported_name] - if result != self: - return result - except exceptions.AttributeNotFoundError: - pass - return UnboundName() - - @utils.prevent_recursion(rope.base.pyobjects.get_unknown) - def get_object(self): - return self._get_imported_pyname().get_object() - - @utils.prevent_recursion(lambda: (None, None)) - def get_definition_location(self): - return self._get_imported_pyname().get_definition_location() - - -def _get_concluded_data(module): - if module is None: - return rope.base.pyobjects._ConcludedData() - return module._get_concluded_data() - - -def _circular_inference(): - raise rope.base.pyobjects.IsBeingInferredError( - 'Circular Object Inference') - -class _Inferred(object): - - def __init__(self, get_inferred, concluded=None): - self.get_inferred = get_inferred - self.concluded = concluded - if self.concluded is None: - self.temp = None - - @utils.prevent_recursion(_circular_inference) - def get(self, *args, **kwds): - if self.concluded is None or self.concluded.get() is None: - self.set(self.get_inferred(*args, **kwds)) - if self._get() is None: - self.set(rope.base.pyobjects.get_unknown()) - return 
self._get() - - def set(self, pyobject): - if self.concluded is not None: - self.concluded.set(pyobject) - self.temp = pyobject - - def _get(self): - if self.concluded is not None: - return self.concluded.get() - return self.temp diff --git a/pymode/libs3/rope/base/pynamesdef.py b/pymode/libs3/rope/base/pynamesdef.py deleted file mode 100644 index 6dba0a80..00000000 --- a/pymode/libs3/rope/base/pynamesdef.py +++ /dev/null @@ -1,55 +0,0 @@ -import rope.base.oi.soi -from rope.base import pynames -from rope.base.pynames import * - - -class AssignedName(pynames.AssignedName): - - def __init__(self, lineno=None, module=None, pyobject=None): - self.lineno = lineno - self.module = module - self.assignments = [] - self.pyobject = _Inferred(self._get_inferred, - pynames._get_concluded_data(module)) - self.pyobject.set(pyobject) - - @utils.prevent_recursion(lambda: None) - def _get_inferred(self): - if self.module is not None: - return rope.base.oi.soi.infer_assigned_object(self) - - def get_object(self): - return self.pyobject.get() - - def get_definition_location(self): - """Returns a (module, lineno) tuple""" - if self.lineno is None and self.assignments: - self.lineno = self.assignments[0].get_lineno() - return (self.module, self.lineno) - - def invalidate(self): - """Forget the `PyObject` this `PyName` holds""" - self.pyobject.set(None) - - -class ParameterName(pynames.ParameterName): - - def __init__(self, pyfunction, index): - self.pyfunction = pyfunction - self.index = index - - def get_object(self): - result = self.pyfunction.get_parameter(self.index) - if result is None: - result = rope.base.pyobjects.get_unknown() - return result - - def get_objects(self): - """Returns the list of objects passed as this parameter""" - return rope.base.oi.soi.get_passed_objects( - self.pyfunction, self.index) - - def get_definition_location(self): - return (self.pyfunction.get_module(), self.pyfunction.get_ast().lineno) - -_Inferred = pynames._Inferred diff --git a/pymode/libs3/rope/base/pyobjects.py b/pymode/libs3/rope/base/pyobjects.py deleted file mode 100644 index 76be3040..00000000 --- a/pymode/libs3/rope/base/pyobjects.py +++ /dev/null @@ -1,311 +0,0 @@ -from rope.base.fscommands import _decode_data -from rope.base import ast, exceptions, utils - - -class PyObject(object): - - def __init__(self, type_): - if type_ is None: - type_ = self - self.type = type_ - - def get_attributes(self): - if self.type is self: - return {} - return self.type.get_attributes() - - def get_attribute(self, name): - if name not in self.get_attributes(): - raise exceptions.AttributeNotFoundError( - 'Attribute %s not found' % name) - return self.get_attributes()[name] - - def get_type(self): - return self.type - - def __getitem__(self, key): - """The same as ``get_attribute(key)``""" - return self.get_attribute(key) - - def __contains__(self, key): - """The same as ``key in self.get_attributes()``""" - return key in self.get_attributes() - - def __eq__(self, obj): - """Check the equality of two `PyObject`\s - - Currently it is assumed that instances (the direct instances - of `PyObject`, not the instances of its subclasses) are equal - if their types are equal. For every other object like - defineds or builtins rope assumes objects are reference - objects and their identities should match. 
- - """ - if self.__class__ != obj.__class__: - return False - if type(self) == PyObject: - if self is not self.type: - return self.type == obj.type - else: - return self.type is obj.type - return self is obj - - def __ne__(self, obj): - return not self.__eq__(obj) - - def __hash__(self): - """See docs for `__eq__()` method""" - if type(self) == PyObject and self != self.type: - return hash(self.type) + 1 - else: - return super(PyObject, self).__hash__() - - def __iter__(self): - """The same as ``iter(self.get_attributes())``""" - return iter(self.get_attributes()) - - _types = None - _unknown = None - - @staticmethod - def _get_base_type(name): - if PyObject._types is None: - PyObject._types = {} - base_type = PyObject(None) - PyObject._types['Type'] = base_type - PyObject._types['Module'] = PyObject(base_type) - PyObject._types['Function'] = PyObject(base_type) - PyObject._types['Unknown'] = PyObject(base_type) - return PyObject._types[name] - - -def get_base_type(name): - """Return the base type with name `name`. - - The base types are 'Type', 'Function', 'Module' and 'Unknown'. It - was used to check the type of a `PyObject` but currently its use - is discouraged. Use classes defined in this module instead. - For example instead of - ``pyobject.get_type() == get_base_type('Function')`` use - ``isinstance(pyobject, AbstractFunction)``. - - You can use `AbstractClass` for classes, `AbstractFunction` for - functions, and `AbstractModule` for modules. You can also use - `PyFunction` and `PyClass` for testing if an object is - defined somewhere and rope can access its source. These classes - provide more methods. - - """ - return PyObject._get_base_type(name) - - -def get_unknown(): - """Return a pyobject whose type is unknown - - Note that two unknown objects are equal. So for example you can - write:: - - if pyname.get_object() == get_unknown(): - print 'cannot determine what this pyname holds' - - Rope could have used `None` for indicating unknown objects but - we had to check that in many places. So actually this method - returns a null object. 
- - """ - if PyObject._unknown is None: - PyObject._unknown = PyObject(get_base_type('Unknown')) - return PyObject._unknown - - -class AbstractClass(PyObject): - - def __init__(self): - super(AbstractClass, self).__init__(get_base_type('Type')) - - def get_name(self): - pass - - def get_doc(self): - pass - - def get_superclasses(self): - return [] - - -class AbstractFunction(PyObject): - - def __init__(self): - super(AbstractFunction, self).__init__(get_base_type('Function')) - - def get_name(self): - pass - - def get_doc(self): - pass - - def get_param_names(self, special_args=True): - return [] - - def get_returned_object(self, args): - return get_unknown() - - -class AbstractModule(PyObject): - - def __init__(self, doc=None): - super(AbstractModule, self).__init__(get_base_type('Module')) - - def get_doc(self): - pass - - def get_resource(self): - pass - - -class PyDefinedObject(object): - """Python defined names that rope can access their sources""" - - def __init__(self, pycore, ast_node, parent): - self.pycore = pycore - self.ast_node = ast_node - self.scope = None - self.parent = parent - self.structural_attributes = None - self.concluded_attributes = self.get_module()._get_concluded_data() - self.attributes = self.get_module()._get_concluded_data() - self.defineds = None - - visitor_class = None - - @utils.prevent_recursion(lambda: {}) - def _get_structural_attributes(self): - if self.structural_attributes is None: - self.structural_attributes = self._create_structural_attributes() - return self.structural_attributes - - @utils.prevent_recursion(lambda: {}) - def _get_concluded_attributes(self): - if self.concluded_attributes.get() is None: - self._get_structural_attributes() - self.concluded_attributes.set(self._create_concluded_attributes()) - return self.concluded_attributes.get() - - def get_attributes(self): - if self.attributes.get() is None: - result = dict(self._get_concluded_attributes()) - result.update(self._get_structural_attributes()) - self.attributes.set(result) - return self.attributes.get() - - def get_attribute(self, name): - if name in self._get_structural_attributes(): - return self._get_structural_attributes()[name] - if name in self._get_concluded_attributes(): - return self._get_concluded_attributes()[name] - raise exceptions.AttributeNotFoundError('Attribute %s not found' % - name) - - def get_scope(self): - if self.scope is None: - self.scope = self._create_scope() - return self.scope - - def get_module(self): - current_object = self - while current_object.parent is not None: - current_object = current_object.parent - return current_object - - def get_doc(self): - if len(self.get_ast().body) > 0: - expr = self.get_ast().body[0] - if isinstance(expr, ast.Expr) and \ - isinstance(expr.value, ast.Str): - docstring = expr.value.s - coding = self.get_module().coding - return _decode_data(docstring, coding) - - def _get_defined_objects(self): - if self.defineds is None: - self._get_structural_attributes() - return self.defineds - - def _create_structural_attributes(self): - if self.visitor_class is None: - return {} - new_visitor = self.visitor_class(self.pycore, self) - for child in ast.get_child_nodes(self.ast_node): - ast.walk(child, new_visitor) - self.defineds = new_visitor.defineds - return new_visitor.names - - def _create_concluded_attributes(self): - return {} - - def get_ast(self): - return self.ast_node - - def _create_scope(self): - pass - - -class PyFunction(PyDefinedObject, AbstractFunction): - """Only a placeholder""" - - -class 
PyClass(PyDefinedObject, AbstractClass): - """Only a placeholder""" - - -class _ConcludedData(object): - - def __init__(self): - self.data_ = None - - def set(self, data): - self.data_ = data - - def get(self): - return self.data_ - - data = property(get, set) - - def _invalidate(self): - self.data = None - - def __str__(self): - return '<' + str(self.data) + '>' - - -class _PyModule(PyDefinedObject, AbstractModule): - - def __init__(self, pycore, ast_node, resource): - self.resource = resource - self.concluded_data = [] - AbstractModule.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, None) - - def _get_concluded_data(self): - new_data = _ConcludedData() - self.concluded_data.append(new_data) - return new_data - - def _forget_concluded_data(self): - for data in self.concluded_data: - data._invalidate() - - def get_resource(self): - return self.resource - - -class PyModule(_PyModule): - """Only a placeholder""" - - -class PyPackage(_PyModule): - """Only a placeholder""" - - -class IsBeingInferredError(exceptions.RopeError): - pass diff --git a/pymode/libs3/rope/base/pyobjectsdef.py b/pymode/libs3/rope/base/pyobjectsdef.py deleted file mode 100644 index 57e7af58..00000000 --- a/pymode/libs3/rope/base/pyobjectsdef.py +++ /dev/null @@ -1,555 +0,0 @@ -import sys -import rope.base.codeanalyze -import rope.base.evaluate -import rope.base.builtins -import rope.base.oi.soi -import rope.base.pyscopes -from rope.base import (pynamesdef as pynames, exceptions, ast, - astutils, pyobjects, fscommands, arguments, utils) -from rope.base.pyobjects import * - -class PyFunction(pyobjects.PyFunction): - - def __init__(self, pycore, ast_node, parent): - AbstractFunction.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) - self.arguments = self.ast_node.args - self.parameter_pyobjects = pynames._Inferred( - self._infer_parameters, self.get_module()._get_concluded_data()) - self.returned = pynames._Inferred(self._infer_returned) - self.parameter_pynames = None - - def _create_structural_attributes(self): - return {} - - def _create_concluded_attributes(self): - return {} - - def _create_scope(self): - return rope.base.pyscopes.FunctionScope(self.pycore, self, - _FunctionVisitor) - - def _infer_parameters(self): - pyobjects = rope.base.oi.soi.infer_parameter_objects(self) - self._handle_special_args(pyobjects) - return pyobjects - - def _infer_returned(self, args=None): - return rope.base.oi.soi.infer_returned_object(self, args) - - def _handle_special_args(self, pyobjects): - if len(pyobjects) == len(self.arguments.args): - if self.arguments.vararg: - pyobjects.append(rope.base.builtins.get_list()) - if self.arguments.kwarg: - pyobjects.append(rope.base.builtins.get_dict()) - - def _set_parameter_pyobjects(self, pyobjects): - if pyobjects is not None: - self._handle_special_args(pyobjects) - self.parameter_pyobjects.set(pyobjects) - - def get_parameters(self): - if self.parameter_pynames is None: - result = {} - for index, name in enumerate(self.get_param_names()): - # TODO: handle tuple parameters - result[name] = pynames.ParameterName(self, index) - self.parameter_pynames = result - return self.parameter_pynames - - def get_parameter(self, index): - if index < len(self.parameter_pyobjects.get()): - return self.parameter_pyobjects.get()[index] - - def get_returned_object(self, args): - return self.returned.get(args) - - def get_name(self): - return self.get_ast().name - - def get_param_names(self, special_args=True): - # TODO: handle tuple parameters - result = [node.arg 
for node in self.arguments.args - if isinstance(node, ast.arg)] - if special_args: - if self.arguments.vararg: - result.append(self.arguments.vararg) - if self.arguments.kwarg: - result.append(self.arguments.kwarg) - return result - - def get_kind(self): - """Get function type - - It returns one of 'function', 'method', 'staticmethod' or - 'classmethod' strs. - - """ - scope = self.parent.get_scope() - if isinstance(self.parent, PyClass): - for decorator in self.decorators: - pyname = rope.base.evaluate.eval_node(scope, decorator) - if pyname == rope.base.builtins.builtins['staticmethod']: - return 'staticmethod' - if pyname == rope.base.builtins.builtins['classmethod']: - return 'classmethod' - return 'method' - return 'function' - - @property - def decorators(self): - try: - return getattr(self.ast_node, 'decorator_list') - except AttributeError: - return getattr(self.ast_node, 'decorators', None) - - -class PyClass(pyobjects.PyClass): - - def __init__(self, pycore, ast_node, parent): - self.visitor_class = _ClassVisitor - AbstractClass.__init__(self) - PyDefinedObject.__init__(self, pycore, ast_node, parent) - self.parent = parent - self._superclasses = self.get_module()._get_concluded_data() - - def get_superclasses(self): - if self._superclasses.get() is None: - self._superclasses.set(self._get_bases()) - return self._superclasses.get() - - def get_name(self): - return self.get_ast().name - - def _create_concluded_attributes(self): - result = {} - for base in reversed(self.get_superclasses()): - result.update(base.get_attributes()) - return result - - def _get_bases(self): - result = [] - for base_name in self.ast_node.bases: - base = rope.base.evaluate.eval_node(self.parent.get_scope(), - base_name) - if base is not None and \ - base.get_object().get_type() == get_base_type('Type'): - result.append(base.get_object()) - return result - - def _create_scope(self): - return rope.base.pyscopes.ClassScope(self.pycore, self) - - -class PyModule(pyobjects.PyModule): - - def __init__(self, pycore, source=None, - resource=None, force_errors=False): - ignore = pycore.project.prefs.get('ignore_syntax_errors', False) - syntax_errors = force_errors or not ignore - self.has_errors = False - try: - source, node = self._init_source(pycore, source, resource) - except exceptions.ModuleSyntaxError: - self.has_errors = True - if syntax_errors: - raise - else: - source = '\n' - node = ast.parse('\n') - self.source_code = source - self.star_imports = [] - self.visitor_class = _GlobalVisitor - self.coding = fscommands.read_str_coding(self.source_code) - super(PyModule, self).__init__(pycore, node, resource) - - def _init_source(self, pycore, source_code, resource): - filename = 'string' - if resource: - filename = resource.path - try: - if source_code is None: - source_bytes = resource.read_bytes() - source_code = fscommands.file_data_to_unicode(source_bytes) - else: - if isinstance(source_code, str): - source_bytes = fscommands.unicode_to_file_data(source_code) - else: - source_bytes = source_code - ast_node = ast.parse(source_bytes, filename=filename) - except SyntaxError as e: - raise exceptions.ModuleSyntaxError(filename, e.lineno, e.msg) - except UnicodeDecodeError as e: - raise exceptions.ModuleSyntaxError(filename, 1, '%s' % (e.reason)) - return source_code, ast_node - - @utils.prevent_recursion(lambda: {}) - def _create_concluded_attributes(self): - result = {} - for star_import in self.star_imports: - result.update(star_import.get_names()) - return result - - def _create_scope(self): - return 
rope.base.pyscopes.GlobalScope(self.pycore, self) - - @property - @utils.saveit - def lines(self): - """A `SourceLinesAdapter`""" - return rope.base.codeanalyze.SourceLinesAdapter(self.source_code) - - @property - @utils.saveit - def logical_lines(self): - """A `LogicalLinesFinder`""" - return rope.base.codeanalyze.CachingLogicalLineFinder(self.lines) - - -class PyPackage(pyobjects.PyPackage): - - def __init__(self, pycore, resource=None, force_errors=False): - self.resource = resource - init_dot_py = self._get_init_dot_py() - if init_dot_py is not None: - ast_node = pycore.resource_to_pyobject( - init_dot_py, force_errors=force_errors).get_ast() - else: - ast_node = ast.parse('\n') - super(PyPackage, self).__init__(pycore, ast_node, resource) - - def _create_structural_attributes(self): - result = {} - modname = self.pycore.modname(self.resource) - extension_submodules = self.pycore._builtin_submodules(modname) - for name, module in extension_submodules.items(): - result[name] = rope.base.builtins.BuiltinName(module) - if self.resource is None: - return result - for name, resource in self._get_child_resources().items(): - result[name] = pynames.ImportedModule(self, resource=resource) - return result - - def _create_concluded_attributes(self): - result = {} - init_dot_py = self._get_init_dot_py() - if init_dot_py: - init_object = self.pycore.resource_to_pyobject(init_dot_py) - result.update(init_object.get_attributes()) - return result - - def _get_child_resources(self): - result = {} - for child in self.resource.get_children(): - if child.is_folder(): - result[child.name] = child - elif child.name.endswith('.py') and \ - child.name != '__init__.py': - name = child.name[:-3] - result[name] = child - return result - - def _get_init_dot_py(self): - if self.resource is not None and self.resource.has_child('__init__.py'): - return self.resource.get_child('__init__.py') - else: - return None - - def _create_scope(self): - return self.get_module().get_scope() - - def get_module(self): - init_dot_py = self._get_init_dot_py() - if init_dot_py: - return self.pycore.resource_to_pyobject(init_dot_py) - return self - - -class _AssignVisitor(object): - - def __init__(self, scope_visitor): - self.scope_visitor = scope_visitor - self.assigned_ast = None - - def _Assign(self, node): - self.assigned_ast = node.value - for child_node in node.targets: - ast.walk(child_node, self) - - def _assigned(self, name, assignment=None): - self.scope_visitor._assigned(name, assignment) - - def _Name(self, node): - assignment = None - if self.assigned_ast is not None: - assignment = pynames.AssignmentValue(self.assigned_ast) - self._assigned(node.id, assignment) - - def _Tuple(self, node): - names = astutils.get_name_levels(node) - for name, levels in names: - assignment = None - if self.assigned_ast is not None: - assignment = pynames.AssignmentValue(self.assigned_ast, levels) - self._assigned(name, assignment) - - def _Attribute(self, node): - pass - - def _Subscript(self, node): - pass - - def _Slice(self, node): - pass - - -class _ScopeVisitor(object): - - def __init__(self, pycore, owner_object): - self.pycore = pycore - self.owner_object = owner_object - self.names = {} - self.defineds = [] - - def get_module(self): - if self.owner_object is not None: - return self.owner_object.get_module() - else: - return None - - def _ClassDef(self, node): - pyclass = PyClass(self.pycore, node, self.owner_object) - self.names[node.name] = pynames.DefinedName(pyclass) - self.defineds.append(pyclass) - - def _FunctionDef(self, 
node): - pyfunction = PyFunction(self.pycore, node, self.owner_object) - for decorator in pyfunction.decorators: - if isinstance(decorator, ast.Name) and decorator.id == 'property': - if isinstance(self, _ClassVisitor): - type_ = rope.base.builtins.Property(pyfunction) - arg = pynames.UnboundName(PyObject(self.owner_object)) - def _eval(type_=type_, arg=arg): - return type_.get_property_object( - arguments.ObjectArguments([arg])) - self.names[node.name] = pynames.EvaluatedName( - _eval, module=self.get_module(), lineno=node.lineno) - break - else: - self.names[node.name] = pynames.DefinedName(pyfunction) - self.defineds.append(pyfunction) - - def _Assign(self, node): - ast.walk(node, _AssignVisitor(self)) - - def _AugAssign(self, node): - pass - - def _For(self, node): - names = self._update_evaluated(node.target, node.iter, - '.__iter__().next()') - for child in node.body + node.orelse: - ast.walk(child, self) - - def _assigned(self, name, assignment): - pyname = self.names.get(name, None) - if pyname is None: - pyname = pynames.AssignedName(module=self.get_module()) - if isinstance(pyname, pynames.AssignedName): - if assignment is not None: - pyname.assignments.append(assignment) - self.names[name] = pyname - - def _update_evaluated(self, targets, assigned, - evaluation= '', eval_type=False): - result = {} - if not isinstance(targets, str): - names = astutils.get_name_levels(targets) - for name, levels in names: - assignment = pynames.AssignmentValue(assigned, levels, - evaluation, eval_type) - self._assigned(name, assignment) - else: - assignment = pynames.AssignmentValue(assigned, [], - evaluation, eval_type) - self._assigned(targets, assignment) - return result - - def _With(self, node): - if (sys.version_info[1] < 3): - if node.optional_vars: - self._update_evaluated(node.optional_vars, - node.context_expr, '.__enter__()') - elif len(node.items) > 0: - #TODO Handle all items? - if node.items[0].optional_vars: - self._update_evaluated( - node.items[0].optional_vars, - node.items[0].context_expr, - '.__enter__()' - ) - - for child in node.body: - ast.walk(child, self) - - def _excepthandler(self, node): - if node.name is not None and isinstance(node.name, str): - type_node = node.type - if isinstance(node.type, ast.Tuple) and type_node.elts: - type_node = type_node.elts[0] - self._update_evaluated(node.name, type_node, eval_type=True) - for child in node.body: - ast.walk(child, self) - - def _ExceptHandler(self, node): - self._excepthandler(node) - - def _Import(self, node): - for import_pair in node.names: - module_name = import_pair.name - alias = import_pair.asname - first_package = module_name.split('.')[0] - if alias is not None: - imported = pynames.ImportedModule(self.get_module(), - module_name) - if not self._is_ignored_import(imported): - self.names[alias] = imported - else: - imported = pynames.ImportedModule(self.get_module(), - first_package) - if not self._is_ignored_import(imported): - self.names[first_package] = imported - - def _ImportFrom(self, node): - level = 0 - if node.level: - level = node.level - if node.module is None and len(node.names) > 0: #Relative import "." 
- self._Import(node) - return - imported_module = pynames.ImportedModule(self.get_module(), - node.module, level) - if self._is_ignored_import(imported_module): - return - if len(node.names) == 1 and node.names[0].name == '*': - if isinstance(self.owner_object, PyModule): - self.owner_object.star_imports.append( - StarImport(imported_module)) - else: - for imported_name in node.names: - imported = imported_name.name - alias = imported_name.asname - if alias is not None: - imported = alias - self.names[imported] = pynames.ImportedName(imported_module, - imported_name.name) - - def _is_ignored_import(self, imported_module): - if not self.pycore.project.prefs.get('ignore_bad_imports', False): - return False - return not isinstance(imported_module.get_object(), AbstractModule) - - def _Global(self, node): - module = self.get_module() - for name in node.names: - if module is not None: - try: - pyname = module[name] - except exceptions.AttributeNotFoundError: - pyname = pynames.AssignedName(node.lineno) - self.names[name] = pyname - - -class _GlobalVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_GlobalVisitor, self).__init__(pycore, owner_object) - - -class _ClassVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_ClassVisitor, self).__init__(pycore, owner_object) - - def _FunctionDef(self, node): - _ScopeVisitor._FunctionDef(self, node) - if len(node.args.args) > 0: - first = node.args.args[0] - if isinstance(first, ast.arg): - new_visitor = _ClassInitVisitor(self, first.arg) - for child in ast.get_child_nodes(node): - ast.walk(child, new_visitor) - - -class _FunctionVisitor(_ScopeVisitor): - - def __init__(self, pycore, owner_object): - super(_FunctionVisitor, self).__init__(pycore, owner_object) - self.returned_asts = [] - self.generator = False - - def _Return(self, node): - if node.value is not None: - self.returned_asts.append(node.value) - - def _Yield(self, node): - if node.value is not None: - self.returned_asts.append(node.value) - self.generator = True - - -class _ClassInitVisitor(_AssignVisitor): - - def __init__(self, scope_visitor, self_name): - super(_ClassInitVisitor, self).__init__(scope_visitor) - self.self_name = self_name - - def _Attribute(self, node): - if not isinstance(node.ctx, ast.Store): - return - if isinstance(node.value, ast.Name) and \ - node.value.id == self.self_name: - if node.attr not in self.scope_visitor.names: - self.scope_visitor.names[node.attr] = pynames.AssignedName( - lineno=node.lineno, module=self.scope_visitor.get_module()) - if self.assigned_ast is not None: - pyname = self.scope_visitor.names[node.attr] - if isinstance(pyname, pynames.AssignedName): - pyname.assignments.append( - pynames.AssignmentValue(self.assigned_ast)) - - def _Tuple(self, node): - if not isinstance(node.ctx, ast.Store): - return - for child in ast.get_child_nodes(node): - ast.walk(child, self) - - def _Name(self, node): - pass - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def _For(self, node): - pass - - def _With(self, node): - pass - - -class StarImport(object): - - def __init__(self, imported_module): - self.imported_module = imported_module - - def get_names(self): - result = {} - imported = self.imported_module.get_object() - for name in imported: - if not name.startswith('_'): - result[name] = pynames.ImportedName(self.imported_module, name) - return result diff --git a/pymode/libs3/rope/base/pyscopes.py b/pymode/libs3/rope/base/pyscopes.py deleted file mode 100644 index 
a00381b7..00000000 --- a/pymode/libs3/rope/base/pyscopes.py +++ /dev/null @@ -1,313 +0,0 @@ -import rope.base.builtins -import rope.base.codeanalyze -import rope.base.pynames -from rope.base import ast, exceptions, utils - - -class Scope(object): - - def __init__(self, pycore, pyobject, parent_scope): - self.pycore = pycore - self.pyobject = pyobject - self.parent = parent_scope - - def get_names(self): - """Return the names defined or imported in this scope""" - return self.pyobject.get_attributes() - - def get_defined_names(self): - """Return the names defined in this scope""" - return self.pyobject._get_structural_attributes() - - def get_name(self, name): - """Return name `PyName` defined in this scope""" - if name not in self.get_names(): - raise exceptions.NameNotFoundError('name %s not found' % name) - return self.get_names()[name] - - def __getitem__(self, key): - """The same as ``get_name(key)``""" - return self.get_name(key) - - def __contains__(self, key): - """The same as ``key in self.get_names()``""" - return key in self.get_names() - - @utils.saveit - def get_scopes(self): - """Return the subscopes of this scope - - The returned scopes should be sorted by the order they appear. - """ - return self._create_scopes() - - def lookup(self, name): - if name in self.get_names(): - return self.get_names()[name] - if self.parent is not None: - return self.parent._propagated_lookup(name) - return None - - def get_propagated_names(self): - """Return the visible names of this scope - - Return the names defined in this scope that are visible from - scopes containing this scope. This method returns the same - dictionary returned by `get_names()` except for `ClassScope` - which returns an empty dict. - """ - return self.get_names() - - def _propagated_lookup(self, name): - if name in self.get_propagated_names(): - return self.get_propagated_names()[name] - if self.parent is not None: - return self.parent._propagated_lookup(name) - return None - - def _create_scopes(self): - return [pydefined.get_scope() - for pydefined in self.pyobject._get_defined_objects()] - - def _get_global_scope(self): - current = self - while current.parent is not None: - current = current.parent - return current - - def get_start(self): - return self.pyobject.get_ast().lineno - - def get_body_start(self): - body = self.pyobject.get_ast().body - if body: - return body[0].lineno - return self.get_start() - - def get_end(self): - pymodule = self._get_global_scope().pyobject - return pymodule.logical_lines.logical_line_in(self.logical_end)[1] - - @utils.saveit - def get_logical_end(self): - global_scope = self._get_global_scope() - return global_scope._scope_finder.find_scope_end(self) - - start = property(get_start) - end = property(get_end) - logical_end = property(get_logical_end) - - def get_kind(self): - pass - - -class GlobalScope(Scope): - - def __init__(self, pycore, module): - super(GlobalScope, self).__init__(pycore, module, None) - self.names = module._get_concluded_data() - - def get_start(self): - return 1 - - def get_kind(self): - return 'Module' - - def get_name(self, name): - try: - return self.pyobject[name] - except exceptions.AttributeNotFoundError: - if name in self.builtin_names: - return self.builtin_names[name] - raise exceptions.NameNotFoundError('name %s not found' % name) - - def get_names(self): - if self.names.get() is None: - result = dict(self.builtin_names) - result.update(super(GlobalScope, self).get_names()) - self.names.set(result) - return self.names.get() - - def 
get_inner_scope_for_line(self, lineno, indents=None): - return self._scope_finder.get_holding_scope(self, lineno, indents) - - def get_inner_scope_for_offset(self, offset): - return self._scope_finder.get_holding_scope_for_offset(self, offset) - - @property - @utils.saveit - def _scope_finder(self): - return _HoldingScopeFinder(self.pyobject) - - @property - def builtin_names(self): - return rope.base.builtins.builtins.get_attributes() - - -class FunctionScope(Scope): - - def __init__(self, pycore, pyobject, visitor): - super(FunctionScope, self).__init__(pycore, pyobject, - pyobject.parent.get_scope()) - self.names = None - self.returned_asts = None - self.is_generator = None - self.defineds = None - self.visitor = visitor - - def _get_names(self): - if self.names is None: - self._visit_function() - return self.names - - def _visit_function(self): - if self.names is None: - new_visitor = self.visitor(self.pycore, self.pyobject) - for n in ast.get_child_nodes(self.pyobject.get_ast()): - ast.walk(n, new_visitor) - self.names = new_visitor.names - self.names.update(self.pyobject.get_parameters()) - self.returned_asts = new_visitor.returned_asts - self.is_generator = new_visitor.generator - self.defineds = new_visitor.defineds - - def _get_returned_asts(self): - if self.names is None: - self._visit_function() - return self.returned_asts - - def _is_generator(self): - if self.is_generator is None: - self._get_returned_asts() - return self.is_generator - - def get_names(self): - return self._get_names() - - def _create_scopes(self): - if self.defineds is None: - self._visit_function() - return [pydefined.get_scope() for pydefined in self.defineds] - - def get_kind(self): - return 'Function' - - def invalidate_data(self): - for pyname in self.get_names().values(): - if isinstance(pyname, (rope.base.pynames.AssignedName, - rope.base.pynames.EvaluatedName)): - pyname.invalidate() - - -class ClassScope(Scope): - - def __init__(self, pycore, pyobject): - super(ClassScope, self).__init__(pycore, pyobject, - pyobject.parent.get_scope()) - - def get_kind(self): - return 'Class' - - def get_propagated_names(self): - return {} - - -class _HoldingScopeFinder(object): - - def __init__(self, pymodule): - self.pymodule = pymodule - - def get_indents(self, lineno): - return rope.base.codeanalyze.count_line_indents( - self.lines.get_line(lineno)) - - def _get_scope_indents(self, scope): - return self.get_indents(scope.get_start()) - - def get_holding_scope(self, module_scope, lineno, line_indents=None): - if line_indents is None: - line_indents = self.get_indents(lineno) - current_scope = module_scope - new_scope = current_scope - while new_scope is not None and \ - (new_scope.get_kind() == 'Module' or - self._get_scope_indents(new_scope) <= line_indents): - current_scope = new_scope - if current_scope.get_start() == lineno and \ - current_scope.get_kind() != 'Module': - return current_scope - new_scope = None - for scope in current_scope.get_scopes(): - if scope.get_start() <= lineno: - if lineno <= scope.get_end(): - new_scope = scope - break - else: - break - return current_scope - - def _is_empty_line(self, lineno): - line = self.lines.get_line(lineno) - return line.strip() == '' or line.lstrip().startswith('#') - - def _get_body_indents(self, scope): - return self.get_indents(scope.get_body_start()) - - def get_holding_scope_for_offset(self, scope, offset): - return self.get_holding_scope( - scope, self.lines.get_line_number(offset)) - - def find_scope_end(self, scope): - if not scope.parent: - return 
self.lines.length() - end = scope.pyobject.get_ast().body[-1].lineno - scope_start = self.pymodule.logical_lines.logical_line_in(scope.start) - if scope_start[1] >= end: - # handling one-liners - body_indents = self._get_scope_indents(scope) + 4 - else: - body_indents = self._get_body_indents(scope) - for l in self.logical_lines.generate_starts( - min(end + 1, self.lines.length()), self.lines.length() + 1): - if not self._is_empty_line(l): - if self.get_indents(l) < body_indents: - return end - else: - end = l - return end - - @property - def lines(self): - return self.pymodule.lines - - @property - def code(self): - return self.pymodule.source_code - - @property - def logical_lines(self): - return self.pymodule.logical_lines - -class TemporaryScope(Scope): - """Currently used for list comprehensions and generator expressions - - These scopes do not appear in the `get_scopes()` method of their - parent scopes. - """ - - def __init__(self, pycore, parent_scope, names): - super(TemporaryScope, self).__init__( - pycore, parent_scope.pyobject, parent_scope) - self.names = names - - def get_names(self): - return self.names - - def get_defined_names(self): - return self.names - - def _create_scopes(self): - return [] - - def get_kind(self): - return 'Temporary' diff --git a/pymode/libs3/rope/base/resourceobserver.py b/pymode/libs3/rope/base/resourceobserver.py deleted file mode 100644 index 6d1accbc..00000000 --- a/pymode/libs3/rope/base/resourceobserver.py +++ /dev/null @@ -1,271 +0,0 @@ -import os - - -class ResourceObserver(object): - """Provides the interface for observing resources - - `ResourceObserver`\s can be registered using `Project. - add_observer()`. But most of the time `FilteredResourceObserver` - should be used. `ResourceObserver`\s report all changes passed - to them and they don't report changes to all resources. For - example if a folder is removed, it only calls `removed()` for that - folder and not its contents. You can use - `FilteredResourceObserver` if you are interested in changes only - to a list of resources. And you want changes to be reported on - individual resources. - - """ - - def __init__(self, changed=None, moved=None, created=None, - removed=None, validate=None): - self.changed = changed - self.moved = moved - self.created = created - self.removed = removed - self._validate = validate - - def resource_changed(self, resource): - """It is called when the resource changes""" - if self.changed is not None: - self.changed(resource) - - def resource_moved(self, resource, new_resource): - """It is called when a resource is moved""" - if self.moved is not None: - self.moved(resource, new_resource) - - def resource_created(self, resource): - """Is called when a new resource is created""" - if self.created is not None: - self.created(resource) - - def resource_removed(self, resource): - """Is called when a new resource is removed""" - if self.removed is not None: - self.removed(resource) - - def validate(self, resource): - """Validate the existence of this resource and its children. - - This function is called when rope need to update its resource - cache about the files that might have been changed or removed - by other processes. - - """ - if self._validate is not None: - self._validate(resource) - - -class FilteredResourceObserver(object): - """A useful decorator for `ResourceObserver` - - Most resource observers have a list of resources and are - interested only in changes to those files. This class satisfies - this need. 
It dispatches resource changed and removed messages. - It performs these tasks: - - * Changes to files and folders are analyzed to check whether any - of the interesting resources are changed or not. If they are, - it reports these changes to `resource_observer` passed to the - constructor. - * When a resource is removed it checks whether any of the - interesting resources are contained in that folder and reports - them to `resource_observer`. - * When validating a folder it validates all of the interesting - files in that folder. - - Since most resource observers are interested in a list of - resources that change over time, `add_resource` and - `remove_resource` might be useful. - - """ - - def __init__(self, resource_observer, initial_resources=None, - timekeeper=None): - self.observer = resource_observer - self.resources = {} - if timekeeper is not None: - self.timekeeper = timekeeper - else: - self.timekeeper = ChangeIndicator() - if initial_resources is not None: - for resource in initial_resources: - self.add_resource(resource) - - def add_resource(self, resource): - """Add a resource to the list of interesting resources""" - if resource.exists(): - self.resources[resource] = self.timekeeper.get_indicator(resource) - else: - self.resources[resource] = None - - def remove_resource(self, resource): - """Add a resource to the list of interesting resources""" - if resource in self.resources: - del self.resources[resource] - - def clear_resources(self): - """Removes all registered resources""" - self.resources.clear() - - def resource_changed(self, resource): - changes = _Changes() - self._update_changes_caused_by_changed(changes, resource) - self._perform_changes(changes) - - def _update_changes_caused_by_changed(self, changes, changed): - if changed in self.resources: - changes.add_changed(changed) - if self._is_parent_changed(changed): - changes.add_changed(changed.parent) - - def _update_changes_caused_by_moved(self, changes, resource, - new_resource=None): - if resource in self.resources: - changes.add_removed(resource, new_resource) - if new_resource in self.resources: - changes.add_created(new_resource) - if resource.is_folder(): - for file in list(self.resources): - if resource.contains(file): - new_file = self._calculate_new_resource( - resource, new_resource, file) - changes.add_removed(file, new_file) - if self._is_parent_changed(resource): - changes.add_changed(resource.parent) - if new_resource is not None: - if self._is_parent_changed(new_resource): - changes.add_changed(new_resource.parent) - - def _is_parent_changed(self, child): - return child.parent in self.resources - - def resource_moved(self, resource, new_resource): - changes = _Changes() - self._update_changes_caused_by_moved(changes, resource, new_resource) - self._perform_changes(changes) - - def resource_created(self, resource): - changes = _Changes() - self._update_changes_caused_by_created(changes, resource) - self._perform_changes(changes) - - def _update_changes_caused_by_created(self, changes, resource): - if resource in self.resources: - changes.add_created(resource) - if self._is_parent_changed(resource): - changes.add_changed(resource.parent) - - def resource_removed(self, resource): - changes = _Changes() - self._update_changes_caused_by_moved(changes, resource) - self._perform_changes(changes) - - def _perform_changes(self, changes): - for resource in changes.changes: - self.observer.resource_changed(resource) - self.resources[resource] = self.timekeeper.get_indicator(resource) - for resource, 
new_resource in changes.moves.items(): - self.resources[resource] = None - if new_resource is not None: - self.observer.resource_moved(resource, new_resource) - else: - self.observer.resource_removed(resource) - for resource in changes.creations: - self.observer.resource_created(resource) - self.resources[resource] = self.timekeeper.get_indicator(resource) - - def validate(self, resource): - changes = _Changes() - for file in self._search_resource_moves(resource): - if file in self.resources: - self._update_changes_caused_by_moved(changes, file) - for file in self._search_resource_changes(resource): - if file in self.resources: - self._update_changes_caused_by_changed(changes, file) - for file in self._search_resource_creations(resource): - if file in self.resources: - changes.add_created(file) - self._perform_changes(changes) - - def _search_resource_creations(self, resource): - creations = set() - if resource in self.resources and resource.exists() and \ - self.resources[resource] is None: - creations.add(resource) - if resource.is_folder(): - for file in self.resources: - if file.exists() and resource.contains(file) and \ - self.resources[file] is None: - creations.add(file) - return creations - - def _search_resource_moves(self, resource): - all_moved = set() - if resource in self.resources and not resource.exists(): - all_moved.add(resource) - if resource.is_folder(): - for file in self.resources: - if resource.contains(file): - if not file.exists(): - all_moved.add(file) - moved = set(all_moved) - for folder in [file for file in all_moved if file.is_folder()]: - if folder in moved: - for file in list(moved): - if folder.contains(file): - moved.remove(file) - return moved - - def _search_resource_changes(self, resource): - changed = set() - if resource in self.resources and self._is_changed(resource): - changed.add(resource) - if resource.is_folder(): - for file in self.resources: - if file.exists() and resource.contains(file): - if self._is_changed(file): - changed.add(file) - return changed - - def _is_changed(self, resource): - if self.resources[resource] is None: - return False - return self.resources[resource] != self.timekeeper.get_indicator(resource) - - def _calculate_new_resource(self, main, new_main, resource): - if new_main is None: - return None - diff = resource.path[len(main.path):] - return resource.project.get_resource(new_main.path + diff) - - -class ChangeIndicator(object): - - def get_indicator(self, resource): - """Return the modification time and size of a `Resource`.""" - path = resource.real_path - # on dos, mtime does not change for a folder when files are added - if os.name != 'posix' and os.path.isdir(path): - return (os.path.getmtime(path), - len(os.listdir(path)), - os.path.getsize(path)) - return (os.path.getmtime(path), - os.path.getsize(path)) - - -class _Changes(object): - - def __init__(self): - self.changes = set() - self.creations = set() - self.moves = {} - - def add_changed(self, resource): - self.changes.add(resource) - - def add_removed(self, resource, new_resource=None): - self.moves[resource] = new_resource - - def add_created(self, resource): - self.creations.add(resource) diff --git a/pymode/libs3/rope/base/resources.py b/pymode/libs3/rope/base/resources.py deleted file mode 100644 index 871211a5..00000000 --- a/pymode/libs3/rope/base/resources.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -import re - -import rope.base.change -import rope.base.fscommands -from rope.base import exceptions - - -class Resource(object): - """Represents files and 
folders in a project""" - - def __init__(self, project, path): - self.project = project - self._path = path - - def move(self, new_location): - """Move resource to `new_location`""" - self._perform_change(rope.base.change.MoveResource(self, new_location), - 'Moving <%s> to <%s>' % (self.path, new_location)) - - def remove(self): - """Remove resource from the project""" - self._perform_change(rope.base.change.RemoveResource(self), - 'Removing <%s>' % self.path) - - def is_folder(self): - """Return true if the resource is a folder""" - - def create(self): - """Create this resource""" - - def exists(self): - return os.path.exists(self.real_path) - - @property - def parent(self): - parent = '/'.join(self.path.split('/')[0:-1]) - return self.project.get_folder(parent) - - @property - def path(self): - """Return the path of this resource relative to the project root - - The path is the list of parent directories separated by '/' followed - by the resource name. - """ - return self._path - - @property - def name(self): - """Return the name of this resource""" - return self.path.split('/')[-1] - - @property - def real_path(self): - """Return the file system path of this resource""" - return self.project._get_resource_path(self.path) - - def __eq__(self, obj): - return self.__class__ == obj.__class__ and self.path == obj.path - - def __ne__(self, obj): - return not self.__eq__(obj) - - def __hash__(self): - return hash(self.path) - - def _perform_change(self, change_, description): - changes = rope.base.change.ChangeSet(description) - changes.add_change(change_) - self.project.do(changes) - - -class File(Resource): - """Represents a file""" - - def __init__(self, project, name): - super(File, self).__init__(project, name) - - def read(self): - data = self.read_bytes() - try: - return rope.base.fscommands.file_data_to_unicode(data) - except UnicodeDecodeError as e: - raise exceptions.ModuleDecodeError(self.path, e.reason) - - def read_bytes(self): - with open(self.real_path, 'rb') as fi: - return fi.read() - - def write(self, contents): - try: - if contents == self.read(): - return - except IOError: - pass - self._perform_change(rope.base.change.ChangeContents(self, contents), - 'Writing file <%s>' % self.path) - - def is_folder(self): - return False - - def create(self): - self.parent.create_file(self.name) - - -class Folder(Resource): - """Represents a folder""" - - def __init__(self, project, name): - super(Folder, self).__init__(project, name) - - def is_folder(self): - return True - - def get_children(self): - """Return the children of this folder""" - result = [] - for name in os.listdir(self.real_path): - try: - child = self.get_child(name) - except exceptions.ResourceNotFoundError: - continue - if not self.project.is_ignored(child): - result.append(self.get_child(name)) - return result - - def create_file(self, file_name): - self._perform_change( - rope.base.change.CreateFile(self, file_name), - 'Creating file <%s>' % self._get_child_path(file_name)) - return self.get_child(file_name) - - def create_folder(self, folder_name): - self._perform_change( - rope.base.change.CreateFolder(self, folder_name), - 'Creating folder <%s>' % self._get_child_path(folder_name)) - return self.get_child(folder_name) - - def _get_child_path(self, name): - if self.path: - return self.path + '/' + name - else: - return name - - def get_child(self, name): - return self.project.get_resource(self._get_child_path(name)) - - def has_child(self, name): - try: - self.get_child(name) - return True - except 
exceptions.ResourceNotFoundError: - return False - - def get_files(self): - return [resource for resource in self.get_children() - if not resource.is_folder()] - - def get_folders(self): - return [resource for resource in self.get_children() - if resource.is_folder()] - - def contains(self, resource): - if self == resource: - return False - return self.path == '' or resource.path.startswith(self.path + '/') - - def create(self): - self.parent.create_folder(self.name) - - -class _ResourceMatcher(object): - - def __init__(self): - self.patterns = [] - self._compiled_patterns = [] - - def set_patterns(self, patterns): - """Specify which resources to match - - `patterns` is a `list` of `str`\s that can contain ``*`` and - ``?`` signs for matching resource names. - - """ - self._compiled_patterns = None - self.patterns = patterns - - def _add_pattern(self, pattern): - re_pattern = pattern.replace('.', '\\.').\ - replace('*', '[^/]*').replace('?', '[^/]').\ - replace('//', '/(.*/)?') - re_pattern = '^(.*/)?' + re_pattern + '(/.*)?$' - self.compiled_patterns.append(re.compile(re_pattern)) - - def does_match(self, resource): - for pattern in self.compiled_patterns: - if pattern.match(resource.path): - return True - path = os.path.join(resource.project.address, - *resource.path.split('/')) - if os.path.islink(path): - return True - return False - - @property - def compiled_patterns(self): - if self._compiled_patterns is None: - self._compiled_patterns = [] - for pattern in self.patterns: - self._add_pattern(pattern) - return self._compiled_patterns diff --git a/pymode/libs3/rope/base/simplify.py b/pymode/libs3/rope/base/simplify.py deleted file mode 100644 index bc4cade4..00000000 --- a/pymode/libs3/rope/base/simplify.py +++ /dev/null @@ -1,55 +0,0 @@ -"""A module to ease code analysis - -This module is here to help source code analysis. -""" -import re - -from rope.base import codeanalyze, utils - - -@utils.cached(7) -def real_code(source): - """Simplify `source` for analysis - - It replaces: - - * comments with spaces - * strs with a new str filled with spaces - * implicit and explicit continuations with spaces - * tabs and semicolons with spaces - - The resulting code is a lot easier to analyze if we are interested - only in offsets. 
- """ - collector = codeanalyze.ChangeCollector(source) - for start, end in ignored_regions(source): - if source[start] == '#': - replacement = ' ' * (end - start) - else: - replacement = '"%s"' % (' ' * (end - start - 2)) - collector.add_change(start, end, replacement) - source = collector.get_changed() or source - collector = codeanalyze.ChangeCollector(source) - parens = 0 - for match in _parens.finditer(source): - i = match.start() - c = match.group() - if c in '({[': - parens += 1 - if c in ')}]': - parens -= 1 - if c == '\n' and parens > 0: - collector.add_change(i, i + 1, ' ') - source = collector.get_changed() or source - return source.replace('\\\n', ' ').replace('\t', ' ').replace(';', '\n') - - -@utils.cached(7) -def ignored_regions(source): - """Return ignored regions like strings and comments in `source` """ - return [(match.start(), match.end()) for match in _str.finditer(source)] - - -_str = re.compile('%s|%s' % (codeanalyze.get_comment_pattern(), - codeanalyze.get_string_pattern())) -_parens = re.compile(r'[\({\[\]}\)\n]') diff --git a/pymode/libs3/rope/base/stdmods.py b/pymode/libs3/rope/base/stdmods.py deleted file mode 100644 index 4a7d9fbe..00000000 --- a/pymode/libs3/rope/base/stdmods.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import sys - -from rope.base import utils - - -def _stdlib_path(): - import inspect - return os.path.dirname(inspect.getsourcefile(inspect)) - -@utils.cached(1) -def standard_modules(): - return python_modules() | dynload_modules() - -@utils.cached(1) -def python_modules(): - result = set() - lib_path = _stdlib_path() - if os.path.exists(lib_path): - for name in os.listdir(lib_path): - path = os.path.join(lib_path, name) - if os.path.isdir(path): - if '-' not in name: - result.add(name) - else: - if name.endswith('.py'): - result.add(name[:-3]) - return result - -@utils.cached(1) -def dynload_modules(): - result = set(sys.builtin_module_names) - dynload_path = os.path.join(_stdlib_path(), 'lib-dynload') - if os.path.exists(dynload_path): - for name in os.listdir(dynload_path): - path = os.path.join(dynload_path, name) - if os.path.isfile(path): - if name.endswith('.so') or name.endswith('.dll'): - if "cpython" in name: - result.add(os.path.splitext(os.path.splitext(name)[0])[0]) - else: - result.add(os.path.splitext(name)[0]) - return result diff --git a/pymode/libs3/rope/base/taskhandle.py b/pymode/libs3/rope/base/taskhandle.py deleted file mode 100644 index 6d4ed856..00000000 --- a/pymode/libs3/rope/base/taskhandle.py +++ /dev/null @@ -1,133 +0,0 @@ -import warnings - -from rope.base import exceptions - - -class TaskHandle(object): - - def __init__(self, name='Task', interrupts=True): - """Construct a TaskHandle - - If `interrupts` is `False` the task won't be interrupted by - calling `TaskHandle.stop()`. - - """ - self.name = name - self.interrupts = interrupts - self.stopped = False - self.job_sets = [] - self.observers = [] - - def stop(self): - """Interrupts the refactoring""" - if self.interrupts: - self.stopped = True - self._inform_observers() - - def current_jobset(self): - """Return the current `JobSet`""" - if self.job_sets: - return self.job_sets[-1] - - def add_observer(self, observer): - """Register an observer for this task handle - - The observer is notified whenever the task is stopped or - a job gets finished. 
- - """ - self.observers.append(observer) - - def is_stopped(self): - return self.stopped - - def get_jobsets(self): - return self.job_sets - - def create_jobset(self, name='JobSet', count=None): - result = JobSet(self, name=name, count=count) - self.job_sets.append(result) - self._inform_observers() - return result - - def _inform_observers(self): - for observer in list(self.observers): - observer() - - -class JobSet(object): - - def __init__(self, handle, name, count): - self.handle = handle - self.name = name - self.count = count - self.done = 0 - self.job_name = None - - def started_job(self, name): - self.check_status() - self.job_name = name - self.handle._inform_observers() - - def finished_job(self): - self.check_status() - self.done += 1 - self.handle._inform_observers() - self.job_name = None - - def check_status(self): - if self.handle.is_stopped(): - raise exceptions.InterruptedTaskError() - - def get_active_job_name(self): - return self.job_name - - def get_percent_done(self): - if self.count is not None and self.count > 0: - percent = self.done * 100 // self.count - return min(percent, 100) - - def get_name(self): - return self.name - - -class NullTaskHandle(object): - - def __init__(self): - pass - - def is_stopped(self): - return False - - def stop(self): - pass - - def create_jobset(self, *args, **kwds): - return NullJobSet() - - def get_jobsets(self): - return [] - - def add_observer(self, observer): - pass - - -class NullJobSet(object): - - def started_job(self, name): - pass - - def finished_job(self): - pass - - def check_status(self): - pass - - def get_active_job_name(self): - pass - - def get_percent_done(self): - pass - - def get_name(self): - pass diff --git a/pymode/libs3/rope/base/utils.py b/pymode/libs3/rope/base/utils.py deleted file mode 100644 index e35ecbf3..00000000 --- a/pymode/libs3/rope/base/utils.py +++ /dev/null @@ -1,78 +0,0 @@ -import warnings - - -def saveit(func): - """A decorator that caches the return value of a function""" - - name = '_' + func.__name__ - def _wrapper(self, *args, **kwds): - if not hasattr(self, name): - setattr(self, name, func(self, *args, **kwds)) - return getattr(self, name) - return _wrapper - -cacheit = saveit - -def prevent_recursion(default): - """A decorator that returns the return value of `default` in recursions""" - def decorator(func): - name = '_calling_%s_' % func.__name__ - def newfunc(self, *args, **kwds): - if getattr(self, name, False): - return default() - setattr(self, name, True) - try: - return func(self, *args, **kwds) - finally: - setattr(self, name, False) - return newfunc - return decorator - - -def ignore_exception(exception_class): - """A decorator that ignores `exception_class` exceptions""" - def _decorator(func): - def newfunc(*args, **kwds): - try: - return func(*args, **kwds) - except exception_class: - pass - return newfunc - return _decorator - - -def deprecated(message=None): - """A decorator for deprecated functions""" - def _decorator(func, message=message): - if message is None: - message = '%s is deprecated' % func.__name__ - def newfunc(*args, **kwds): - warnings.warn(message, DeprecationWarning, stacklevel=2) - return func(*args, **kwds) - return newfunc - return _decorator - - -def cached(count): - """A caching decorator based on parameter objects""" - def decorator(func): - return _Cached(func, count) - return decorator - -class _Cached(object): - - def __init__(self, func, count): - self.func = func - self.cache = [] - self.count = count - - def __call__(self, *args, **kwds): - key 
= (args, kwds) - for cached_key, cached_result in self.cache: - if cached_key == key: - return cached_result - result = self.func(*args, **kwds) - self.cache.append((key, result)) - if len(self.cache) > self.count: - del self.cache[0] - return result diff --git a/pymode/libs3/rope/base/worder.py b/pymode/libs3/rope/base/worder.py deleted file mode 100644 index 08d75f34..00000000 --- a/pymode/libs3/rope/base/worder.py +++ /dev/null @@ -1,524 +0,0 @@ -import bisect -import keyword - -import rope.base.simplify - - -def get_name_at(resource, offset): - source_code = resource.read() - word_finder = Worder(source_code) - return word_finder.get_word_at(offset) - - -class Worder(object): - """A class for finding boundaries of words and expressions - - Note that in these methods, offset should be the index of the - character not the index of the character after it. - """ - - def __init__(self, code, handle_ignores=False): - simplified = rope.base.simplify.real_code(code) - self.code_finder = _RealFinder(simplified, code) - self.handle_ignores = handle_ignores - self.code = code - - def _init_ignores(self): - ignores = rope.base.simplify.ignored_regions(self.code) - self.dumb_finder = _RealFinder(self.code, self.code) - self.starts = [ignored[0] for ignored in ignores] - self.ends = [ignored[1] for ignored in ignores] - - def _context_call(self, name, offset): - if self.handle_ignores: - if not hasattr(self, 'starts'): - self._init_ignores() - start = bisect.bisect(self.starts, offset) - if start > 0 and offset < self.ends[start - 1]: - return getattr(self.dumb_finder, name)(offset) - return getattr(self.code_finder, name)(offset) - - def get_primary_at(self, offset): - return self._context_call('get_primary_at', offset) - - def get_word_at(self, offset): - return self._context_call('get_word_at', offset) - - def get_primary_range(self, offset): - return self._context_call('get_primary_range', offset) - - def get_splitted_primary_before(self, offset): - return self._context_call('get_splitted_primary_before', offset) - - def get_word_range(self, offset): - return self._context_call('get_word_range', offset) - - def is_function_keyword_parameter(self, offset): - return self.code_finder.is_function_keyword_parameter(offset) - - def is_a_class_or_function_name_in_header(self, offset): - return self.code_finder.is_a_class_or_function_name_in_header(offset) - - def is_from_statement_module(self, offset): - return self.code_finder.is_from_statement_module(offset) - - def is_from_aliased(self, offset): - return self.code_finder.is_from_aliased(offset) - - def find_parens_start_from_inside(self, offset): - return self.code_finder.find_parens_start_from_inside(offset) - - def is_a_name_after_from_import(self, offset): - return self.code_finder.is_a_name_after_from_import(offset) - - def is_from_statement(self, offset): - return self.code_finder.is_from_statement(offset) - - def get_from_aliased(self, offset): - return self.code_finder.get_from_aliased(offset) - - def is_import_statement(self, offset): - return self.code_finder.is_import_statement(offset) - - def is_assigned_here(self, offset): - return self.code_finder.is_assigned_here(offset) - - def is_a_function_being_called(self, offset): - return self.code_finder.is_a_function_being_called(offset) - - def get_word_parens_range(self, offset): - return self.code_finder.get_word_parens_range(offset) - - def is_name_assigned_in_class_body(self, offset): - return self.code_finder.is_name_assigned_in_class_body(offset) - - def 
is_on_function_call_keyword(self, offset): - return self.code_finder.is_on_function_call_keyword(offset) - - def _find_parens_start(self, offset): - return self.code_finder._find_parens_start(offset) - - def get_parameters(self, first, last): - return self.code_finder.get_parameters(first, last) - - def get_from_module(self, offset): - return self.code_finder.get_from_module(offset) - - def is_assigned_in_a_tuple_assignment(self, offset): - return self.code_finder.is_assigned_in_a_tuple_assignment(offset) - - def get_assignment_type(self, offset): - return self.code_finder.get_assignment_type(offset) - - def get_function_and_args_in_header(self, offset): - return self.code_finder.get_function_and_args_in_header(offset) - - def get_lambda_and_args(self, offset): - return self.code_finder.get_lambda_and_args(offset) - - def find_function_offset(self, offset): - return self.code_finder.find_function_offset(offset) - - -class _RealFinder(object): - - def __init__(self, code, raw): - self.code = code - self.raw = raw - - def _find_word_start(self, offset): - current_offset = offset - while current_offset >= 0 and self._is_id_char(current_offset): - current_offset -= 1 - return current_offset + 1 - - def _find_word_end(self, offset): - while offset + 1 < len(self.code) and self._is_id_char(offset + 1): - offset += 1 - return offset - - def _find_last_non_space_char(self, offset): - while offset >= 0 and self.code[offset].isspace(): - if self.code[offset] == '\n': - return offset - offset -= 1 - return max(-1, offset) - - def get_word_at(self, offset): - offset = self._get_fixed_offset(offset) - return self.raw[self._find_word_start(offset): - self._find_word_end(offset) + 1] - - def _get_fixed_offset(self, offset): - if offset >= len(self.code): - return offset - 1 - if not self._is_id_char(offset): - if offset > 0 and self._is_id_char(offset - 1): - return offset - 1 - if offset < len(self.code) - 1 and self._is_id_char(offset + 1): - return offset + 1 - return offset - - def _is_id_char(self, offset): - return self.code[offset].isalnum() or self.code[offset] == '_' - - def _find_string_start(self, offset): - kind = self.code[offset] - try: - return self.code.rindex(kind, 0, offset) - except ValueError: - return 0 - - def _find_parens_start(self, offset): - offset = self._find_last_non_space_char(offset - 1) - while offset >= 0 and self.code[offset] not in '[({': - if self.code[offset] not in ':,': - offset = self._find_primary_start(offset) - offset = self._find_last_non_space_char(offset - 1) - return offset - - def _find_atom_start(self, offset): - old_offset = offset - if self.code[offset] == '\n': - return offset + 1 - if self.code[offset].isspace(): - offset = self._find_last_non_space_char(offset) - if self.code[offset] in '\'"': - return self._find_string_start(offset) - if self.code[offset] in ')]}': - return self._find_parens_start(offset) - if self._is_id_char(offset): - return self._find_word_start(offset) - return old_offset - - def _find_primary_without_dot_start(self, offset): - """It tries to find the undotted primary start - - It is different from `self._get_atom_start()` in that it - follows function calls, too; such as in ``f(x)``. 
- - """ - last_atom = offset - offset = self._find_last_non_space_char(last_atom) - while offset > 0 and self.code[offset] in ')]': - last_atom = self._find_parens_start(offset) - offset = self._find_last_non_space_char(last_atom - 1) - if offset >= 0 and (self.code[offset] in '"\'})]' or - self._is_id_char(offset)): - atom_start = self._find_atom_start(offset) - if not keyword.iskeyword(self.code[atom_start:offset + 1]): - return atom_start - return last_atom - - def _find_primary_start(self, offset): - if offset >= len(self.code): - offset = len(self.code) - 1 - if self.code[offset] != '.': - offset = self._find_primary_without_dot_start(offset) - else: - offset = offset + 1 - while offset > 0: - prev = self._find_last_non_space_char(offset - 1) - if offset <= 0 or self.code[prev] != '.': - break - offset = self._find_primary_without_dot_start(prev - 1) - if not self._is_id_char(offset): - break - - return offset - - def get_primary_at(self, offset): - offset = self._get_fixed_offset(offset) - start, end = self.get_primary_range(offset) - return self.raw[start:end].strip() - - def get_splitted_primary_before(self, offset): - """returns expression, starting, starting_offset - - This function is used in `rope.codeassist.assist` function. - """ - if offset == 0: - return ('', '', 0) - end = offset - 1 - word_start = self._find_atom_start(end) - real_start = self._find_primary_start(end) - if self.code[word_start:offset].strip() == '': - word_start = end - if self.code[end].isspace(): - word_start = end - if self.code[real_start:word_start].strip() == '': - real_start = word_start - if real_start == word_start == end and not self._is_id_char(end): - return ('', '', offset) - if real_start == word_start: - return ('', self.raw[word_start:offset], word_start) - else: - if self.code[end] == '.': - return (self.raw[real_start:end], '', offset) - last_dot_position = word_start - if self.code[word_start] != '.': - last_dot_position = self._find_last_non_space_char(word_start - 1) - last_char_position = self._find_last_non_space_char(last_dot_position - 1) - if self.code[word_start].isspace(): - word_start = offset - return (self.raw[real_start:last_char_position + 1], - self.raw[word_start:offset], word_start) - - def _get_line_start(self, offset): - try: - return self.code.rindex('\n', 0, offset + 1) - except ValueError: - return 0 - - def _get_line_end(self, offset): - try: - return self.code.index('\n', offset) - except ValueError: - return len(self.code) - - def is_name_assigned_in_class_body(self, offset): - word_start = self._find_word_start(offset - 1) - word_end = self._find_word_end(offset) + 1 - if '.' 
in self.code[word_start:word_end]: - return False - line_start = self._get_line_start(word_start) - line = self.code[line_start:word_start].strip() - return not line and self.get_assignment_type(offset) == '=' - - def is_a_class_or_function_name_in_header(self, offset): - word_start = self._find_word_start(offset - 1) - line_start = self._get_line_start(word_start) - prev_word = self.code[line_start:word_start].strip() - return prev_word in ['def', 'class'] - - def _find_first_non_space_char(self, offset): - if offset >= len(self.code): - return len(self.code) - while offset < len(self.code) and self.code[offset].isspace(): - if self.code[offset] == '\n': - return offset - offset += 1 - return offset - - def is_a_function_being_called(self, offset): - word_end = self._find_word_end(offset) + 1 - next_char = self._find_first_non_space_char(word_end) - return next_char < len(self.code) and \ - self.code[next_char] == '(' and \ - not self.is_a_class_or_function_name_in_header(offset) - - def _find_import_end(self, start): - return self._get_line_end(start) - - def is_import_statement(self, offset): - try: - last_import = self.code.rindex('import ', 0, offset) - except ValueError: - return False - return self._find_import_end(last_import + 7) >= offset - - def is_from_statement(self, offset): - try: - last_from = self.code.rindex('from ', 0, offset) - from_import = self.code.index(' import ', last_from) - from_names = from_import + 8 - except ValueError: - return False - from_names = self._find_first_non_space_char(from_names) - return self._find_import_end(from_names) >= offset - - def is_from_statement_module(self, offset): - if offset >= len(self.code) - 1: - return False - stmt_start = self._find_primary_start(offset) - line_start = self._get_line_start(stmt_start) - prev_word = self.code[line_start:stmt_start].strip() - return prev_word == 'from' - - def is_a_name_after_from_import(self, offset): - try: - if len(self.code) > offset and self.code[offset] == '\n': - line_start = self._get_line_start(offset - 1) - else: - line_start = self._get_line_start(offset) - last_from = self.code.rindex('from ', line_start, offset) - from_import = self.code.index(' import ', last_from) - from_names = from_import + 8 - except ValueError: - return False - if from_names - 1 > offset: - return False - return self._find_import_end(from_names) >= offset - - def get_from_module(self, offset): - try: - last_from = self.code.rindex('from ', 0, offset) - import_offset = self.code.index(' import ', last_from) - end = self._find_last_non_space_char(import_offset) - return self.get_primary_at(end) - except ValueError: - pass - - def is_from_aliased(self, offset): - if not self.is_a_name_after_from_import(offset): - return False - try: - end = self._find_word_end(offset) - as_end = min(self._find_word_end(end + 1), len(self.code)) - as_start = self._find_word_start(as_end) - if self.code[as_start:as_end + 1] == 'as': - return True - except ValueError: - return False - - def get_from_aliased(self, offset): - try: - end = self._find_word_end(offset) - as_ = self._find_word_end(end + 1) - alias = self._find_word_end(as_ + 1) - start = self._find_word_start(alias) - return self.raw[start:alias + 1] - except ValueError: - pass - - def is_function_keyword_parameter(self, offset): - word_end = self._find_word_end(offset) - if word_end + 1 == len(self.code): - return False - next_char = self._find_first_non_space_char(word_end + 1) - equals = self.code[next_char:next_char + 2] - if equals == '==' or not 
equals.startswith('='): - return False - word_start = self._find_word_start(offset) - prev_char = self._find_last_non_space_char(word_start - 1) - return prev_char - 1 >= 0 and self.code[prev_char] in ',(' - - def is_on_function_call_keyword(self, offset): - stop = self._get_line_start(offset) - if self._is_id_char(offset): - offset = self._find_word_start(offset) - 1 - offset = self._find_last_non_space_char(offset) - if offset <= stop or self.code[offset] not in '(,': - return False - parens_start = self.find_parens_start_from_inside(offset) - return stop < parens_start - - def find_parens_start_from_inside(self, offset): - stop = self._get_line_start(offset) - opens = 1 - while offset > stop: - if self.code[offset] == '(': - break - if self.code[offset] != ',': - offset = self._find_primary_start(offset) - offset -= 1 - return max(stop, offset) - - def is_assigned_here(self, offset): - return self.get_assignment_type(offset) is not None - - def get_assignment_type(self, offset): - # XXX: does not handle tuple assignments - word_end = self._find_word_end(offset) - next_char = self._find_first_non_space_char(word_end + 1) - single = self.code[next_char:next_char + 1] - double = self.code[next_char:next_char + 2] - triple = self.code[next_char:next_char + 3] - if double not in ('==', '<=', '>=', '!='): - for op in [single, double, triple]: - if op.endswith('='): - return op - - def get_primary_range(self, offset): - start = self._find_primary_start(offset) - end = self._find_word_end(offset) + 1 - return (start, end) - - def get_word_range(self, offset): - offset = max(0, offset) - start = self._find_word_start(offset) - end = self._find_word_end(offset) + 1 - return (start, end) - - def get_word_parens_range(self, offset, opening='(', closing=')'): - end = self._find_word_end(offset) - start_parens = self.code.index(opening, end) - index = start_parens - open_count = 0 - while index < len(self.code): - if self.code[index] == opening: - open_count += 1 - if self.code[index] == closing: - open_count -= 1 - if open_count == 0: - return (start_parens, index + 1) - index += 1 - return (start_parens, index) - - def get_parameters(self, first, last): - keywords = [] - args = [] - current = self._find_last_non_space_char(last - 1) - while current > first: - primary_start = current - current = self._find_primary_start(current) - while current != first and self.code[current] not in '=,': - current = self._find_last_non_space_char(current - 1) - primary = self.raw[current + 1:primary_start + 1].strip() - if self.code[current] == '=': - primary_start = current - 1 - current -= 1 - while current != first and self.code[current] not in ',': - current = self._find_last_non_space_char(current - 1) - param_name = self.raw[current + 1:primary_start + 1].strip() - keywords.append((param_name, primary)) - else: - args.append(primary) - current = self._find_last_non_space_char(current - 1) - args.reverse() - keywords.reverse() - return args, keywords - - def is_assigned_in_a_tuple_assignment(self, offset): - start = self._get_line_start(offset) - end = self._get_line_end(offset) - primary_start = self._find_primary_start(offset) - primary_end = self._find_word_end(offset) - - prev_char_offset = self._find_last_non_space_char(primary_start - 1) - next_char_offset = self._find_first_non_space_char(primary_end + 1) - next_char = prev_char = '' - if prev_char_offset >= start: - prev_char = self.code[prev_char_offset] - if next_char_offset < end: - next_char = self.code[next_char_offset] - try: - equals_offset = 
self.code.index('=', start, end) - except ValueError: - return False - if prev_char not in '(,' and next_char not in ',)': - return False - parens_start = self.find_parens_start_from_inside(offset) - # XXX: only handling (x, y) = value - return offset < equals_offset and \ - self.code[start:parens_start].strip() == '' - - def get_function_and_args_in_header(self, offset): - offset = self.find_function_offset(offset) - lparens, rparens = self.get_word_parens_range(offset) - return self.raw[offset:rparens + 1] - - def find_function_offset(self, offset, definition='def '): - while True: - offset = self.code.index(definition, offset) - if offset == 0 or not self._is_id_char(offset - 1): - break - offset += 1 - def_ = offset + 4 - return self._find_first_non_space_char(def_) - - def get_lambda_and_args(self, offset): - offset = self.find_function_offset(offset, definition = 'lambda ') - lparens, rparens = self.get_word_parens_range(offset, opening=' ', closing=':') - return self.raw[offset:rparens + 1] - diff --git a/pymode/libs3/rope/contrib/__init__.py b/pymode/libs3/rope/contrib/__init__.py deleted file mode 100644 index 0d3f837e..00000000 --- a/pymode/libs3/rope/contrib/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -"""rope IDE tools package - -This package contains modules that can be used in IDEs -but do not depend on the UI. So these modules will be used -by `rope.ui` modules. - -""" diff --git a/pymode/libs3/rope/contrib/autoimport.py b/pymode/libs3/rope/contrib/autoimport.py deleted file mode 100644 index 4b7b5b05..00000000 --- a/pymode/libs3/rope/contrib/autoimport.py +++ /dev/null @@ -1,217 +0,0 @@ -import re - -from rope.base import (exceptions, pynames, resourceobserver, - taskhandle, pyobjects, builtins, resources) -from rope.refactor import importutils - - -class AutoImport(object): - """A class for finding the module that provides a name - - This class maintains a cache of global names in python modules. - Note that this cache is not accurate and might be out of date. - - """ - - def __init__(self, project, observe=True, underlined=False): - """Construct an AutoImport object - - If `observe` is `True`, listen for project changes and update - the cache. - - If `underlined` is `True`, underlined names are cached, too. - """ - self.project = project - self.underlined = underlined - self.names = project.data_files.read_data('globalnames') - if self.names is None: - self.names = {} - project.data_files.add_write_hook(self._write) - # XXX: using a filtered observer - observer = resourceobserver.ResourceObserver( - changed=self._changed, moved=self._moved, removed=self._removed) - if observe: - project.add_observer(observer) - - def import_assist(self, starting): - """Return a list of ``(name, module)`` tuples - - This function tries to find modules that have a global name - that starts with `starting`. - """ - # XXX: breaking if gave up! 
use generators - result = [] - for module in self.names: - for global_name in self.names[module]: - if global_name.startswith(starting): - result.append((global_name, module)) - return result - - def get_modules(self, name): - """Return the list of modules that have global `name`""" - result = [] - for module in self.names: - if name in self.names[module]: - result.append(module) - return result - - def get_all_names(self): - """Return the list of all cached global names""" - result = set() - for module in self.names: - result.update(set(self.names[module])) - return result - - def get_name_locations(self, name): - """Return a list of ``(resource, lineno)`` tuples""" - result = [] - pycore = self.project.pycore - for module in self.names: - if name in self.names[module]: - try: - pymodule = pycore.get_module(module) - if name in pymodule: - pyname = pymodule[name] - module, lineno = pyname.get_definition_location() - if module is not None: - resource = module.get_module().get_resource() - if resource is not None and lineno is not None: - result.append((resource, lineno)) - except exceptions.ModuleNotFoundError: - pass - return result - - def generate_cache(self, resources=None, underlined=None, - task_handle=taskhandle.NullTaskHandle()): - """Generate global name cache for project files - - If `resources` is a list of `rope.base.resource.File`\s, only - those files are searched; otherwise all python modules in the - project are cached. - - """ - if resources is None: - resources = self.project.pycore.get_python_files() - job_set = task_handle.create_jobset( - 'Generatig autoimport cache', len(resources)) - for file in resources: - job_set.started_job('Working on <%s>' % file.path) - self.update_resource(file, underlined) - job_set.finished_job() - - def generate_modules_cache(self, modules, underlined=None, - task_handle=taskhandle.NullTaskHandle()): - """Generate global name cache for modules listed in `modules`""" - job_set = task_handle.create_jobset( - 'Generatig autoimport cache for modules', len(modules)) - for modname in modules: - job_set.started_job('Working on <%s>' % modname) - if modname.endswith('.*'): - mod = self.project.pycore.find_module(modname[:-2]) - if mod: - for sub in submodules(mod): - self.update_resource(sub, underlined) - else: - self.update_module(modname, underlined) - job_set.finished_job() - - def clear_cache(self): - """Clear all entries in global-name cache - - It might be a good idea to use this function before - regenerating global names. 
- - """ - self.names.clear() - - def find_insertion_line(self, code): - """Guess at what line the new import should be inserted""" - match = re.search(r'^(def|class)\s+', code) - if match is not None: - code = code[:match.start()] - try: - pymodule = self.project.pycore.get_string_module(code) - except exceptions.ModuleSyntaxError: - return 1 - testmodname = '__rope_testmodule_rope' - importinfo = importutils.NormalImport(((testmodname, None),)) - module_imports = importutils.get_module_imports( - self.project.pycore, pymodule) - module_imports.add_import(importinfo) - code = module_imports.get_changed_source() - offset = code.index(testmodname) - lineno = code.count('\n', 0, offset) + 1 - return lineno - - def update_resource(self, resource, underlined=None): - """Update the cache for global names in `resource`""" - try: - pymodule = self.project.pycore.resource_to_pyobject(resource) - modname = self._module_name(resource) - self._add_names(pymodule, modname, underlined) - except exceptions.ModuleSyntaxError: - pass - - def update_module(self, modname, underlined=None): - """Update the cache for global names in `modname` module - - `modname` is the name of a module. - """ - try: - pymodule = self.project.pycore.get_module(modname) - self._add_names(pymodule, modname, underlined) - except exceptions.ModuleNotFoundError: - pass - - def _module_name(self, resource): - return self.project.pycore.modname(resource) - - def _add_names(self, pymodule, modname, underlined): - if underlined is None: - underlined = self.underlined - globals = [] - if isinstance(pymodule, pyobjects.PyDefinedObject): - attributes = pymodule._get_structural_attributes() - else: - attributes = pymodule.get_attributes() - for name, pyname in attributes.items(): - if not underlined and name.startswith('_'): - continue - if isinstance(pyname, (pynames.AssignedName, pynames.DefinedName)): - globals.append(name) - if isinstance(pymodule, builtins.BuiltinModule): - globals.append(name) - self.names[modname] = globals - - def _write(self): - self.project.data_files.write_data('globalnames', self.names) - - def _changed(self, resource): - if not resource.is_folder(): - self.update_resource(resource) - - def _moved(self, resource, newresource): - if not resource.is_folder(): - modname = self._module_name(resource) - if modname in self.names: - del self.names[modname] - self.update_resource(newresource) - - def _removed(self, resource): - if not resource.is_folder(): - modname = self._module_name(resource) - if modname in self.names: - del self.names[modname] - - -def submodules(mod): - if isinstance(mod, resources.File): - if mod.name.endswith('.py') and mod.name != '__init__.py': - return set([mod]) - return set() - if not mod.has_child('__init__.py'): - return set() - result = set([mod]) - for child in mod.get_children(): - result |= submodules(child) - return result diff --git a/pymode/libs3/rope/contrib/changestack.py b/pymode/libs3/rope/contrib/changestack.py deleted file mode 100644 index 70f2271f..00000000 --- a/pymode/libs3/rope/contrib/changestack.py +++ /dev/null @@ -1,52 +0,0 @@ -"""For performing many refactorings as a single command - -`changestack` module can be used to perform many refactorings on top -of each other as one bigger command. It can be used like:: - - stack = ChangeStack(project, 'my big command') - - #.. - stack.push(refactoring1.get_changes()) - #.. - stack.push(refactoring2.get_changes()) - #.. 
- stack.push(refactoringX.get_changes()) - - stack.pop_all() - changes = stack.merged() - -Now `changes` can be previewed or performed as before. -""" - -from rope.base import change - - -class ChangeStack(object): - - def __init__(self, project, description='merged changes'): - self.project = project - self.description = description - self.stack = [] - - def push(self, changes): - self.stack.append(changes) - self.project.do(changes) - - def pop_all(self): - for i in range(len(self.stack)): - self.project.history.undo(drop=True) - - def merged(self): - result = change.ChangeSet(self.description) - for changes in self.stack: - for c in self._basic_changes(changes): - result.add_change(c) - return result - - def _basic_changes(self, changes): - if isinstance(changes, change.ChangeSet): - for child in changes.changes: - for atom in self._basic_changes(child): - yield atom - else: - yield changes diff --git a/pymode/libs3/rope/contrib/codeassist.py b/pymode/libs3/rope/contrib/codeassist.py deleted file mode 100644 index 994f9fef..00000000 --- a/pymode/libs3/rope/contrib/codeassist.py +++ /dev/null @@ -1,648 +0,0 @@ -import keyword -import sys -import warnings - -import rope.base.codeanalyze -import rope.base.evaluate -from rope.base import pyobjects, pyobjectsdef, pynames, builtins, exceptions, worder -from rope.base.codeanalyze import SourceLinesAdapter -from rope.contrib import fixsyntax -from rope.refactor import functionutils - - -def code_assist(project, source_code, offset, resource=None, - templates=None, maxfixes=1, later_locals=True): - """Return python code completions as a list of `CodeAssistProposal`\s - - `resource` is a `rope.base.resources.Resource` object. If - provided, relative imports are handled. - - `maxfixes` is the maximum number of errors to fix if the code has - errors in it. - - If `later_locals` is `False` names defined in this scope and after - this line is ignored. - - """ - if templates is not None: - warnings.warn('Codeassist no longer supports templates', - DeprecationWarning, stacklevel=2) - assist = _PythonCodeAssist( - project, source_code, offset, resource=resource, - maxfixes=maxfixes, later_locals=later_locals) - return assist() - - -def starting_offset(source_code, offset): - """Return the offset in which the completion should be inserted - - Usually code assist proposals should be inserted like:: - - completion = proposal.name - result = (source_code[:starting_offset] + - completion + source_code[offset:]) - - Where starting_offset is the offset returned by this function. - - """ - word_finder = worder.Worder(source_code, True) - expression, starting, starting_offset = \ - word_finder.get_splitted_primary_before(offset) - return starting_offset - - -def get_doc(project, source_code, offset, resource=None, maxfixes=1): - """Get the pydoc""" - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is None: - return None - pyobject = pyname.get_object() - return PyDocExtractor().get_doc(pyobject) - - -def get_calltip(project, source_code, offset, resource=None, - maxfixes=1, ignore_unknown=False, remove_self=False): - """Get the calltip of a function - - The format of the returned string is - ``module_name.holding_scope_names.function_name(arguments)``. For - classes `__init__()` and for normal objects `__call__()` function - is used. - - Note that the offset is on the function itself *not* after the its - open parenthesis. 
(Actually it used to be the other way but it - was easily confused when string literals were involved. So I - decided it is better for it not to try to be too clever when it - cannot be clever enough). You can use a simple search like:: - - offset = source_code.rindex('(', 0, offset) - 1 - - to handle simple situations. - - If `ignore_unknown` is `True`, `None` is returned for functions - without source-code like builtins and extensions. - - If `remove_self` is `True`, the first parameter whose name is self - will be removed for methods. - """ - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is None: - return None - pyobject = pyname.get_object() - return PyDocExtractor().get_calltip(pyobject, ignore_unknown, remove_self) - - -def get_definition_location(project, source_code, offset, - resource=None, maxfixes=1): - """Return the definition location of the python name at `offset` - - Return a (`rope.base.resources.Resource`, lineno) tuple. If no - `resource` is given and the definition is inside the same module, - the first element of the returned tuple would be `None`. If the - location cannot be determined ``(None, None)`` is returned. - - """ - fixer = fixsyntax.FixSyntax(project.pycore, source_code, - resource, maxfixes) - pymodule = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is not None: - module, lineno = pyname.get_definition_location() - if module is not None: - return module.get_module().get_resource(), lineno - return (None, None) - - -def find_occurrences(*args, **kwds): - import rope.contrib.findit - warnings.warn('Use `rope.contrib.findit.find_occurrences()` instead', - DeprecationWarning, stacklevel=2) - return rope.contrib.findit.find_occurrences(*args, **kwds) - - -class CompletionProposal(object): - """A completion proposal - - The `scope` instance variable shows where proposed name came from - and can be 'global', 'local', 'builtin', 'attribute', 'keyword', - 'imported', 'parameter_keyword'. - - The `type` instance variable shows the approximate type of the - proposed object and can be 'instance', 'class', 'function', 'module', - and `None`. - - All possible relations between proposal's `scope` and `type` are shown - in the table below (different scopes in rows and types in columns): - - | instance | class | function | module | None - local | + | + | + | + | - global | + | + | + | + | - builtin | + | + | + | | - attribute | + | + | + | + | - imported | + | + | + | + | - keyword | | | | | + - parameter_keyword | | | | | + - - """ - - def __init__(self, name, scope, pyname=None): - self.name = name - self.pyname = pyname - self.scope = self._get_scope(scope) - - def __str__(self): - return '%s (%s, %s)' % (self.name, self.scope, self.type) - - def __repr__(self): - return str(self) - - @property - def parameters(self): - """The names of the parameters the function takes. - - Returns None if this completion is not a function. 
- """ - pyname = self.pyname - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - if isinstance(pyname, pynames.DefinedName): - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return pyobject.get_param_names() - - @property - def type(self): - pyname = self.pyname - if isinstance(pyname, builtins.BuiltinName): - pyobject = pyname.get_object() - if isinstance(pyobject, builtins.BuiltinFunction): - return 'function' - elif isinstance(pyobject, builtins.BuiltinClass): - clsobj = pyobject.builtin - return 'class' - elif isinstance(pyobject, builtins.BuiltinObject) or \ - isinstance(pyobject, builtins.BuiltinName): - return 'instance' - elif isinstance(pyname, pynames.ImportedModule): - return 'module' - elif isinstance(pyname, pynames.ImportedName) or \ - isinstance(pyname, pynames.DefinedName): - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - return 'function' - if isinstance(pyobject, pyobjects.AbstractClass): - return 'class' - return 'instance' - - def _get_scope(self, scope): - if isinstance(self.pyname, builtins.BuiltinName): - return 'builtin' - if isinstance(self.pyname, pynames.ImportedModule) or \ - isinstance(self.pyname, pynames.ImportedName): - return 'imported' - return scope - - def get_doc(self): - """Get the proposed object's docstring. - - Returns None if it can not be get. - """ - if not self.pyname: - return None - pyobject = self.pyname.get_object() - if not hasattr(pyobject, 'get_doc'): - return None - return self.pyname.get_object().get_doc() - - @property - def kind(self): - warnings.warn("the proposal's `kind` property is deprecated, " \ - "use `scope` instead") - return self.scope - - -# leaved for backward compatibility -CodeAssistProposal = CompletionProposal - - -class NamedParamProposal(CompletionProposal): - """A parameter keyword completion proposal - - Holds reference to ``_function`` -- the function which - parameter ``name`` belongs to. This allows to determine - default value for this parameter. - """ - def __init__(self, name, function): - self.argname = name - name = '%s=' % name - super(NamedParamProposal, self).__init__(name, 'parameter_keyword') - self._function = function - - def get_default(self): - """Get a string representation of a param's default value. - - Returns None if there is no default value for this param. - """ - definfo = functionutils.DefinitionInfo.read(self._function) - for arg, default in definfo.args_with_defaults: - if self.argname == arg: - return default - return None - - -def sorted_proposals(proposals, scopepref=None, typepref=None): - """Sort a list of proposals - - Return a sorted list of the given `CodeAssistProposal`\s. - - `scopepref` can be a list of proposal scopes. Defaults to - ``['parameter_keyword', 'local', 'global', 'imported', - 'attribute', 'builtin', 'keyword']``. - - `typepref` can be a list of proposal types. Defaults to - ``['class', 'function', 'instance', 'module', None]``. - (`None` stands for completions with no type like keywords.) - """ - sorter = _ProposalSorter(proposals, scopepref, typepref) - return sorter.get_sorted_proposal_list() - - -def starting_expression(source_code, offset): - """Return the expression to complete""" - word_finder = worder.Worder(source_code, True) - expression, starting, starting_offset = \ - word_finder.get_splitted_primary_before(offset) - if expression: - return expression + '.' 
+ starting - return starting - - -def default_templates(): - warnings.warn('default_templates() is deprecated.', - DeprecationWarning, stacklevel=2) - return {} - - -class _PythonCodeAssist(object): - - def __init__(self, project, source_code, offset, resource=None, - maxfixes=1, later_locals=True): - self.project = project - self.pycore = self.project.pycore - self.code = source_code - self.resource = resource - self.maxfixes = maxfixes - self.later_locals = later_locals - self.word_finder = worder.Worder(source_code, True) - self.expression, self.starting, self.offset = \ - self.word_finder.get_splitted_primary_before(offset) - - keywords = keyword.kwlist - - def _find_starting_offset(self, source_code, offset): - current_offset = offset - 1 - while current_offset >= 0 and (source_code[current_offset].isalnum() or - source_code[current_offset] in '_'): - current_offset -= 1; - return current_offset + 1 - - def _matching_keywords(self, starting): - result = [] - for kw in self.keywords: - if kw.startswith(starting): - result.append(CompletionProposal(kw, 'keyword')) - return result - - def __call__(self): - if self.offset > len(self.code): - return [] - completions = list(self._code_completions().values()) - if self.expression.strip() == '' and self.starting.strip() != '': - completions.extend(self._matching_keywords(self.starting)) - return completions - - def _dotted_completions(self, module_scope, holding_scope): - result = {} - found_pyname = rope.base.evaluate.eval_str(holding_scope, - self.expression) - if found_pyname is not None: - element = found_pyname.get_object() - compl_scope = 'attribute' - if isinstance(element, (pyobjectsdef.PyModule, - pyobjectsdef.PyPackage)): - compl_scope = 'imported' - for name, pyname in element.get_attributes().items(): - if name.startswith(self.starting): - result[name] = CompletionProposal(name, compl_scope, pyname) - return result - - def _undotted_completions(self, scope, result, lineno=None): - if scope.parent != None: - self._undotted_completions(scope.parent, result) - if lineno is None: - names = scope.get_propagated_names() - else: - names = scope.get_names() - for name, pyname in names.items(): - if name.startswith(self.starting): - compl_scope = 'local' - if scope.get_kind() == 'Module': - compl_scope = 'global' - if lineno is None or self.later_locals or \ - not self._is_defined_after(scope, pyname, lineno): - result[name] = CompletionProposal(name, compl_scope, - pyname) - - def _from_import_completions(self, pymodule): - module_name = self.word_finder.get_from_module(self.offset) - if module_name is None: - return {} - pymodule = self._find_module(pymodule, module_name) - result = {} - for name in pymodule: - if name.startswith(self.starting): - result[name] = CompletionProposal(name, scope='global', - pyname=pymodule[name]) - return result - - def _find_module(self, pymodule, module_name): - dots = 0 - while module_name[dots] == '.': - dots += 1 - pyname = pynames.ImportedModule(pymodule, - module_name[dots:], dots) - return pyname.get_object() - - def _is_defined_after(self, scope, pyname, lineno): - location = pyname.get_definition_location() - if location is not None and location[1] is not None: - if location[0] == scope.pyobject.get_module() and \ - lineno <= location[1] <= scope.get_end(): - return True - - def _code_completions(self): - lineno = self.code.count('\n', 0, self.offset) + 1 - fixer = fixsyntax.FixSyntax(self.pycore, self.code, - self.resource, self.maxfixes) - pymodule = fixer.get_pymodule() - module_scope = 
pymodule.get_scope() - code = pymodule.source_code - lines = code.split('\n') - result = {} - start = fixsyntax._logical_start(lines, lineno) - indents = fixsyntax._get_line_indents(lines[start - 1]) - inner_scope = module_scope.get_inner_scope_for_line(start, indents) - if self.word_finder.is_a_name_after_from_import(self.offset): - return self._from_import_completions(pymodule) - if self.expression.strip() != '': - result.update(self._dotted_completions(module_scope, inner_scope)) - else: - result.update(self._keyword_parameters(module_scope.pyobject, - inner_scope)) - self._undotted_completions(inner_scope, result, lineno=lineno) - return result - - def _keyword_parameters(self, pymodule, scope): - offset = self.offset - if offset == 0: - return {} - word_finder = worder.Worder(self.code, True) - lines = SourceLinesAdapter(self.code) - lineno = lines.get_line_number(offset) - if word_finder.is_on_function_call_keyword(offset - 1): - name_finder = rope.base.evaluate.ScopeNameFinder(pymodule) - function_parens = word_finder.\ - find_parens_start_from_inside(offset - 1) - primary = word_finder.get_primary_at(function_parens - 1) - try: - function_pyname = rope.base.evaluate.\ - eval_str(scope, primary) - except exceptions.BadIdentifierError as e: - return {} - if function_pyname is not None: - pyobject = function_pyname.get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - pass - elif isinstance(pyobject, pyobjects.AbstractClass) and \ - '__init__' in pyobject: - pyobject = pyobject['__init__'].get_object() - elif '__call__' in pyobject: - pyobject = pyobject['__call__'].get_object() - if isinstance(pyobject, pyobjects.AbstractFunction): - param_names = [] - param_names.extend( - pyobject.get_param_names(special_args=False)) - result = {} - for name in param_names: - if name.startswith(self.starting): - result[name + '='] = NamedParamProposal( - name, pyobject - ) - return result - return {} - - -class _ProposalSorter(object): - """Sort a list of code assist proposals""" - - def __init__(self, code_assist_proposals, scopepref=None, typepref=None): - self.proposals = code_assist_proposals - if scopepref is None: - scopepref = ['parameter_keyword', 'local', 'global', 'imported', - 'attribute', 'builtin', 'keyword'] - self.scopepref = scopepref - if typepref is None: - typepref = ['class', 'function', 'instance', 'module', None] - self.typerank = dict((type, index) - for index, type in enumerate(typepref)) - - def get_sorted_proposal_list(self): - """Return a list of `CodeAssistProposal`""" - proposals = {} - for proposal in self.proposals: - proposals.setdefault(proposal.scope, []).append(proposal) - result = [] - for scope in self.scopepref: - scope_proposals = proposals.get(scope, []) - scope_proposals = [proposal for proposal in scope_proposals - if proposal.type in self.typerank] - scope_proposals.sort(key = self._proposal_cmp) - result.extend(scope_proposals) - return result - - def _proposal_cmp(self, proposal): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - return (self.typerank.get(proposal.type, 100), underline_count(proposal.name), proposal.name) - - def _compare_underlined_names(self, name1, name2): - def underline_count(name): - result = 0 - while result < len(name) and name[result] == '_': - result += 1 - return result - underline_count1 = underline_count(name1) - underline_count2 = underline_count(name2) - if underline_count1 != underline_count2: - return cmp(underline_count1, 
underline_count2) - return cmp(name1, name2) - - -class PyDocExtractor(object): - - def get_doc(self, pyobject): - if isinstance(pyobject, pyobjects.AbstractFunction): - return self._get_function_docstring(pyobject) - elif isinstance(pyobject, pyobjects.AbstractClass): - return self._get_class_docstring(pyobject) - elif isinstance(pyobject, pyobjects.AbstractModule): - return self._trim_docstring(pyobject.get_doc()) - return None - - def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False): - try: - if isinstance(pyobject, pyobjects.AbstractClass): - pyobject = pyobject['__init__'].get_object() - if not isinstance(pyobject, pyobjects.AbstractFunction): - pyobject = pyobject['__call__'].get_object() - except exceptions.AttributeNotFoundError: - return None - if ignore_unknown and not isinstance(pyobject, pyobjects.PyFunction): - return - if isinstance(pyobject, pyobjects.AbstractFunction): - result = self._get_function_signature(pyobject, add_module=True) - if remove_self and self._is_method(pyobject): - return result.replace('(self)', '()').replace('(self, ', '(') - return result - - def _get_class_docstring(self, pyclass): - contents = self._trim_docstring(pyclass.get_doc(), 2) - supers = [super.get_name() for super in pyclass.get_superclasses()] - doc = 'class %s(%s):\n\n' % (pyclass.get_name(), ', '.join(supers)) + contents - - if '__init__' in pyclass: - init = pyclass['__init__'].get_object() - if isinstance(init, pyobjects.AbstractFunction): - doc += '\n\n' + self._get_single_function_docstring(init) - return doc - - def _get_function_docstring(self, pyfunction): - functions = [pyfunction] - if self._is_method(pyfunction): - functions.extend(self._get_super_methods(pyfunction.parent, - pyfunction.get_name())) - return '\n\n'.join([self._get_single_function_docstring(function) - for function in functions]) - - def _is_method(self, pyfunction): - return isinstance(pyfunction, pyobjects.PyFunction) and \ - isinstance(pyfunction.parent, pyobjects.PyClass) - - def _get_single_function_docstring(self, pyfunction): - signature = self._get_function_signature(pyfunction) - docs = self._trim_docstring(pyfunction.get_doc(), indents=2) - return signature + ':\n\n' + docs - - def _get_super_methods(self, pyclass, name): - result = [] - for super_class in pyclass.get_superclasses(): - if name in super_class: - function = super_class[name].get_object() - if isinstance(function, pyobjects.AbstractFunction): - result.append(function) - result.extend(self._get_super_methods(super_class, name)) - return result - - def _get_function_signature(self, pyfunction, add_module=False): - location = self._location(pyfunction, add_module) - if isinstance(pyfunction, pyobjects.PyFunction): - info = functionutils.DefinitionInfo.read(pyfunction) - return location + info.to_string() - else: - return '%s(%s)' % (location + pyfunction.get_name(), - ', '.join(pyfunction.get_param_names())) - - def _location(self, pyobject, add_module=False): - location = [] - parent = pyobject.parent - while parent and not isinstance(parent, pyobjects.AbstractModule): - location.append(parent.get_name()) - location.append('.') - parent = parent.parent - if add_module: - if isinstance(pyobject, pyobjects.PyFunction): - module = pyobject.get_module() - location.insert(0, self._get_module(pyobject)) - if isinstance(parent, builtins.BuiltinModule): - location.insert(0, parent.get_name() + '.') - return ''.join(location) - - def _get_module(self, pyfunction): - module = pyfunction.get_module() - if module is not None: - 
resource = module.get_resource() - if resource is not None: - return pyfunction.pycore.modname(resource) + '.' - return '' - - def _trim_docstring(self, docstring, indents=0): - """The sample code from :PEP:`257`""" - if not docstring: - return '' - # Convert tabs to spaces (following normal Python rules) - # and split into a list of lines: - lines = docstring.expandtabs().splitlines() - # Determine minimum indentation (first line doesn't count): - indent = sys.maxsize - for line in lines[1:]: - stripped = line.lstrip() - if stripped: - indent = min(indent, len(line) - len(stripped)) - # Remove indentation (first line is special): - trimmed = [lines[0].strip()] - if indent < sys.maxsize: - for line in lines[1:]: - trimmed.append(line[indent:].rstrip()) - # Strip off trailing and leading blank lines: - while trimmed and not trimmed[-1]: - trimmed.pop() - while trimmed and not trimmed[0]: - trimmed.pop(0) - # Return a single string: - return '\n'.join((' ' * indents + line for line in trimmed)) - - -# Deprecated classes - -class TemplateProposal(CodeAssistProposal): - def __init__(self, name, template): - warnings.warn('TemplateProposal is deprecated.', - DeprecationWarning, stacklevel=2) - super(TemplateProposal, self).__init__(name, 'template') - self.template = template - - -class Template(object): - - def __init__(self, template): - self.template = template - warnings.warn('Template is deprecated.', - DeprecationWarning, stacklevel=2) - - def variables(self): - return [] - - def substitute(self, mapping): - return self.template - - def get_cursor_location(self, mapping): - return len(self.template) diff --git a/pymode/libs3/rope/contrib/finderrors.py b/pymode/libs3/rope/contrib/finderrors.py deleted file mode 100644 index c8cf7e15..00000000 --- a/pymode/libs3/rope/contrib/finderrors.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Finding bad name and attribute accesses - -`find_errors` function can be used to find possible bad name and -attribute accesses. As an example:: - - errors = find_errors(project, project.get_resource('mod.py')) - for error in errors: - print '%s: %s' % (error.lineno, error.error) - -prints possible errors for ``mod.py`` file. - -TODO: - -* use task handles -* reporting names at most once -* attributes of extension modules that don't appear in - extension_modules project config can be ignored -* not calling `PyScope.get_inner_scope_for_line()` if it is a - bottleneck; needs profiling -* not reporting occurrences where rope cannot infer the object -* rope saves multiple objects for some of the names in its objectdb - use all of them not to give false positives -* ... ;-) - -""" -from rope.base import ast, evaluate, pyobjects - - -def find_errors(project, resource): - """Find possible bad name and attribute accesses - - It returns a list of `Error`\s. 
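As a hedged illustration (not part of the rope sources being removed here), the `find_errors` helper above is typically driven from an open rope project; the project root and the ``mod.py`` resource below are placeholder assumptions::

    # Sketch only: report suspicious name/attribute accesses in one module.
    from rope.base.project import Project
    from rope.contrib.finderrors import find_errors

    project = Project('.')                     # open a rope project in the cwd
    resource = project.get_resource('mod.py')  # module to analyse
    for error in find_errors(project, resource):
        # each Error exposes 'lineno' and a short 'error' message
        print('%s: %s' % (error.lineno, error.error))
    project.close()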
- """ - pymodule = project.pycore.resource_to_pyobject(resource) - finder = _BadAccessFinder(pymodule) - ast.walk(pymodule.get_ast(), finder) - return finder.errors - - -class _BadAccessFinder(object): - - def __init__(self, pymodule): - self.pymodule = pymodule - self.scope = pymodule.get_scope() - self.errors = [] - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.Param)): - return - scope = self.scope.get_inner_scope_for_line(node.lineno) - pyname = scope.lookup(node.id) - if pyname is None: - self._add_error(node, 'Unresolved variable') - elif self._is_defined_after(scope, pyname, node.lineno): - self._add_error(node, 'Defined later') - - def _Attribute(self, node): - if not isinstance(node.ctx, ast.Store): - scope = self.scope.get_inner_scope_for_line(node.lineno) - pyname = evaluate.eval_node(scope, node.value) - if pyname is not None and \ - pyname.get_object() != pyobjects.get_unknown(): - if node.attr not in pyname.get_object(): - self._add_error(node, 'Unresolved attribute') - ast.walk(node.value, self) - - def _add_error(self, node, msg): - if isinstance(node, ast.Attribute): - name = node.attr - else: - name = node.id - if name != 'None': - error = Error(node.lineno, msg + ' ' + name) - self.errors.append(error) - - def _is_defined_after(self, scope, pyname, lineno): - location = pyname.get_definition_location() - if location is not None and location[1] is not None: - if location[0] == self.pymodule and \ - lineno <= location[1] <= scope.get_end(): - return True - - -class Error(object): - - def __init__(self, lineno, error): - self.lineno = lineno - self.error = error - - def __str__(self): - return '%s: %s' % (self.lineno, self.error) diff --git a/pymode/libs3/rope/contrib/findit.py b/pymode/libs3/rope/contrib/findit.py deleted file mode 100644 index e8ddd7e5..00000000 --- a/pymode/libs3/rope/contrib/findit.py +++ /dev/null @@ -1,110 +0,0 @@ -import rope.base.codeanalyze -import rope.base.evaluate -import rope.base.pyobjects -from rope.base import taskhandle, exceptions, worder -from rope.contrib import fixsyntax -from rope.refactor import occurrences - - -def find_occurrences(project, resource, offset, unsure=False, resources=None, - in_hierarchy=False, task_handle=taskhandle.NullTaskHandle()): - """Return a list of `Location`\s - - If `unsure` is `True`, possible matches are returned, too. You - can use `Location.unsure` to see which are unsure occurrences. - `resources` can be a list of `rope.base.resource.File`\s that - should be searched for occurrences; if `None` all python files - in the project are searched. - - """ - name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) - primary, pyname = rope.base.evaluate.eval_location2( - this_pymodule, offset) - def is_match(occurrence): - return unsure - finder = occurrences.create_finder( - project.pycore, name, pyname, unsure=is_match, - in_hierarchy=in_hierarchy, instance=primary) - if resources is None: - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Finding Occurrences', - count=len(resources)) - return _find_locations(finder, resources, job_set) - - -def find_implementations(project, resource, offset, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Find the places a given method is overridden. - - Finds the places a method is implemented. Returns a list of - `Location`\s. 
- """ - name = worder.get_name_at(resource, offset) - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = rope.base.evaluate.eval_location(this_pymodule, offset) - if pyname is not None: - pyobject = pyname.get_object() - if not isinstance(pyobject, rope.base.pyobjects.PyFunction) or \ - pyobject.get_kind() != 'method': - raise exceptions.BadIdentifierError('Not a method!') - else: - raise exceptions.BadIdentifierError('Cannot resolve the identifier!') - def is_defined(occurrence): - if not occurrence.is_defined(): - return False - def not_self(occurrence): - if occurrence.get_pyname().get_object() == pyname.get_object(): - return False - filters = [is_defined, not_self, - occurrences.InHierarchyFilter(pyname, True)] - finder = occurrences.Finder(project.pycore, name, filters=filters) - if resources is None: - resources = project.pycore.get_python_files() - job_set = task_handle.create_jobset('Finding Implementations', - count=len(resources)) - return _find_locations(finder, resources, job_set) - - -def find_definition(project, code, offset, resource=None, maxfixes=1): - """Return the definition location of the python name at `offset` - - A `Location` object is returned if the definition location can be - determined, otherwise ``None`` is returned. - """ - fixer = fixsyntax.FixSyntax(project.pycore, code, resource, maxfixes) - main_module = fixer.get_pymodule() - pyname = fixer.pyname_at(offset) - if pyname is not None: - module, lineno = pyname.get_definition_location() - name = rope.base.worder.Worder(code).get_word_at(offset) - if lineno is not None: - start = module.lines.get_line_start(lineno) - def check_offset(occurrence): - if occurrence.offset < start: - return False - pyname_filter = occurrences.PyNameFilter(pyname) - finder = occurrences.Finder(project.pycore, name, - [check_offset, pyname_filter]) - for occurrence in finder.find_occurrences(pymodule=module): - return Location(occurrence) - - -class Location(object): - - def __init__(self, occurrence): - self.resource = occurrence.resource - self.region = occurrence.get_word_range() - self.offset = self.region[0] - self.unsure = occurrence.is_unsure() - self.lineno = occurrence.lineno - - -def _find_locations(finder, resources, job_set): - result = [] - for resource in resources: - job_set.started_job(resource.path) - for occurrence in finder.find_occurrences(resource): - result.append(Location(occurrence)) - job_set.finished_job() - return result diff --git a/pymode/libs3/rope/contrib/fixmodnames.py b/pymode/libs3/rope/contrib/fixmodnames.py deleted file mode 100644 index 7092f131..00000000 --- a/pymode/libs3/rope/contrib/fixmodnames.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Fix the name of modules - -This module is useful when you want to rename many of the modules in -your project. That can happen specially when you want to change their -naming style. - -For instance:: - - fixer = FixModuleNames(project) - changes = fixer.get_changes(fixer=str.lower) - project.do(changes) - -Here it renames all modules and packages to use lower-cased chars. -You can tell it to use any other style by using the ``fixer`` -argument. - -""" -from rope.base import change, taskhandle -from rope.contrib import changestack -from rope.refactor import rename - - -class FixModuleNames(object): - - def __init__(self, project): - self.project = project - - def get_changes(self, fixer=str.lower, - task_handle=taskhandle.NullTaskHandle()): - """Fix module names - - `fixer` is a function that takes and returns a `str`. 
Given - the name of a module, it should return the fixed name. - - """ - stack = changestack.ChangeStack(self.project, 'Fixing module names') - jobset = task_handle.create_jobset('Fixing module names', - self._count_fixes(fixer) + 1) - try: - while True: - for resource in self._tobe_fixed(fixer): - jobset.started_job(resource.path) - renamer = rename.Rename(self.project, resource) - changes = renamer.get_changes(fixer(self._name(resource))) - stack.push(changes) - jobset.finished_job() - break - else: - break - finally: - jobset.started_job('Reverting to original state') - stack.pop_all() - jobset.finished_job() - return stack.merged() - - def _count_fixes(self, fixer): - return len(list(self._tobe_fixed(fixer))) - - def _tobe_fixed(self, fixer): - for resource in self.project.pycore.get_python_files(): - modname = self._name(resource) - if modname != fixer(modname): - yield resource - - def _name(self, resource): - modname = resource.name.rsplit('.', 1)[0] - if modname == '__init__': - modname = resource.parent.name - return modname diff --git a/pymode/libs3/rope/contrib/fixsyntax.py b/pymode/libs3/rope/contrib/fixsyntax.py deleted file mode 100644 index f7667e92..00000000 --- a/pymode/libs3/rope/contrib/fixsyntax.py +++ /dev/null @@ -1,178 +0,0 @@ -import rope.base.codeanalyze -import rope.base.evaluate -from rope.base import worder, exceptions, utils -from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder - - -class FixSyntax(object): - - def __init__(self, pycore, code, resource, maxfixes=1): - self.pycore = pycore - self.code = code - self.resource = resource - self.maxfixes = maxfixes - - @utils.saveit - def get_pymodule(self): - """Get a `PyModule`""" - errors = [] - code = self.code - tries = 0 - while True: - try: - if tries == 0 and self.resource is not None and \ - self.resource.read() == code: - return self.pycore.resource_to_pyobject(self.resource, - force_errors=True) - return self.pycore.get_string_module( - code, resource=self.resource, force_errors=True) - except exceptions.ModuleSyntaxError as e: - if tries < self.maxfixes: - tries += 1 - self.commenter.comment(e.lineno) - code = '\n'.join(self.commenter.lines) - errors.append(' * line %s: %s ... fixed' % (e.lineno, - e.message_)) - else: - errors.append(' * line %s: %s ... raised!' 
% (e.lineno, - e.message_)) - new_message = ('\nSyntax errors in file %s:\n' % e.filename) \ - + '\n'.join(errors) - raise exceptions.ModuleSyntaxError(e.filename, e.lineno, - new_message) - - @property - @utils.saveit - def commenter(self): - return _Commenter(self.code) - - def pyname_at(self, offset): - pymodule = self.get_pymodule() - def old_pyname(): - word_finder = worder.Worder(self.code, True) - expression = word_finder.get_primary_at(offset) - expression = expression.replace('\\\n', ' ').replace('\n', ' ') - lineno = self.code.count('\n', 0, offset) - scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - return rope.base.evaluate.eval_str(scope, expression) - new_code = pymodule.source_code - def new_pyname(): - newoffset = self.commenter.transfered_offset(offset) - return rope.base.evaluate.eval_location(pymodule, newoffset) - if new_code.startswith(self.code[:offset + 1]): - return new_pyname() - result = old_pyname() - if result is None: - return new_pyname() - return result - - -class _Commenter(object): - - def __init__(self, code): - self.code = code - self.lines = self.code.split('\n') - self.lines.append('\n') - self.origs = list(range(len(self.lines) + 1)) - self.diffs = [0] * (len(self.lines) + 1) - - def comment(self, lineno): - start = _logical_start(self.lines, lineno, check_prev=True) - 1 - # using self._get_stmt_end() instead of self._get_block_end() - # to lower commented lines - end = self._get_stmt_end(start) - indents = _get_line_indents(self.lines[start]) - if 0 < start: - last_lineno = self._last_non_blank(start - 1) - last_line = self.lines[last_lineno] - if last_line.rstrip().endswith(':'): - indents = _get_line_indents(last_line) + 4 - self._set(start, ' ' * indents + 'pass') - for line in range(start + 1, end + 1): - self._set(line, self.lines[start]) - self._fix_incomplete_try_blocks(lineno, indents) - - def transfered_offset(self, offset): - lineno = self.code.count('\n', 0, offset) - diff = sum(self.diffs[:lineno]) - return offset + diff - - def _last_non_blank(self, start): - while start > 0 and self.lines[start].strip() == '': - start -= 1 - return start - - def _get_block_end(self, lineno): - end_line = lineno - base_indents = _get_line_indents(self.lines[lineno]) - for i in range(lineno + 1, len(self.lines)): - if _get_line_indents(self.lines[i]) >= base_indents: - end_line = i - else: - break - return end_line - - def _get_stmt_end(self, lineno): - end_line = lineno - base_indents = _get_line_indents(self.lines[lineno]) - for i in range(lineno + 1, len(self.lines)): - if _get_line_indents(self.lines[i]) <= base_indents: - return i - 1 - return lineno - - def _fix_incomplete_try_blocks(self, lineno, indents): - block_start = lineno - last_indents = current_indents = indents - while block_start > 0: - block_start = rope.base.codeanalyze.get_block_start( - ArrayLinesAdapter(self.lines), block_start) - 1 - if self.lines[block_start].strip().startswith('try:'): - indents = _get_line_indents(self.lines[block_start]) - if indents > last_indents: - continue - last_indents = indents - block_end = self._find_matching_deindent(block_start) - line = self.lines[block_end].strip() - if not (line.startswith('finally:') or - line.startswith('except ') or - line.startswith('except:')): - self._insert(block_end, ' ' * indents + 'finally:') - self._insert(block_end + 1, ' ' * indents + ' pass') - - def _find_matching_deindent(self, line_number): - indents = _get_line_indents(self.lines[line_number]) - current_line = line_number + 1 - while current_line < 
len(self.lines): - line = self.lines[current_line] - if not line.strip().startswith('#') and not line.strip() == '': - # HACK: We should have used logical lines here - if _get_line_indents(self.lines[current_line]) <= indents: - return current_line - current_line += 1 - return len(self.lines) - 1 - - def _set(self, lineno, line): - self.diffs[self.origs[lineno]] += len(line) - len(self.lines[lineno]) - self.lines[lineno] = line - - def _insert(self, lineno, line): - self.diffs[self.origs[lineno]] += len(line) + 1 - self.origs.insert(lineno, self.origs[lineno]) - self.lines.insert(lineno, line) - -def _logical_start(lines, lineno, check_prev=False): - logical_finder = LogicalLineFinder(ArrayLinesAdapter(lines)) - if check_prev: - prev = lineno - 1 - while prev > 0: - start, end = logical_finder.logical_line_in(prev) - if end is None or start <= lineno < end: - return start - if start <= prev: - break - prev -= 1 - return logical_finder.logical_line_in(lineno)[0] - - -def _get_line_indents(line): - return rope.base.codeanalyze.count_line_indents(line) diff --git a/pymode/libs3/rope/contrib/generate.py b/pymode/libs3/rope/contrib/generate.py deleted file mode 100644 index 4d850da0..00000000 --- a/pymode/libs3/rope/contrib/generate.py +++ /dev/null @@ -1,355 +0,0 @@ -import rope.base.evaluate -from rope.base import change, pyobjects, exceptions, pynames, worder, codeanalyze -from rope.refactor import sourceutils, importutils, functionutils, suites - - -def create_generate(kind, project, resource, offset): - """A factory for creating `Generate` objects - - `kind` can be 'variable', 'function', 'class', 'module' or - 'package'. - - """ - generate = eval('Generate' + kind.title()) - return generate(project, resource, offset) - - -def create_module(project, name, sourcefolder=None): - """Creates a module and returns a `rope.base.resources.File`""" - if sourcefolder is None: - sourcefolder = project.root - packages = name.split('.') - parent = sourcefolder - for package in packages[:-1]: - parent = parent.get_child(package) - return parent.create_file(packages[-1] + '.py') - -def create_package(project, name, sourcefolder=None): - """Creates a package and returns a `rope.base.resources.Folder`""" - if sourcefolder is None: - sourcefolder = project.root - packages = name.split('.') - parent = sourcefolder - for package in packages[:-1]: - parent = parent.get_child(package) - made_packages = parent.create_folder(packages[-1]) - made_packages.create_file('__init__.py') - return made_packages - - -class _Generate(object): - - def __init__(self, project, resource, offset): - self.project = project - self.resource = resource - self.info = self._generate_info(project, resource, offset) - self.name = self.info.get_name() - self._check_exceptional_conditions() - - def _generate_info(self, project, resource, offset): - return _GenerationInfo(project.pycore, resource, offset) - - def _check_exceptional_conditions(self): - if self.info.element_already_exists(): - raise exceptions.RefactoringError( - 'Element <%s> already exists.' % self.name) - if not self.info.primary_is_found(): - raise exceptions.RefactoringError( - 'Cannot determine the scope <%s> should be defined in.' 
% self.name) - - def get_changes(self): - changes = change.ChangeSet('Generate %s <%s>' % - (self._get_element_kind(), self.name)) - indents = self.info.get_scope_indents() - blanks = self.info.get_blank_lines() - base_definition = sourceutils.fix_indentation(self._get_element(), indents) - definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1] - - resource = self.info.get_insertion_resource() - start, end = self.info.get_insertion_offsets() - - collector = codeanalyze.ChangeCollector(resource.read()) - collector.add_change(start, end, definition) - changes.add_change(change.ChangeContents( - resource, collector.get_changed())) - return changes - - def get_location(self): - return (self.info.get_insertion_resource(), - self.info.get_insertion_lineno()) - - def _get_element_kind(self): - raise NotImplementedError() - - def _get_element(self): - raise NotImplementedError() - - -class GenerateFunction(_Generate): - - def _generate_info(self, project, resource, offset): - return _FunctionGenerationInfo(project.pycore, resource, offset) - - def _get_element(self): - decorator = '' - args = [] - if self.info.is_static_method(): - decorator = '@staticmethod\n' - if self.info.is_method() or self.info.is_constructor() or \ - self.info.is_instance(): - args.append('self') - args.extend(self.info.get_passed_args()) - definition = '%sdef %s(%s):\n pass\n' % (decorator, self.name, - ', '.join(args)) - return definition - - def _get_element_kind(self): - return 'Function' - - -class GenerateVariable(_Generate): - - def _get_element(self): - return '%s = None\n' % self.name - - def _get_element_kind(self): - return 'Variable' - - -class GenerateClass(_Generate): - - def _get_element(self): - return 'class %s(object):\n pass\n' % self.name - - def _get_element_kind(self): - return 'Class' - - -class GenerateModule(_Generate): - - def get_changes(self): - package = self.info.get_package() - changes = change.ChangeSet('Generate Module <%s>' % self.name) - new_resource = self.project.get_file('%s/%s.py' % (package.path, self.name)) - if new_resource.exists(): - raise exceptions.RefactoringError( - 'Module <%s> already exists' % new_resource.path) - changes.add_change(change.CreateResource(new_resource)) - changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) - return changes - - def get_location(self): - package = self.info.get_package() - return (package.get_child('%s.py' % self.name) , 1) - - -class GeneratePackage(_Generate): - - def get_changes(self): - package = self.info.get_package() - changes = change.ChangeSet('Generate Package <%s>' % self.name) - new_resource = self.project.get_folder('%s/%s' % (package.path, self.name)) - if new_resource.exists(): - raise exceptions.RefactoringError( - 'Package <%s> already exists' % new_resource.path) - changes.add_change(change.CreateResource(new_resource)) - changes.add_change(_add_import_to_module( - self.project.pycore, self.resource, new_resource)) - child = self.project.get_folder(package.path + '/' + self.name) - changes.add_change(change.CreateFile(child, '__init__.py')) - return changes - - def get_location(self): - package = self.info.get_package() - child = package.get_child(self.name) - return (child.get_child('__init__.py') , 1) - - -def _add_import_to_module(pycore, resource, imported): - pymodule = pycore.resource_to_pyobject(resource) - import_tools = importutils.ImportTools(pycore) - module_imports = import_tools.module_imports(pymodule) - module_name = pycore.modname(imported) - new_import = 
importutils.NormalImport(((module_name, None), )) - module_imports.add_import(new_import) - return change.ChangeContents(resource, module_imports.get_changed_source()) - - -class _GenerationInfo(object): - - def __init__(self, pycore, resource, offset): - self.pycore = pycore - self.resource = resource - self.offset = offset - self.source_pymodule = self.pycore.resource_to_pyobject(resource) - finder = rope.base.evaluate.ScopeNameFinder(self.source_pymodule) - self.primary, self.pyname = finder.get_primary_and_pyname_at(offset) - self._init_fields() - - def _init_fields(self): - self.source_scope = self._get_source_scope() - self.goal_scope = self._get_goal_scope() - self.goal_pymodule = self._get_goal_module(self.goal_scope) - - def _get_goal_scope(self): - if self.primary is None: - return self._get_source_scope() - pyobject = self.primary.get_object() - if isinstance(pyobject, pyobjects.PyDefinedObject): - return pyobject.get_scope() - elif isinstance(pyobject.get_type(), pyobjects.PyClass): - return pyobject.get_type().get_scope() - - def _get_goal_module(self, scope): - if scope is None: - return - while scope.parent is not None: - scope = scope.parent - return scope.pyobject - - def _get_source_scope(self): - module_scope = self.source_pymodule.get_scope() - lineno = self.source_pymodule.lines.get_line_number(self.offset) - return module_scope.get_inner_scope_for_line(lineno) - - def get_insertion_lineno(self): - lines = self.goal_pymodule.lines - if self.goal_scope == self.source_scope: - line_finder = self.goal_pymodule.logical_lines - lineno = lines.get_line_number(self.offset) - lineno = line_finder.logical_line_in(lineno)[0] - root = suites.ast_suite_tree(self.goal_scope.pyobject.get_ast()) - suite = root.find_suite(lineno) - indents = sourceutils.get_indents(lines, lineno) - while self.get_scope_indents() < indents: - lineno = suite.get_start() - indents = sourceutils.get_indents(lines, lineno) - suite = suite.parent - return lineno - else: - return min(self.goal_scope.get_end() + 1, lines.length()) - - def get_insertion_resource(self): - return self.goal_pymodule.get_resource() - - def get_insertion_offsets(self): - if self.goal_scope.get_kind() == 'Class': - start, end = sourceutils.get_body_region(self.goal_scope.pyobject) - if self.goal_pymodule.source_code[start:end].strip() == 'pass': - return start, end - lines = self.goal_pymodule.lines - start = lines.get_line_start(self.get_insertion_lineno()) - return (start, start) - - def get_scope_indents(self): - if self.goal_scope.get_kind() == 'Module': - return 0 - return sourceutils.get_indents(self.goal_pymodule.lines, - self.goal_scope.get_start()) + 4 - - def get_blank_lines(self): - if self.goal_scope.get_kind() == 'Module': - base_blanks = 2 - if self.goal_pymodule.source_code.strip() == '': - base_blanks = 0 - if self.goal_scope.get_kind() == 'Class': - base_blanks = 1 - if self.goal_scope.get_kind() == 'Function': - base_blanks = 0 - if self.goal_scope == self.source_scope: - return (0, base_blanks) - return (base_blanks, 0) - - def get_package(self): - primary = self.primary - if self.primary is None: - return self.pycore.get_source_folders()[0] - if isinstance(primary.get_object(), pyobjects.PyPackage): - return primary.get_object().get_resource() - raise exceptions.RefactoringError( - 'A module/package can be only created in a package.') - - def primary_is_found(self): - return self.goal_scope is not None - - def element_already_exists(self): - if self.pyname is None or isinstance(self.pyname, pynames.UnboundName): 
- return False - return self.get_name() in self.goal_scope.get_defined_names() - - def get_name(self): - return worder.get_name_at(self.resource, self.offset) - - -class _FunctionGenerationInfo(_GenerationInfo): - - def _get_goal_scope(self): - if self.is_constructor(): - return self.pyname.get_object().get_scope() - if self.is_instance(): - return self.pyname.get_object().get_type().get_scope() - if self.primary is None: - return self._get_source_scope() - pyobject = self.primary.get_object() - if isinstance(pyobject, pyobjects.PyDefinedObject): - return pyobject.get_scope() - elif isinstance(pyobject.get_type(), pyobjects.PyClass): - return pyobject.get_type().get_scope() - - def element_already_exists(self): - if self.pyname is None or isinstance(self.pyname, pynames.UnboundName): - return False - return self.get_name() in self.goal_scope.get_defined_names() - - def is_static_method(self): - return self.primary is not None and \ - isinstance(self.primary.get_object(), pyobjects.PyClass) - - def is_method(self): - return self.primary is not None and \ - isinstance(self.primary.get_object().get_type(), pyobjects.PyClass) - - def is_constructor(self): - return self.pyname is not None and \ - isinstance(self.pyname.get_object(), pyobjects.PyClass) - - def is_instance(self): - if self.pyname is None: - return False - pyobject = self.pyname.get_object() - return isinstance(pyobject.get_type(), pyobjects.PyClass) - - def get_name(self): - if self.is_constructor(): - return '__init__' - if self.is_instance(): - return '__call__' - return worder.get_name_at(self.resource, self.offset) - - def get_passed_args(self): - result = [] - source = self.source_pymodule.source_code - finder = worder.Worder(source) - if finder.is_a_function_being_called(self.offset): - start, end = finder.get_primary_range(self.offset) - parens_start, parens_end = finder.get_word_parens_range(end - 1) - call = source[start:parens_end] - parser = functionutils._FunctionParser(call, False) - args, keywords = parser.get_parameters() - for arg in args: - if self._is_id(arg): - result.append(arg) - else: - result.append('arg%d' % len(result)) - for name, value in keywords: - result.append(name) - return result - - def _is_id(self, arg): - def id_or_underline(c): - return c.isalpha() or c == '_' - for c in arg: - if not id_or_underline(c) and not c.isdigit(): - return False - return id_or_underline(arg[0]) diff --git a/pymode/libs3/rope/refactor/__init__.py b/pymode/libs3/rope/refactor/__init__.py deleted file mode 100644 index 10d734c3..00000000 --- a/pymode/libs3/rope/refactor/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -"""rope refactor package - -This package contains modules that perform python refactorings. -Refactoring classes perform refactorings in 4 steps: - -1. Collect some data for performing the refactoring and use them - to construct a refactoring class. Like:: - - renamer = Rename(project, resource, offset) - -2. Some refactorings give you useful information about the - refactoring after their construction. Like:: - - print(renamer.get_old_name()) - -3. Give the refactoring class more information about how to - perform the refactoring and get the changes this refactoring is - going to make. This is done by calling `get_changes` method of the - refactoring class. Like:: - - changes = renamer.get_changes(new_name) - -4. You can commit the changes. Like:: - - project.do(changes) - -These steps are like the steps IDEs usually do for performing a -refactoring. These are the things an IDE does in each step: - -1. 
Construct a refactoring object by giving it information like - resource, offset and ... . Some of the refactoring problems (like - performing rename refactoring on language keywords) can be reported - here. -2. Print some information about the refactoring and ask the user - about the information that are necessary for completing the - refactoring (like new name). -3. Call the `get_changes` by passing it information asked from - the user (if necessary) and get and preview the changes returned by - it. -4. perform the refactoring. - -From ``0.5m5`` release the `get_changes()` method of some time- -consuming refactorings take an optional `rope.base.taskhandle. -TaskHandle` parameter. You can use this object for stopping or -monitoring the progress of refactorings. - -""" -from rope.refactor.importutils import ImportOrganizer -from rope.refactor.topackage import ModuleToPackage - - -__all__ = ['rename', 'move', 'inline', 'extract', 'restructure', 'topackage', - 'importutils', 'usefunction', 'change_signature', - 'encapsulate_field', 'introduce_factory', 'introduce_parameter', - 'localtofield', 'method_object', 'multiproject'] diff --git a/pymode/libs3/rope/refactor/change_signature.py b/pymode/libs3/rope/refactor/change_signature.py deleted file mode 100644 index e7ab25a9..00000000 --- a/pymode/libs3/rope/refactor/change_signature.py +++ /dev/null @@ -1,340 +0,0 @@ -import copy - -import rope.base.exceptions -from rope.base import pyobjects, taskhandle, evaluate, worder, codeanalyze, utils -from rope.base.change import ChangeContents, ChangeSet -from rope.refactor import occurrences, functionutils - - -class ChangeSignature(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self._set_name_and_pyname() - if self.pyname is None or self.pyname.get_object() is None or \ - not isinstance(self.pyname.get_object(), pyobjects.PyFunction): - raise rope.base.exceptions.RefactoringError( - 'Change method signature should be performed on functions') - - def _set_name_and_pyname(self): - self.name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - self.primary, self.pyname = evaluate.eval_location2( - this_pymodule, self.offset) - if self.pyname is None: - return - pyobject = self.pyname.get_object() - if isinstance(pyobject, pyobjects.PyClass) and \ - '__init__' in pyobject: - self.pyname = pyobject['__init__'] - self.name = '__init__' - pyobject = self.pyname.get_object() - self.others = None - if self.name == '__init__' and \ - isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - pyclass = pyobject.parent - self.others = (pyclass.get_name(), - pyclass.parent[pyclass.get_name()]) - - def _change_calls(self, call_changer, in_hierarchy=None, resources=None, - handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Changing signature of <%s>' % self.name) - job_set = handle.create_jobset('Collecting Changes', len(resources)) - finder = occurrences.create_finder( - self.pycore, self.name, self.pyname, instance=self.primary, - in_hierarchy=in_hierarchy and self.is_method()) - if self.others: - name, pyname = self.others - constructor_finder = occurrences.create_finder( - self.pycore, name, pyname, only_calls=True) - finder = _MultipleFinders([finder, constructor_finder]) - for file in resources: - job_set.started_job(file.path) - 
change_calls = _ChangeCallsInModule( - self.pycore, finder, file, call_changer) - changed_file = change_calls.get_changed_module() - if changed_file is not None: - changes.add_change(ChangeContents(file, changed_file)) - job_set.finished_job() - return changes - - def get_args(self): - """Get function arguments. - - Return a list of ``(name, default)`` tuples for all but star - and double star arguments. For arguments that don't have a - default, `None` will be used. - """ - return self._definfo().args_with_defaults - - def is_method(self): - pyfunction = self.pyname.get_object() - return isinstance(pyfunction.parent, pyobjects.PyClass) - - @utils.deprecated('Use `ChangeSignature.get_args()` instead') - def get_definition_info(self): - return self._definfo() - - def _definfo(self): - return functionutils.DefinitionInfo.read(self.pyname.get_object()) - - @utils.deprecated() - def normalize(self): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentNormalizer()]) - return self._change_calls(changer) - - @utils.deprecated() - def remove(self, index): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentRemover(index)]) - return self._change_calls(changer) - - @utils.deprecated() - def add(self, index, name, default=None, value=None): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentAdder(index, name, default, value)]) - return self._change_calls(changer) - - @utils.deprecated() - def inline_default(self, index): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentDefaultInliner(index)]) - return self._change_calls(changer) - - @utils.deprecated() - def reorder(self, new_ordering): - changer = _FunctionChangers( - self.pyname.get_object(), self.get_definition_info(), - [ArgumentReorderer(new_ordering)]) - return self._change_calls(changer) - - def get_changes(self, changers, in_hierarchy=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get changes caused by this refactoring - - `changers` is a list of `_ArgumentChanger`\s. If `in_hierarchy` - is `True` the changers are applyed to all matching methods in - the class hierarchy. - `resources` can be a list of `rope.base.resource.File`\s that - should be searched for occurrences; if `None` all python files - in the project are searched. 
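One plausible way to drive ``get_changes`` with the argument changers defined further down in this module; the resource name, offset and parameter indices below are placeholder assumptions::

    # Sketch only: drop the second parameter and add a 'timeout' keyword.
    from rope.base.project import Project
    from rope.refactor.change_signature import (
        ChangeSignature, ArgumentAdder, ArgumentRemover)

    project = Project('.')
    resource = project.get_resource('mod.py')
    offset = resource.read().index('def compute') + len('def ')  # cursor on the function name

    signature = ChangeSignature(project, resource, offset)
    # Changers are applied in the order given.
    changers = [ArgumentRemover(1), ArgumentAdder(1, 'timeout', default='30')]
    project.do(signature.get_changes(changers))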
- - """ - function_changer = _FunctionChangers(self.pyname.get_object(), - self._definfo(), changers) - return self._change_calls(function_changer, in_hierarchy, - resources, task_handle) - - -class _FunctionChangers(object): - - def __init__(self, pyfunction, definition_info, changers=None): - self.pyfunction = pyfunction - self.definition_info = definition_info - self.changers = changers - self.changed_definition_infos = self._get_changed_definition_infos() - - def _get_changed_definition_infos(self): - result = [] - definition_info = self.definition_info - result.append(definition_info) - for changer in self.changers: - definition_info = copy.deepcopy(definition_info) - changer.change_definition_info(definition_info) - result.append(definition_info) - return result - - def change_definition(self, call): - return self.changed_definition_infos[-1].to_string() - - def change_call(self, primary, pyname, call): - call_info = functionutils.CallInfo.read( - primary, pyname, self.definition_info, call) - mapping = functionutils.ArgumentMapping(self.definition_info, call_info) - - for definition_info, changer in zip(self.changed_definition_infos, self.changers): - changer.change_argument_mapping(definition_info, mapping) - - return mapping.to_call_info(self.changed_definition_infos[-1]).to_string() - - -class _ArgumentChanger(object): - - def change_definition_info(self, definition_info): - pass - - def change_argument_mapping(self, definition_info, argument_mapping): - pass - - -class ArgumentNormalizer(_ArgumentChanger): - pass - - -class ArgumentRemover(_ArgumentChanger): - - def __init__(self, index): - self.index = index - - def change_definition_info(self, call_info): - if self.index < len(call_info.args_with_defaults): - del call_info.args_with_defaults[self.index] - elif self.index == len(call_info.args_with_defaults) and \ - call_info.args_arg is not None: - call_info.args_arg = None - elif (self.index == len(call_info.args_with_defaults) and - call_info.args_arg is None and call_info.keywords_arg is not None) or \ - (self.index == len(call_info.args_with_defaults) + 1 and - call_info.args_arg is not None and call_info.keywords_arg is not None): - call_info.keywords_arg = None - - def change_argument_mapping(self, definition_info, mapping): - if self.index < len(definition_info.args_with_defaults): - name = definition_info.args_with_defaults[0] - if name in mapping.param_dict: - del mapping.param_dict[name] - - -class ArgumentAdder(_ArgumentChanger): - - def __init__(self, index, name, default=None, value=None): - self.index = index - self.name = name - self.default = default - self.value = value - - def change_definition_info(self, definition_info): - for pair in definition_info.args_with_defaults: - if pair[0] == self.name: - raise rope.base.exceptions.RefactoringError( - 'Adding duplicate parameter: <%s>.' 
% self.name) - definition_info.args_with_defaults.insert(self.index, - (self.name, self.default)) - - def change_argument_mapping(self, definition_info, mapping): - if self.value is not None: - mapping.param_dict[self.name] = self.value - - -class ArgumentDefaultInliner(_ArgumentChanger): - - def __init__(self, index): - self.index = index - self.remove = False - - def change_definition_info(self, definition_info): - if self.remove: - definition_info.args_with_defaults[self.index] = \ - (definition_info.args_with_defaults[self.index][0], None) - - def change_argument_mapping(self, definition_info, mapping): - default = definition_info.args_with_defaults[self.index][1] - name = definition_info.args_with_defaults[self.index][0] - if default is not None and name not in mapping.param_dict: - mapping.param_dict[name] = default - - -class ArgumentReorderer(_ArgumentChanger): - - def __init__(self, new_order, autodef=None): - """Construct an `ArgumentReorderer` - - Note that the `new_order` is a list containing the new - position of parameters; not the position each parameter - is going to be moved to. (changed in ``0.5m4``) - - For example changing ``f(a, b, c)`` to ``f(c, a, b)`` - requires passing ``[2, 0, 1]`` and *not* ``[1, 2, 0]``. - - The `autodef` (automatic default) argument, forces rope to use - it as a default if a default is needed after the change. That - happens when an argument without default is moved after - another that has a default value. Note that `autodef` should - be a string or `None`; the latter disables adding automatic - default. - - """ - self.new_order = new_order - self.autodef = autodef - - def change_definition_info(self, definition_info): - new_args = list(definition_info.args_with_defaults) - for new_index, index in enumerate(self.new_order): - new_args[new_index] = definition_info.args_with_defaults[index] - seen_default = False - for index, (arg, default) in enumerate(list(new_args)): - if default is not None: - seen_default = True - if seen_default and default is None and self.autodef is not None: - new_args[index] = (arg, self.autodef) - definition_info.args_with_defaults = new_args - - -class _ChangeCallsInModule(object): - - def __init__(self, pycore, occurrence_finder, resource, call_changer): - self.pycore = pycore - self.occurrence_finder = occurrence_finder - self.resource = resource - self.call_changer = call_changer - - def get_changed_module(self): - word_finder = worder.Worder(self.source) - change_collector = codeanalyze.ChangeCollector(self.source) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): - if not occurrence.is_called() and not occurrence.is_defined(): - continue - start, end = occurrence.get_primary_range() - begin_parens, end_parens = word_finder.get_word_parens_range(end - 1) - if occurrence.is_called(): - primary, pyname = occurrence.get_primary_and_pyname() - changed_call = self.call_changer.change_call( - primary, pyname, self.source[start:end_parens]) - else: - changed_call = self.call_changer.change_definition( - self.source[start:end_parens]) - if changed_call is not None: - change_collector.add_change(start, end_parens, changed_call) - return change_collector.get_changed() - - @property - @utils.saveit - def pymodule(self): - return self.pycore.resource_to_pyobject(self.resource) - - @property - @utils.saveit - def source(self): - if self.resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def lines(self): - return 
self.pymodule.lines - - -class _MultipleFinders(object): - - def __init__(self, finders): - self.finders = finders - - def find_occurrences(self, resource=None, pymodule=None): - all_occurrences = [] - for finder in self.finders: - all_occurrences.extend(finder.find_occurrences(resource, pymodule)) - all_occurrences.sort(key = lambda o: o.get_primary_range()) - return all_occurrences - diff --git a/pymode/libs3/rope/refactor/encapsulate_field.py b/pymode/libs3/rope/refactor/encapsulate_field.py deleted file mode 100644 index 0e6fea22..00000000 --- a/pymode/libs3/rope/refactor/encapsulate_field.py +++ /dev/null @@ -1,202 +0,0 @@ -from rope.base import pynames, taskhandle, evaluate, exceptions, worder, utils -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import sourceutils, occurrences - - -class EncapsulateField(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.name = worder.get_name_at(resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.pyname = evaluate.eval_location(this_pymodule, offset) - if not self._is_an_attribute(self.pyname): - raise exceptions.RefactoringError( - 'Encapsulate field should be performed on class attributes.') - self.resource = self.pyname.get_definition_location()[0].get_resource() - - def get_changes(self, getter=None, setter=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - If `getter` is not `None`, that will be the name of the - getter, otherwise ``get_${field_name}`` will be used. The - same is true for `setter` and if it is None set_${field_name} is - used. - - `resources` can be a list of `rope.base.resource.File`\s that - the refactoring should be applied on; if `None` all python - files in the project are searched. 
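A short usage sketch for the refactoring above; the resource, the attribute name and the offset computation are placeholder assumptions::

    # Sketch only: replace direct accesses to an attribute with accessors.
    from rope.base.project import Project
    from rope.refactor.encapsulate_field import EncapsulateField

    project = Project('.')
    resource = project.get_resource('mod.py')
    offset = resource.read().index('self.value') + len('self.')  # cursor on the field

    encapsulator = EncapsulateField(project, resource, offset)
    # Without explicit names the accessors default to get_value()/set_value();
    # pass getter=/setter= to override them.
    project.do(encapsulator.get_changes())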
- - """ - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Encapsulate field <%s>' % self.name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - if getter is None: - getter = 'get_' + self.name - if setter is None: - setter = 'set_' + self.name - renamer = GetterSetterRenameInModule( - self.pycore, self.name, self.pyname, getter, setter) - for file in resources: - job_set.started_job(file.path) - if file == self.resource: - result = self._change_holding_module(changes, renamer, - getter, setter) - changes.add_change(ChangeContents(self.resource, result)) - else: - result = renamer.get_changed_module(file) - if result is not None: - changes.add_change(ChangeContents(file, result)) - job_set.finished_job() - return changes - - def get_field_name(self): - """Get the name of the field to be encapsulated""" - return self.name - - def _is_an_attribute(self, pyname): - if pyname is not None and isinstance(pyname, pynames.AssignedName): - pymodule, lineno = self.pyname.get_definition_location() - scope = pymodule.get_scope().\ - get_inner_scope_for_line(lineno) - if scope.get_kind() == 'Class': - return pyname in list(scope.get_names().values()) - parent = scope.parent - if parent is not None and parent.get_kind() == 'Class': - return pyname in list(parent.get_names().values()) - return False - - def _get_defining_class_scope(self): - defining_scope = self._get_defining_scope() - if defining_scope.get_kind() == 'Function': - defining_scope = defining_scope.parent - return defining_scope - - def _get_defining_scope(self): - pymodule, line = self.pyname.get_definition_location() - return pymodule.get_scope().get_inner_scope_for_line(line) - - def _change_holding_module(self, changes, renamer, getter, setter): - pymodule = self.pycore.resource_to_pyobject(self.resource) - class_scope = self._get_defining_class_scope() - defining_object = self._get_defining_scope().pyobject - start, end = sourceutils.get_body_region(defining_object) - - new_source = renamer.get_changed_module(pymodule=pymodule, - skip_start=start, skip_end=end) - if new_source is not None: - pymodule = self.pycore.get_string_module(new_source, self.resource) - class_scope = pymodule.get_scope().\ - get_inner_scope_for_line(class_scope.get_start()) - indents = sourceutils.get_indent(self.pycore) * ' ' - getter = 'def %s(self):\n%sreturn self.%s' % \ - (getter, indents, self.name) - setter = 'def %s(self, value):\n%sself.%s = value' % \ - (setter, indents, self.name) - new_source = sourceutils.add_methods(pymodule, class_scope, - [getter, setter]) - return new_source - - -class GetterSetterRenameInModule(object): - - def __init__(self, pycore, name, pyname, getter, setter): - self.pycore = pycore - self.name = name - self.finder = occurrences.create_finder(pycore, name, pyname) - self.getter = getter - self.setter = setter - - def get_changed_module(self, resource=None, pymodule=None, - skip_start=0, skip_end=0): - change_finder = _FindChangesForModule(self, resource, pymodule, - skip_start, skip_end) - return change_finder.get_changed_module() - - -class _FindChangesForModule(object): - - def __init__(self, finder, resource, pymodule, skip_start, skip_end): - self.pycore = finder.pycore - self.finder = finder.finder - self.getter = finder.getter - self.setter = finder.setter - self.resource = resource - self.pymodule = pymodule - self.last_modified = 0 - self.last_set = None - self.set_index = None - self.skip_start = skip_start - self.skip_end = skip_end - - def 
get_changed_module(self): - result = [] - for occurrence in self.finder.find_occurrences(self.resource, - self.pymodule): - start, end = occurrence.get_word_range() - if self.skip_start <= start < self.skip_end: - continue - self._manage_writes(start, result) - result.append(self.source[self.last_modified:start]) - if self._is_assigned_in_a_tuple_assignment(occurrence): - raise exceptions.RefactoringError( - 'Cannot handle tuple assignments in encapsulate field.') - if occurrence.is_written(): - assignment_type = self.worder.get_assignment_type(start) - if assignment_type == '=': - result.append(self.setter + '(') - else: - var_name = self.source[occurrence.get_primary_range()[0]: - start] + self.getter + '()' - result.append(self.setter + '(' + var_name - + ' %s ' % assignment_type[:-1]) - current_line = self.lines.get_line_number(start) - start_line, end_line = self.pymodule.logical_lines.\ - logical_line_in(current_line) - self.last_set = self.lines.get_line_end(end_line) - end = self.source.index('=', end) + 1 - self.set_index = len(result) - else: - result.append(self.getter + '()') - self.last_modified = end - if self.last_modified != 0: - self._manage_writes(len(self.source), result) - result.append(self.source[self.last_modified:]) - return ''.join(result) - return None - - def _manage_writes(self, offset, result): - if self.last_set is not None and self.last_set <= offset: - result.append(self.source[self.last_modified:self.last_set]) - set_value = ''.join(result[self.set_index:]).strip() - del result[self.set_index:] - result.append(set_value + ')') - self.last_modified = self.last_set - self.last_set = None - - def _is_assigned_in_a_tuple_assignment(self, occurance): - offset = occurance.get_word_range()[0] - return self.worder.is_assigned_in_a_tuple_assignment(offset) - - @property - @utils.saveit - def source(self): - if self.resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def lines(self): - if self.pymodule is None: - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - return self.pymodule.lines - - @property - @utils.saveit - def worder(self): - return worder.Worder(self.source) diff --git a/pymode/libs3/rope/refactor/extract.py b/pymode/libs3/rope/refactor/extract.py deleted file mode 100644 index bb672322..00000000 --- a/pymode/libs3/rope/refactor/extract.py +++ /dev/null @@ -1,789 +0,0 @@ -import re - -from rope.base import ast, codeanalyze -from rope.base.change import ChangeSet, ChangeContents -from rope.base.exceptions import RefactoringError -from rope.refactor import (sourceutils, similarfinder, - patchedast, suites, usefunction) - - -# Extract refactoring has lots of special cases. I tried to split it -# to smaller parts to make it more manageable: -# -# _ExtractInfo: holds information about the refactoring; it is passed -# to the parts that need to have information about the refactoring -# -# _ExtractCollector: merely saves all of the information necessary for -# performing the refactoring. -# -# _DefinitionLocationFinder: finds where to insert the definition. -# -# _ExceptionalConditionChecker: checks for exceptional conditions in -# which the refactoring cannot be applied. -# -# _ExtractMethodParts: generates the pieces of code (like definition) -# needed for performing extract method. -# -# _ExtractVariableParts: like _ExtractMethodParts for variables. -# -# _ExtractPerformer: Uses above classes to collect refactoring -# changes. 
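The helpers listed in these comments back the public ``ExtractMethod``/``ExtractVariable`` classes defined just below; a hedged sketch of the user-facing call, with placeholder offsets delimiting the region to extract::

    # Sketch only: extract a region of code into a new method.
    from rope.base.project import Project
    from rope.refactor.extract import ExtractMethod

    project = Project('.')
    resource = project.get_resource('mod.py')
    source = resource.read()
    start = source.index('total = 0')                          # region start offset
    end = source.index('return total') + len('return total')   # region end offset

    extractor = ExtractMethod(project, resource, start, end)
    # similar=True also replaces other occurrences of the extracted code.
    project.do(extractor.get_changes('compute_total', similar=True))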
-# -# There are a few more helper functions and classes used by above -# classes. -class _ExtractRefactoring(object): - - def __init__(self, project, resource, start_offset, end_offset, - variable=False): - self.project = project - self.pycore = project.pycore - self.resource = resource - self.start_offset = self._fix_start(resource.read(), start_offset) - self.end_offset = self._fix_end(resource.read(), end_offset) - - def _fix_start(self, source, offset): - while offset < len(source) and source[offset].isspace(): - offset += 1 - return offset - - def _fix_end(self, source, offset): - while offset > 0 and source[offset - 1].isspace(): - offset -= 1 - return offset - - def get_changes(self, extracted_name, similar=False, global_=False): - """Get the changes this refactoring makes - - :parameters: - - `similar`: if `True`, similar expressions/statements are also - replaced. - - `global_`: if `True`, the extracted method/variable will - be global. - - """ - info = _ExtractInfo( - self.project, self.resource, self.start_offset, self.end_offset, - extracted_name, variable=self.kind == 'variable', - similar=similar, make_global=global_) - new_contents = _ExtractPerformer(info).extract() - changes = ChangeSet('Extract %s <%s>' % (self.kind, - extracted_name)) - changes.add_change(ChangeContents(self.resource, new_contents)) - return changes - - -class ExtractMethod(_ExtractRefactoring): - - def __init__(self, *args, **kwds): - super(ExtractMethod, self).__init__(*args, **kwds) - - kind = 'method' - - -class ExtractVariable(_ExtractRefactoring): - - def __init__(self, *args, **kwds): - kwds = dict(kwds) - kwds['variable'] = True - super(ExtractVariable, self).__init__(*args, **kwds) - - kind = 'variable' - - -class _ExtractInfo(object): - """Holds information about the extract to be performed""" - - def __init__(self, project, resource, start, end, new_name, - variable, similar, make_global): - self.pycore = project.pycore - self.resource = resource - self.pymodule = self.pycore.resource_to_pyobject(resource) - self.global_scope = self.pymodule.get_scope() - self.source = self.pymodule.source_code - self.lines = self.pymodule.lines - self.new_name = new_name - self.variable = variable - self.similar = similar - self._init_parts(start, end) - self._init_scope() - self.make_global = make_global - - def _init_parts(self, start, end): - self.region = (self._choose_closest_line_end(start), - self._choose_closest_line_end(end, end=True)) - - start = self.logical_lines.logical_line_in( - self.lines.get_line_number(self.region[0]))[0] - end = self.logical_lines.logical_line_in( - self.lines.get_line_number(self.region[1]))[1] - self.region_lines = (start, end) - - self.lines_region = (self.lines.get_line_start(self.region_lines[0]), - self.lines.get_line_end(self.region_lines[1])) - - @property - def logical_lines(self): - return self.pymodule.logical_lines - - def _init_scope(self): - start_line = self.region_lines[0] - scope = self.global_scope.get_inner_scope_for_line(start_line) - if scope.get_kind() != 'Module' and scope.get_start() == start_line: - scope = scope.parent - self.scope = scope - self.scope_region = self._get_scope_region(self.scope) - - def _get_scope_region(self, scope): - return (self.lines.get_line_start(scope.get_start()), - self.lines.get_line_end(scope.get_end()) + 1) - - def _choose_closest_line_end(self, offset, end=False): - lineno = self.lines.get_line_number(offset) - line_start = self.lines.get_line_start(lineno) - line_end = self.lines.get_line_end(lineno) - if 
self.source[line_start:offset].strip() == '': - if end: - return line_start - 1 - else: - return line_start - elif self.source[offset:line_end].strip() == '': - return min(line_end, len(self.source)) - return offset - - @property - def one_line(self): - return self.region != self.lines_region and \ - (self.logical_lines.logical_line_in(self.region_lines[0]) == - self.logical_lines.logical_line_in(self.region_lines[1])) - - @property - def global_(self): - return self.scope.parent is None - - @property - def method(self): - return self.scope.parent is not None and \ - self.scope.parent.get_kind() == 'Class' - - @property - def indents(self): - return sourceutils.get_indents(self.pymodule.lines, - self.region_lines[0]) - - @property - def scope_indents(self): - if self.global_: - return 0 - return sourceutils.get_indents(self.pymodule.lines, - self.scope.get_start()) - - @property - def extracted(self): - return self.source[self.region[0]:self.region[1]] - - _returned = None - @property - def returned(self): - """Does the extracted piece contain return statement""" - if self._returned is None: - node = _parse_text(self.extracted) - self._returned = usefunction._returns_last(node) - return self._returned - - -class _ExtractCollector(object): - """Collects information needed for performing the extract""" - - def __init__(self, info): - self.definition = None - self.body_pattern = None - self.checks = {} - self.replacement_pattern = None - self.matches = None - self.replacements = None - self.definition_location = None - - -class _ExtractPerformer(object): - - def __init__(self, info): - self.info = info - _ExceptionalConditionChecker()(self.info) - - def extract(self): - extract_info = self._collect_info() - content = codeanalyze.ChangeCollector(self.info.source) - definition = extract_info.definition - lineno, indents = extract_info.definition_location - offset = self.info.lines.get_line_start(lineno) - indented = sourceutils.fix_indentation(definition, indents) - content.add_change(offset, offset, indented) - self._replace_occurrences(content, extract_info) - return content.get_changed() - - def _replace_occurrences(self, content, extract_info): - for match in extract_info.matches: - replacement = similarfinder.CodeTemplate( - extract_info.replacement_pattern) - mapping = {} - for name in replacement.get_names(): - node = match.get_ast(name) - if node: - start, end = patchedast.node_region(match.get_ast(name)) - mapping[name] = self.info.source[start:end] - else: - mapping[name] = name - region = match.get_region() - content.add_change(region[0], region[1], - replacement.substitute(mapping)) - - def _collect_info(self): - extract_collector = _ExtractCollector(self.info) - self._find_definition(extract_collector) - self._find_matches(extract_collector) - self._find_definition_location(extract_collector) - return extract_collector - - def _find_matches(self, collector): - regions = self._where_to_search() - finder = similarfinder.SimilarFinder(self.info.pymodule) - matches = [] - for start, end in regions: - matches.extend((finder.get_matches(collector.body_pattern, - collector.checks, start, end))) - collector.matches = matches - - def _where_to_search(self): - if self.info.similar: - if self.info.make_global or self.info.global_: - return [(0, len(self.info.pymodule.source_code))] - if self.info.method and not self.info.variable: - class_scope = self.info.scope.parent - regions = [] - method_kind = _get_function_kind(self.info.scope) - for scope in class_scope.get_scopes(): - if method_kind 
== 'method' and \ - _get_function_kind(scope) != 'method': - continue - start = self.info.lines.get_line_start(scope.get_start()) - end = self.info.lines.get_line_end(scope.get_end()) - regions.append((start, end)) - return regions - else: - if self.info.variable: - return [self.info.scope_region] - else: - return [self.info._get_scope_region(self.info.scope.parent)] - else: - return [self.info.region] - - def _find_definition_location(self, collector): - matched_lines = [] - for match in collector.matches: - start = self.info.lines.get_line_number(match.get_region()[0]) - start_line = self.info.logical_lines.logical_line_in(start)[0] - matched_lines.append(start_line) - location_finder = _DefinitionLocationFinder(self.info, matched_lines) - collector.definition_location = (location_finder.find_lineno(), - location_finder.find_indents()) - - def _find_definition(self, collector): - if self.info.variable: - parts = _ExtractVariableParts(self.info) - else: - parts = _ExtractMethodParts(self.info) - collector.definition = parts.get_definition() - collector.body_pattern = parts.get_body_pattern() - collector.replacement_pattern = parts.get_replacement_pattern() - collector.checks = parts.get_checks() - - -class _DefinitionLocationFinder(object): - - def __init__(self, info, matched_lines): - self.info = info - self.matched_lines = matched_lines - # This only happens when subexpressions cannot be matched - if not matched_lines: - self.matched_lines.append(self.info.region_lines[0]) - - def find_lineno(self): - if self.info.variable and not self.info.make_global: - return self._get_before_line() - if self.info.make_global or self.info.global_: - toplevel = self._find_toplevel(self.info.scope) - ast = self.info.pymodule.get_ast() - newlines = sorted(self.matched_lines + [toplevel.get_end() + 1]) - return suites.find_visible(ast, newlines) - return self._get_after_scope() - - def _find_toplevel(self, scope): - toplevel = scope - if toplevel.parent is not None: - while toplevel.parent.parent is not None: - toplevel = toplevel.parent - return toplevel - - def find_indents(self): - if self.info.variable and not self.info.make_global: - return sourceutils.get_indents(self.info.lines, - self._get_before_line()) - else: - if self.info.global_ or self.info.make_global: - return 0 - return self.info.scope_indents - - def _get_before_line(self): - ast = self.info.scope.pyobject.get_ast() - return suites.find_visible(ast, self.matched_lines) - - def _get_after_scope(self): - return self.info.scope.get_end() + 1 - - -class _ExceptionalConditionChecker(object): - - def __call__(self, info): - self.base_conditions(info) - if info.one_line: - self.one_line_conditions(info) - else: - self.multi_line_conditions(info) - - def base_conditions(self, info): - if info.region[1] > info.scope_region[1]: - raise RefactoringError('Bad region selected for extract method') - end_line = info.region_lines[1] - end_scope = info.global_scope.get_inner_scope_for_line(end_line) - if end_scope != info.scope and end_scope.get_end() != end_line: - raise RefactoringError('Bad region selected for extract method') - try: - extracted = info.source[info.region[0]:info.region[1]] - if info.one_line: - extracted = '(%s)' % extracted - if _UnmatchedBreakOrContinueFinder.has_errors(extracted): - raise RefactoringError('A break/continue without having a ' - 'matching for/while loop.') - except SyntaxError: - raise RefactoringError('Extracted piece should ' - 'contain complete statements.') - - def one_line_conditions(self, info): - if 
self._is_region_on_a_word(info): - raise RefactoringError('Should extract complete statements.') - if info.variable and not info.one_line: - raise RefactoringError('Extract variable should not ' - 'span multiple lines.') - - def multi_line_conditions(self, info): - node = _parse_text(info.source[info.region[0]:info.region[1]]) - count = usefunction._return_count(node) - if count > 1: - raise RefactoringError('Extracted piece can have only one ' - 'return statement.') - if usefunction._yield_count(node): - raise RefactoringError('Extracted piece cannot ' - 'have yield statements.') - if count == 1 and not usefunction._returns_last(node): - raise RefactoringError('Return should be the last statement.') - if info.region != info.lines_region: - raise RefactoringError('Extracted piece should ' - 'contain complete statements.') - - def _is_region_on_a_word(self, info): - if info.region[0] > 0 and self._is_on_a_word(info, info.region[0] - 1) or \ - self._is_on_a_word(info, info.region[1] - 1): - return True - - def _is_on_a_word(self, info, offset): - prev = info.source[offset] - if not (prev.isalnum() or prev == '_') or \ - offset + 1 == len(info.source): - return False - next = info.source[offset + 1] - return next.isalnum() or next == '_' - - -class _ExtractMethodParts(object): - - def __init__(self, info): - self.info = info - self.info_collector = self._create_info_collector() - - def get_definition(self): - if self.info.global_: - return '\n%s\n' % self._get_function_definition() - else: - return '\n%s' % self._get_function_definition() - - def get_replacement_pattern(self): - variables = [] - variables.extend(self._find_function_arguments()) - variables.extend(self._find_function_returns()) - return similarfinder.make_pattern(self._get_call(), variables) - - def get_body_pattern(self): - variables = [] - variables.extend(self._find_function_arguments()) - variables.extend(self._find_function_returns()) - variables.extend(self._find_temps()) - return similarfinder.make_pattern(self._get_body(), variables) - - def _get_body(self): - result = sourceutils.fix_indentation(self.info.extracted, 0) - if self.info.one_line: - result = '(%s)' % result - return result - - def _find_temps(self): - return usefunction.find_temps(self.info.pycore.project, - self._get_body()) - - def get_checks(self): - if self.info.method and not self.info.make_global: - if _get_function_kind(self.info.scope) == 'method': - class_name = similarfinder._pydefined_to_str( - self.info.scope.parent.pyobject) - return {self._get_self_name(): 'type=' + class_name} - return {} - - def _create_info_collector(self): - zero = self.info.scope.get_start() - 1 - start_line = self.info.region_lines[0] - zero - end_line = self.info.region_lines[1] - zero - info_collector = _FunctionInformationCollector(start_line, end_line, - self.info.global_) - body = self.info.source[self.info.scope_region[0]: - self.info.scope_region[1]] - node = _parse_text(body) - ast.walk(node, info_collector) - return info_collector - - def _get_function_definition(self): - args = self._find_function_arguments() - returns = self._find_function_returns() - result = [] - if self.info.method and not self.info.make_global and \ - _get_function_kind(self.info.scope) != 'method': - result.append('@staticmethod\n') - result.append('def %s:\n' % self._get_function_signature(args)) - unindented_body = self._get_unindented_function_body(returns) - indents = sourceutils.get_indent(self.info.pycore) - function_body = sourceutils.indent_lines(unindented_body, indents) - 
result.append(function_body) - definition = ''.join(result) - - return definition + '\n' - - def _get_function_signature(self, args): - args = list(args) - prefix = '' - if self._extracting_method(): - self_name = self._get_self_name() - if self_name is None: - raise RefactoringError('Extracting a method from a function ' - 'with no self argument.') - if self_name in args: - args.remove(self_name) - args.insert(0, self_name) - return prefix + self.info.new_name + \ - '(%s)' % self._get_comma_form(args) - - def _extracting_method(self): - return self.info.method and not self.info.make_global and \ - _get_function_kind(self.info.scope) == 'method' - - def _get_self_name(self): - param_names = self.info.scope.pyobject.get_param_names() - if param_names: - return param_names[0] - - def _get_function_call(self, args): - prefix = '' - if self.info.method and not self.info.make_global: - if _get_function_kind(self.info.scope) == 'method': - self_name = self._get_self_name() - if self_name in args: - args.remove(self_name) - prefix = self_name + '.' - else: - prefix = self.info.scope.parent.pyobject.get_name() + '.' - return prefix + '%s(%s)' % (self.info.new_name, - self._get_comma_form(args)) - - def _get_comma_form(self, names): - result = '' - if names: - result += names[0] - for name in names[1:]: - result += ', ' + name - return result - - def _get_call(self): - if self.info.one_line: - args = self._find_function_arguments() - return self._get_function_call(args) - args = self._find_function_arguments() - returns = self._find_function_returns() - call_prefix = '' - if returns: - call_prefix = self._get_comma_form(returns) + ' = ' - if self.info.returned: - call_prefix = 'return ' - return call_prefix + self._get_function_call(args) - - def _find_function_arguments(self): - # if not make_global, do not pass any global names; they are - # all visible. 
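To make the generated text above concrete, extracting the single assignment in the sketch below as a method produces code along these lines (names are illustrative; exact blank lines and placement depend on the project's indent settings and on _DefinitionLocationFinder):

    # before: the selected region is the `total = sum(data)` line
    def compute(data):
        total = sum(data)
        return total * 2

    # after ExtractMethod(...).get_changes('get_total'), roughly:
    def compute(data):
        total = get_total(data)
        return total * 2

    def get_total(data):
        total = sum(data)
        return total

`data` becomes a parameter because it is written before and read inside the region; `total` is returned because it is written inside the region and read after it.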
- if self.info.global_ and not self.info.make_global: - return () - if not self.info.one_line: - result = (self.info_collector.prewritten & - self.info_collector.read) - result |= (self.info_collector.prewritten & - self.info_collector.postread & - (self.info_collector.maybe_written - - self.info_collector.written)) - return list(result) - start = self.info.region[0] - if start == self.info.lines_region[0]: - start = start + re.search('\S', self.info.extracted).start() - function_definition = self.info.source[start:self.info.region[1]] - read = _VariableReadsAndWritesFinder.find_reads_for_one_liners( - function_definition) - return list(self.info_collector.prewritten.intersection(read)) - - def _find_function_returns(self): - if self.info.one_line or self.info.returned: - return [] - written = self.info_collector.written | \ - self.info_collector.maybe_written - return list(written & self.info_collector.postread) - - def _get_unindented_function_body(self, returns): - if self.info.one_line: - return 'return ' + _join_lines(self.info.extracted) - extracted_body = self.info.extracted - unindented_body = sourceutils.fix_indentation(extracted_body, 0) - if returns: - unindented_body += '\nreturn %s' % self._get_comma_form(returns) - return unindented_body - - -class _ExtractVariableParts(object): - - def __init__(self, info): - self.info = info - - def get_definition(self): - result = self.info.new_name + ' = ' + \ - _join_lines(self.info.extracted) + '\n' - return result - - def get_body_pattern(self): - return '(%s)' % self.info.extracted.strip() - - def get_replacement_pattern(self): - return self.info.new_name - - def get_checks(self): - return {} - - -class _FunctionInformationCollector(object): - - def __init__(self, start, end, is_global): - self.start = start - self.end = end - self.is_global = is_global - self.prewritten = set() - self.maybe_written = set() - self.written = set() - self.read = set() - self.postread = set() - self.postwritten = set() - self.host_function = True - self.conditional = False - - def _read_variable(self, name, lineno): - if self.start <= lineno <= self.end: - if name not in self.written: - self.read.add(name) - if self.end < lineno: - if name not in self.postwritten: - self.postread.add(name) - - def _written_variable(self, name, lineno): - if self.start <= lineno <= self.end: - if self.conditional: - self.maybe_written.add(name) - else: - self.written.add(name) - if self.start > lineno: - self.prewritten.add(name) - if self.end < lineno: - self.postwritten.add(name) - - def _FunctionDef(self, node): - if not self.is_global and self.host_function: - self.host_function = False - for name in _get_argnames(node.args): - self._written_variable(name, node.lineno) - for child in node.body: - ast.walk(child, self) - else: - self._written_variable(node.name, node.lineno) - visitor = _VariableReadsAndWritesFinder() - for child in node.body: - ast.walk(child, visitor) - for name in visitor.read - visitor.written: - self._read_variable(name, node.lineno) - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.AugStore)): - self._written_variable(node.id, node.lineno) - if not isinstance(node.ctx, ast.Store): - self._read_variable(node.id, node.lineno) - - def _Assign(self, node): - ast.walk(node.value, self) - for child in node.targets: - ast.walk(child, self) - - def _ClassDef(self, node): - self._written_variable(node.name, node.lineno) - - def _handle_conditional_node(self, node): - self.conditional = True - try: - for child in 
ast.get_child_nodes(node): - ast.walk(child, self) - finally: - self.conditional = False - - def _If(self, node): - self._handle_conditional_node(node) - - def _While(self, node): - self._handle_conditional_node(node) - - def _For(self, node): - self._handle_conditional_node(node) - - - -def _get_argnames(arguments): - result = [node.arg for node in arguments.args - if isinstance(node, ast.arg)] - if arguments.vararg: - result.append(arguments.vararg) - if arguments.kwarg: - result.append(arguments.kwarg) - return result - - -class _VariableReadsAndWritesFinder(object): - - def __init__(self): - self.written = set() - self.read = set() - - def _Name(self, node): - if isinstance(node.ctx, (ast.Store, ast.AugStore)): - self.written.add(node.id) - if not isinstance(node, ast.Store): - self.read.add(node.id) - - def _FunctionDef(self, node): - self.written.add(node.name) - visitor = _VariableReadsAndWritesFinder() - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - self.read.update(visitor.read - visitor.written) - - def _Class(self, node): - self.written.add(node.name) - - @staticmethod - def find_reads_and_writes(code): - if code.strip() == '': - return set(), set() - if isinstance(code, str): - code = code.encode('utf-8') - node = _parse_text(code) - visitor = _VariableReadsAndWritesFinder() - ast.walk(node, visitor) - return visitor.read, visitor.written - - @staticmethod - def find_reads_for_one_liners(code): - if code.strip() == '': - return set(), set() - node = _parse_text(code) - visitor = _VariableReadsAndWritesFinder() - ast.walk(node, visitor) - return visitor.read - - -class _UnmatchedBreakOrContinueFinder(object): - - def __init__(self): - self.error = False - self.loop_count = 0 - - def _For(self, node): - self.loop_encountered(node) - - def _While(self, node): - self.loop_encountered(node) - - def loop_encountered(self, node): - self.loop_count += 1 - for child in node.body: - ast.walk(child, self) - self.loop_count -= 1 - if node.orelse: - ast.walk(node.orelse, self) - - def _Break(self, node): - self.check_loop() - - def _Continue(self, node): - self.check_loop() - - def check_loop(self): - if self.loop_count < 1: - self.error = True - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - @staticmethod - def has_errors(code): - if code.strip() == '': - return False - node = _parse_text(code) - visitor = _UnmatchedBreakOrContinueFinder() - ast.walk(node, visitor) - return visitor.error - -def _get_function_kind(scope): - return scope.pyobject.get_kind() - - -def _parse_text(body): - body = sourceutils.fix_indentation(body, 0) - node = ast.parse(body) - return node - -def _join_lines(code): - lines = [] - for line in code.splitlines(): - if line.endswith('\\'): - lines.append(line[:-1].strip()) - else: - lines.append(line.strip()) - return ' '.join(lines) diff --git a/pymode/libs3/rope/refactor/functionutils.py b/pymode/libs3/rope/refactor/functionutils.py deleted file mode 100644 index a653b9db..00000000 --- a/pymode/libs3/rope/refactor/functionutils.py +++ /dev/null @@ -1,222 +0,0 @@ -import rope.base.exceptions -import rope.base.pyobjects -from rope.base.builtins import Lambda -from rope.base import worder - - -class DefinitionInfo(object): - - def __init__(self, function_name, is_method, args_with_defaults, - args_arg, keywords_arg): - self.function_name = function_name - self.is_method = is_method - self.args_with_defaults = args_with_defaults - self.args_arg = args_arg - self.keywords_arg = keywords_arg - - def 
to_string(self): - return '%s(%s)' % (self.function_name, self.arguments_to_string()) - - def arguments_to_string(self, from_index=0): - params = [] - for arg, default in self.args_with_defaults: - if default is not None: - params.append('%s=%s' % (arg, default)) - else: - params.append(arg) - if self.args_arg is not None: - params.append('*' + self.args_arg) - if self.keywords_arg: - params.append('**' + self.keywords_arg) - return ', '.join(params[from_index:]) - - @staticmethod - def _read(pyfunction, code): - scope = pyfunction.get_scope() - parent = scope.parent - parameter_names = pyfunction.get_param_names() - kind = pyfunction.get_kind() - is_method = kind == 'method' - is_lambda = kind == 'lambda' - info = _FunctionParser(code, is_method, is_lambda) - args, keywords = info.get_parameters() - args_arg = None - keywords_arg = None - if args and args[-1].startswith('**'): - keywords_arg = args[-1][2:] - del args[-1] - if args and args[-1].startswith('*'): - args_arg = args[-1][1:] - del args[-1] - args_with_defaults = [(name, None) for name in args] - args_with_defaults.extend(keywords) - return DefinitionInfo(info.get_function_name(), is_method, - args_with_defaults, args_arg, keywords_arg) - - @staticmethod - def read(pyfunction): - pymodule = pyfunction.get_module() - word_finder = worder.Worder(pymodule.source_code) - lineno = pyfunction.get_ast().lineno - start = pymodule.lines.get_line_start(lineno) - if isinstance(pyfunction, Lambda): - call = word_finder.get_lambda_and_args(start) - else: - call = word_finder.get_function_and_args_in_header(start) - return DefinitionInfo._read(pyfunction, call) - - -class CallInfo(object): - - def __init__(self, function_name, args, keywords, args_arg, - keywords_arg, implicit_arg, constructor): - self.function_name = function_name - self.args = args - self.keywords = keywords - self.args_arg = args_arg - self.keywords_arg = keywords_arg - self.implicit_arg = implicit_arg - self.constructor = constructor - - def to_string(self): - function = self.function_name - if self.implicit_arg: - function = self.args[0] + '.' 
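As a quick check on the parameter formatting implemented above, DefinitionInfo renders a signature roughly like this (hypothetical values, not taken from this patch):

    from rope.refactor.functionutils import DefinitionInfo

    info = DefinitionInfo('update', False,
                          [('key', None), ('value', 'None')],
                          None, 'extra')
    info.to_string()             # -> 'update(key, value=None, **extra)'
    info.arguments_to_string(1)  # -> 'value=None, **extra'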
+ self.function_name - params = [] - start = 0 - if self.implicit_arg or self.constructor: - start = 1 - if self.args[start:]: - params.extend(self.args[start:]) - if self.keywords: - params.extend(['%s=%s' % (name, value) for name, value in self.keywords]) - if self.args_arg is not None: - params.append('*' + self.args_arg) - if self.keywords_arg: - params.append('**' + self.keywords_arg) - return '%s(%s)' % (function, ', '.join(params)) - - @staticmethod - def read(primary, pyname, definition_info, code): - is_method_call = CallInfo._is_method_call(primary, pyname) - is_constructor = CallInfo._is_class(pyname) - is_classmethod = CallInfo._is_classmethod(pyname) - info = _FunctionParser(code, is_method_call or is_classmethod) - args, keywords = info.get_parameters() - args_arg = None - keywords_arg = None - if args and args[-1].startswith('**'): - keywords_arg = args[-1][2:] - del args[-1] - if args and args[-1].startswith('*'): - args_arg = args[-1][1:] - del args[-1] - if is_constructor: - args.insert(0, definition_info.args_with_defaults[0][0]) - return CallInfo(info.get_function_name(), args, keywords, args_arg, - keywords_arg, is_method_call or is_classmethod, - is_constructor) - - @staticmethod - def _is_method_call(primary, pyname): - return primary is not None and \ - isinstance(primary.get_object().get_type(), - rope.base.pyobjects.PyClass) and \ - CallInfo._is_method(pyname) - - @staticmethod - def _is_class(pyname): - return pyname is not None and \ - isinstance(pyname.get_object(), - rope.base.pyobjects.PyClass) - - @staticmethod - def _is_method(pyname): - if pyname is not None and \ - isinstance(pyname.get_object(), rope.base.pyobjects.PyFunction): - return pyname.get_object().get_kind() == 'method' - return False - - @staticmethod - def _is_classmethod(pyname): - if pyname is not None and \ - isinstance(pyname.get_object(), rope.base.pyobjects.PyFunction): - return pyname.get_object().get_kind() == 'classmethod' - return False - - -class ArgumentMapping(object): - - def __init__(self, definition_info, call_info): - self.call_info = call_info - self.param_dict = {} - self.keyword_args = [] - self.args_arg = [] - for index, value in enumerate(call_info.args): - if index < len(definition_info.args_with_defaults): - name = definition_info.args_with_defaults[index][0] - self.param_dict[name] = value - else: - self.args_arg.append(value) - for name, value in call_info.keywords: - index = -1 - for pair in definition_info.args_with_defaults: - if pair[0] == name: - self.param_dict[name] = value - break - else: - self.keyword_args.append((name, value)) - - def to_call_info(self, definition_info): - args = [] - keywords = [] - for index in range(len(definition_info.args_with_defaults)): - name = definition_info.args_with_defaults[index][0] - if name in self.param_dict: - args.append(self.param_dict[name]) - else: - for i in range(index, len(definition_info.args_with_defaults)): - name = definition_info.args_with_defaults[i][0] - if name in self.param_dict: - keywords.append((name, self.param_dict[name])) - break - args.extend(self.args_arg) - keywords.extend(self.keyword_args) - return CallInfo(self.call_info.function_name, args, keywords, - self.call_info.args_arg, self.call_info.keywords_arg, - self.call_info.implicit_arg, self.call_info.constructor) - - -class _FunctionParser(object): - - def __init__(self, call, implicit_arg, is_lambda=False): - self.call = call - self.implicit_arg = implicit_arg - self.word_finder = worder.Worder(self.call) - if is_lambda: - self.last_parens = 
self.call.rindex(':') - else: - self.last_parens = self.call.rindex(')') - self.first_parens = self.word_finder._find_parens_start(self.last_parens) - - def get_parameters(self): - args, keywords = self.word_finder.get_parameters(self.first_parens, - self.last_parens) - if self.is_called_as_a_method(): - instance = self.call[:self.call.rindex('.', 0, self.first_parens)] - args.insert(0, instance.strip()) - return args, keywords - - def get_instance(self): - if self.is_called_as_a_method(): - return self.word_finder.get_primary_at( - self.call.rindex('.', 0, self.first_parens) - 1) - - def get_function_name(self): - if self.is_called_as_a_method(): - return self.word_finder.get_word_at(self.first_parens - 1) - else: - return self.word_finder.get_primary_at(self.first_parens - 1) - - def is_called_as_a_method(self): - return self.implicit_arg and '.' in self.call[:self.first_parens] diff --git a/pymode/libs3/rope/refactor/importutils/__init__.py b/pymode/libs3/rope/refactor/importutils/__init__.py deleted file mode 100644 index 2a86edb0..00000000 --- a/pymode/libs3/rope/refactor/importutils/__init__.py +++ /dev/null @@ -1,299 +0,0 @@ -"""A package for handling imports - -This package provides tools for modifying module imports after -refactorings or as a separate task. - -""" -import rope.base.evaluate -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import occurrences, rename -from rope.refactor.importutils import module_imports, actions -from rope.refactor.importutils.importinfo import NormalImport, FromImport -import rope.base.codeanalyze - - -class ImportOrganizer(object): - """Perform some import-related commands - - Each method returns a `rope.base.change.Change` object. - - """ - - def __init__(self, project): - self.project = project - self.pycore = project.pycore - self.import_tools = ImportTools(self.pycore) - - def organize_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.organize_imports, resource, offset) - - def expand_star_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.expand_stars, resource, offset) - - def froms_to_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.froms_to_imports, resource, offset) - - def relatives_to_absolutes(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.relatives_to_absolutes, resource, offset) - - def handle_long_imports(self, resource, offset=None): - return self._perform_command_on_import_tools( - self.import_tools.handle_long_imports, resource, offset) - - def _perform_command_on_import_tools(self, method, resource, offset): - pymodule = self.pycore.resource_to_pyobject(resource) - before_performing = pymodule.source_code - import_filter = None - if offset is not None: - import_filter = self._line_filter( - pymodule.lines.get_line_number(offset)) - result = method(pymodule, import_filter=import_filter) - if result is not None and result != before_performing: - changes = ChangeSet(method.__name__.replace('_', ' ') + - ' in <%s>' % resource.path) - changes.add_change(ChangeContents(resource, result)) - return changes - - def _line_filter(self, lineno): - def import_filter(import_stmt): - return import_stmt.start_line <= lineno < import_stmt.end_line - return import_filter - - -class ImportTools(object): - - def __init__(self, pycore): - self.pycore = pycore - - def get_import(self, resource): - """The 
import statement for `resource`""" - module_name = self.pycore.modname(resource) - return NormalImport(((module_name, None), )) - - def get_from_import(self, resource, name): - """The from import statement for `name` in `resource`""" - module_name = self.pycore.modname(resource) - names = [] - if isinstance(name, list): - names = [(imported, None) for imported in name] - else: - names = [(name, None),] - return FromImport(module_name, 0, tuple(names)) - - def module_imports(self, module, imports_filter=None): - return module_imports.ModuleImports(self.pycore, module, - imports_filter) - - def froms_to_imports(self, pymodule, import_filter=None): - pymodule = self._clean_up_imports(pymodule, import_filter) - module_imports = self.module_imports(pymodule, import_filter) - for import_stmt in module_imports.imports: - if import_stmt.readonly or \ - not self._is_transformable_to_normal(import_stmt.import_info): - continue - pymodule = self._from_to_normal(pymodule, import_stmt) - - # Adding normal imports in place of froms - module_imports = self.module_imports(pymodule, import_filter) - for import_stmt in module_imports.imports: - if not import_stmt.readonly and \ - self._is_transformable_to_normal(import_stmt.import_info): - import_stmt.import_info = \ - NormalImport(((import_stmt.import_info.module_name, None),)) - module_imports.remove_duplicates() - return module_imports.get_changed_source() - - def expand_stars(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - module_imports.expand_stars() - return module_imports.get_changed_source() - - def _from_to_normal(self, pymodule, import_stmt): - resource = pymodule.get_resource() - from_import = import_stmt.import_info - module_name = from_import.module_name - for name, alias in from_import.names_and_aliases: - imported = name - if alias is not None: - imported = alias - occurrence_finder = occurrences.create_finder( - self.pycore, imported, pymodule[imported], imports=False) - source = rename.rename_in_module( - occurrence_finder, module_name + '.' 
+ name, - pymodule=pymodule, replace_primary=True) - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - return pymodule - - def _clean_up_imports(self, pymodule, import_filter): - resource = pymodule.get_resource() - module_with_imports = self.module_imports(pymodule, import_filter) - module_with_imports.expand_stars() - source = module_with_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - source = self.relatives_to_absolutes(pymodule) - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - - module_with_imports = self.module_imports(pymodule, import_filter) - module_with_imports.remove_duplicates() - module_with_imports.remove_unused_imports() - source = module_with_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, resource) - return pymodule - - def relatives_to_absolutes(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - to_be_absolute_list = module_imports.get_relative_to_absolute_list() - for name, absolute_name in to_be_absolute_list: - pymodule = self._rename_in_module(pymodule, name, absolute_name) - module_imports = self.module_imports(pymodule, import_filter) - module_imports.get_relative_to_absolute_list() - source = module_imports.get_changed_source() - if source is None: - source = pymodule.source_code - return source - - def _is_transformable_to_normal(self, import_info): - if not isinstance(import_info, FromImport): - return False - return True - - def organize_imports(self, pymodule, - unused=True, duplicates=True, - selfs=True, sort=True, import_filter=None): - if unused or duplicates: - module_imports = self.module_imports(pymodule, import_filter) - if unused: - module_imports.remove_unused_imports() - if duplicates: - module_imports.remove_duplicates() - source = module_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module( - source, pymodule.get_resource()) - if selfs: - pymodule = self._remove_self_imports(pymodule, import_filter) - if sort: - return self.sort_imports(pymodule, import_filter) - else: - return pymodule.source_code - - def _remove_self_imports(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - to_be_fixed, to_be_renamed = module_imports.get_self_import_fix_and_rename_list() - for name in to_be_fixed: - try: - pymodule = self._rename_in_module(pymodule, name, '', till_dot=True) - except ValueError: - # There is a self import with direct access to it - return pymodule - for name, new_name in to_be_renamed: - pymodule = self._rename_in_module(pymodule, name, new_name) - module_imports = self.module_imports(pymodule, import_filter) - module_imports.get_self_import_fix_and_rename_list() - source = module_imports.get_changed_source() - if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) - return pymodule - - def _rename_in_module(self, pymodule, name, new_name, till_dot=False): - old_name = name.split('.')[-1] - old_pyname = rope.base.evaluate.eval_str(pymodule.get_scope(), name) - occurrence_finder = occurrences.create_finder( - self.pycore, old_name, old_pyname, imports=False) - changes = rope.base.codeanalyze.ChangeCollector(pymodule.source_code) - for occurrence in occurrence_finder.find_occurrences(pymodule=pymodule): - start, end = occurrence.get_primary_range() - if till_dot: 
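Taken together, the ImportTools methods above are usually reached through the ImportOrganizer wrapper shown earlier in this file; a minimal sketch, assuming a configured rope Project and a hypothetical module name:

    from rope.base.project import Project
    from rope.refactor.importutils import ImportOrganizer

    project = Project('.')                     # hypothetical project root
    resource = project.get_resource('mod.py')  # hypothetical module
    organizer = ImportOrganizer(project)
    changes = organizer.organize_imports(resource)
    if changes is not None:                    # None means there was nothing to change
        project.do(changes)

expand_star_imports, froms_to_imports, relatives_to_absolutes and handle_long_imports are invoked the same way and likewise return a change set or None.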
- new_end = pymodule.source_code.index('.', end) + 1 - space = pymodule.source_code[end:new_end - 1].strip() - if not space == '': - for c in space: - if not c.isspace() and c not in '\\': - raise ValueError() - end = new_end - changes.add_change(start, end, new_name) - source = changes.get_changed() - if source is not None: - pymodule = self.pycore.get_string_module(source, pymodule.get_resource()) - return pymodule - - def sort_imports(self, pymodule, import_filter=None): - module_imports = self.module_imports(pymodule, import_filter) - module_imports.sort_imports() - return module_imports.get_changed_source() - - def handle_long_imports(self, pymodule, maxdots=2, maxlength=27, - import_filter=None): - # IDEA: `maxdots` and `maxlength` can be specified in project config - # adding new from imports - module_imports = self.module_imports(pymodule, import_filter) - to_be_fixed = module_imports.handle_long_imports(maxdots, maxlength) - # performing the renaming - pymodule = self.pycore.get_string_module( - module_imports.get_changed_source(), - resource=pymodule.get_resource()) - for name in to_be_fixed: - pymodule = self._rename_in_module(pymodule, name, - name.split('.')[-1]) - # organizing imports - return self.organize_imports(pymodule, selfs=False, sort=False, - import_filter=import_filter) - - -def get_imports(pycore, pydefined): - """A shortcut for getting the `ImportInfo`\s used in a scope""" - pymodule = pydefined.get_module() - module = module_imports.ModuleImports(pycore, pymodule) - if pymodule == pydefined: - return [stmt.import_info for stmt in module.imports] - return module.get_used_imports(pydefined) - - -def get_module_imports(pycore, pymodule): - """A shortcut for creating a `module_imports.ModuleImports` object""" - return module_imports.ModuleImports(pycore, pymodule) - - -def add_import(pycore, pymodule, module_name, name=None): - imports = get_module_imports(pycore, pymodule) - candidates = [] - names = [] - # from mod import name - if name is not None: - from_import = FromImport(module_name, 0, [(name, None)]) - names.append(name) - candidates.append(from_import) - # from pkg import mod - if '.' in module_name: - pkg, mod = module_name.rsplit('.', 1) - candidates.append(FromImport(pkg, 0, [(mod, None)])) - if name: - names.append(mod + '.' + name) - else: - names.append(mod) - # import mod - normal_import = NormalImport([(module_name, None)]) - if name: - names.append(module_name + '.' 
+ name) - else: - names.append(module_name) - - candidates.append(normal_import) - - visitor = actions.AddingVisitor(pycore, candidates) - selected_import = normal_import - for import_statement in imports.imports: - if import_statement.accept(visitor): - selected_import = visitor.import_info - break - imports.add_import(selected_import) - imported_name = names[candidates.index(selected_import)] - return imports.get_changed_source(), imported_name diff --git a/pymode/libs3/rope/refactor/importutils/actions.py b/pymode/libs3/rope/refactor/importutils/actions.py deleted file mode 100644 index 4851d02f..00000000 --- a/pymode/libs3/rope/refactor/importutils/actions.py +++ /dev/null @@ -1,359 +0,0 @@ -import os -import sys - -from rope.base import pyobjects, exceptions, stdmods -from rope.refactor import occurrences -from rope.refactor.importutils import importinfo - - -class ImportInfoVisitor(object): - - def dispatch(self, import_): - try: - method_name = 'visit' + import_.import_info.__class__.__name__ - method = getattr(self, method_name) - return method(import_, import_.import_info) - except exceptions.ModuleNotFoundError: - pass - - def visitEmptyImport(self, import_stmt, import_info): - pass - - def visitNormalImport(self, import_stmt, import_info): - pass - - def visitFromImport(self, import_stmt, import_info): - pass - - -class RelativeToAbsoluteVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder): - self.to_be_absolute = [] - self.pycore = pycore - self.folder = current_folder - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - self.to_be_absolute.extend(self._get_relative_to_absolute_list(import_info)) - new_pairs = [] - for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) - if resource is None: - new_pairs.append((name, alias)) - continue - absolute_name = self.pycore.modname(resource) - new_pairs.append((absolute_name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.NormalImport(new_pairs) - - def _get_relative_to_absolute_list(self, import_info): - result = [] - for name, alias in import_info.names_and_aliases: - if alias is not None: - continue - resource = self.pycore.find_module(name, folder=self.folder) - if resource is None: - continue - absolute_name = self.pycore.modname(resource) - if absolute_name != name: - result.append((name, absolute_name)) - return result - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - if resource is None: - return None - absolute_name = self.pycore.modname(resource) - if import_info.module_name != absolute_name: - import_stmt.import_info = importinfo.FromImport( - absolute_name, 0, import_info.names_and_aliases) - - -class FilteringVisitor(ImportInfoVisitor): - - def __init__(self, pycore, folder, can_select): - self.to_be_absolute = [] - self.pycore = pycore - self.can_select = self._transform_can_select(can_select) - self.context = importinfo.ImportContext(pycore, folder) - - def _transform_can_select(self, can_select): - def can_select_name_and_alias(name, alias): - imported = name - if alias is not None: - imported = alias - return can_select(imported) - return can_select_name_and_alias - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - if 
self.can_select(name, alias):
-                new_pairs.append((name, alias))
-        return importinfo.NormalImport(new_pairs)
-
-    def visitFromImport(self, import_stmt, import_info):
-        if _is_future(import_info):
-            return import_info
-        new_pairs = []
-        if import_info.is_star_import():
-            for name in import_info.get_imported_names(self.context):
-                if self.can_select(name, None):
-                    new_pairs.append(import_info.names_and_aliases[0])
-                    break
-        else:
-            for name, alias in import_info.names_and_aliases:
-                if self.can_select(name, alias):
-                    new_pairs.append((name, alias))
-        return importinfo.FromImport(
-            import_info.module_name, import_info.level, new_pairs)
-
-
-class RemovingVisitor(ImportInfoVisitor):
-
-    def __init__(self, pycore, folder, can_select):
-        self.to_be_absolute = []
-        self.pycore = pycore
-        self.filtering = FilteringVisitor(pycore, folder, can_select)
-
-    def dispatch(self, import_):
-        result = self.filtering.dispatch(import_)
-        if result is not None:
-            import_.import_info = result
-
-
-class AddingVisitor(ImportInfoVisitor):
-    """A class for adding imports
-
-    Given a list of `ImportInfo`\s, it tries to add each import to the
-    module and returns `True` and gives up when an import can be added
-    to older ones.
-
-    """
-
-    def __init__(self, pycore, import_list):
-        self.pycore = pycore
-        self.import_list = import_list
-        self.import_info = None
-
-    def dispatch(self, import_):
-        for import_info in self.import_list:
-            self.import_info = import_info
-            if ImportInfoVisitor.dispatch(self, import_):
-                return True
-
-    # TODO: Handle adding relative and absolute imports
-    def visitNormalImport(self, import_stmt, import_info):
-        if not isinstance(self.import_info, import_info.__class__):
-            return False
-        # Adding ``import x`` and ``import x.y`` that results ``import x.y``
-        if len(import_info.names_and_aliases) == \
-           len(self.import_info.names_and_aliases) == 1:
-            imported1 = import_info.names_and_aliases[0]
-            imported2 = self.import_info.names_and_aliases[0]
-            if imported1[1] == imported2[1] is None:
-                if imported1[0].startswith(imported2[0] + '.'):
-                    return True
-                if imported2[0].startswith(imported1[0] + '.'):
-                    import_stmt.import_info = self.import_info
-                    return True
-        # Multiple imports using a single import statement is discouraged
-        # so we won't bother adding them.
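The merging rules spelled out in the comments above are what importutils.add_import (defined earlier in this package's __init__) relies on; a small sketch with hypothetical names:

    from rope.base.project import Project
    from rope.refactor import importutils

    project = Project('.')                     # hypothetical project root
    resource = project.get_resource('mod.py')  # hypothetical module
    pymodule = project.pycore.resource_to_pyobject(resource)
    new_source, imported_name = importutils.add_import(
        project.pycore, pymodule, 'os.path', 'join')
    # If the module already has `import os.path` or a matching from-import,
    # AddingVisitor extends that statement instead of adding a duplicate one.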
- if self.import_info._are_name_and_alias_lists_equal( - import_info.names_and_aliases, self.import_info.names_and_aliases): - return True - - def visitFromImport(self, import_stmt, import_info): - if isinstance(self.import_info, import_info.__class__) and \ - import_info.module_name == self.import_info.module_name and \ - import_info.level == self.import_info.level: - if import_info.is_star_import(): - return True - if self.import_info.is_star_import(): - import_stmt.import_info = self.import_info - return True - new_pairs = list(import_info.names_and_aliases) - for pair in self.import_info.names_and_aliases: - if pair not in new_pairs: - new_pairs.append(pair) - import_stmt.import_info = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - return True - - -class ExpandStarsVisitor(ImportInfoVisitor): - - def __init__(self, pycore, folder, can_select): - self.pycore = pycore - self.filtering = FilteringVisitor(pycore, folder, can_select) - self.context = importinfo.ImportContext(pycore, folder) - - def visitNormalImport(self, import_stmt, import_info): - self.filtering.dispatch(import_stmt) - - def visitFromImport(self, import_stmt, import_info): - if import_info.is_star_import(): - new_pairs = [] - for name in import_info.get_imported_names(self.context): - new_pairs.append((name, None)) - new_import = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - import_stmt.import_info = \ - self.filtering.visitFromImport(None, new_import) - else: - self.filtering.dispatch(import_stmt) - - -class SelfImportVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder, resource): - self.pycore = pycore - self.folder = current_folder - self.resource = resource - self.to_be_fixed = set() - self.to_be_renamed = set() - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - resource = self.pycore.find_module(name, folder=self.folder) - if resource is not None and resource == self.resource: - imported = name - if alias is not None: - imported = alias - self.to_be_fixed.add(imported) - else: - new_pairs.append((name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.NormalImport(new_pairs) - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - if resource is None: - return - if resource == self.resource: - self._importing_names_from_self(import_info, import_stmt) - return - pymodule = self.pycore.resource_to_pyobject(resource) - new_pairs = [] - for name, alias in import_info.names_and_aliases: - try: - result = pymodule[name].get_object() - if isinstance(result, pyobjects.PyModule) and \ - result.get_resource() == self.resource: - imported = name - if alias is not None: - imported = alias - self.to_be_fixed.add(imported) - else: - new_pairs.append((name, alias)) - except exceptions.AttributeNotFoundError: - new_pairs.append((name, alias)) - if not import_info._are_name_and_alias_lists_equal( - new_pairs, import_info.names_and_aliases): - import_stmt.import_info = importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - - def _importing_names_from_self(self, import_info, import_stmt): - if not import_info.is_star_import(): - for name, alias in import_info.names_and_aliases: - if alias is not None: - 
self.to_be_renamed.add((alias, name)) - import_stmt.empty_import() - - -class SortingVisitor(ImportInfoVisitor): - - def __init__(self, pycore, current_folder): - self.pycore = pycore - self.folder = current_folder - self.standard = set() - self.third_party = set() - self.in_project = set() - self.future = set() - self.context = importinfo.ImportContext(pycore, current_folder) - - def visitNormalImport(self, import_stmt, import_info): - if import_info.names_and_aliases: - name, alias = import_info.names_and_aliases[0] - resource = self.pycore.find_module( - name, folder=self.folder) - self._check_imported_resource(import_stmt, resource, name) - - def visitFromImport(self, import_stmt, import_info): - resource = import_info.get_imported_resource(self.context) - self._check_imported_resource(import_stmt, resource, - import_info.module_name) - - def _check_imported_resource(self, import_stmt, resource, imported_name): - info = import_stmt.import_info - if resource is not None and resource.project == self.pycore.project: - self.in_project.add(import_stmt) - elif _is_future(info): - self.future.add(import_stmt) - elif imported_name.split('.')[0] in stdmods.standard_modules(): - self.standard.add(import_stmt) - else: - self.third_party.add(import_stmt) - - -class LongImportVisitor(ImportInfoVisitor): - - def __init__(self, current_folder, pycore, maxdots, maxlength): - self.maxdots = maxdots - self.maxlength = maxlength - self.to_be_renamed = set() - self.current_folder = current_folder - self.pycore = pycore - self.new_imports = [] - - def visitNormalImport(self, import_stmt, import_info): - new_pairs = [] - for name, alias in import_info.names_and_aliases: - if alias is None and self._is_long(name): - self.to_be_renamed.add(name) - last_dot = name.rindex('.') - from_ = name[:last_dot] - imported = name[last_dot + 1:] - self.new_imports.append( - importinfo.FromImport(from_, 0, ((imported, None), ))) - - def _is_long(self, name): - return name.count('.') > self.maxdots or \ - ('.' in name and len(name) > self.maxlength) - - -class RemovePyNameVisitor(ImportInfoVisitor): - - def __init__(self, pycore, pymodule, pyname, folder): - self.pymodule = pymodule - self.pyname = pyname - self.context = importinfo.ImportContext(pycore, folder) - - def visitFromImport(self, import_stmt, import_info): - new_pairs = [] - if not import_info.is_star_import(): - for name, alias in import_info.names_and_aliases: - try: - pyname = self.pymodule[alias or name] - if occurrences.same_pyname(self.pyname, pyname): - continue - except exceptions.AttributeNotFoundError: - pass - new_pairs.append((name, alias)) - return importinfo.FromImport( - import_info.module_name, import_info.level, new_pairs) - - def dispatch(self, import_): - result = ImportInfoVisitor.dispatch(self, import_) - if result is not None: - import_.import_info = result - - -def _is_future(info): - return isinstance(info, importinfo.FromImport) and \ - info.module_name == '__future__' diff --git a/pymode/libs3/rope/refactor/importutils/importinfo.py b/pymode/libs3/rope/refactor/importutils/importinfo.py deleted file mode 100644 index cbf49d48..00000000 --- a/pymode/libs3/rope/refactor/importutils/importinfo.py +++ /dev/null @@ -1,201 +0,0 @@ -class ImportStatement(object): - """Represent an import in a module - - `readonly` attribute controls whether this import can be changed - by import actions or not. 
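For orientation, the LongImportVisitor above (driven by ImportTools.handle_long_imports with its defaults maxdots=2, maxlength=27) rewrites deeply dotted imports roughly as in this schematic before/after:

    # before
    import rope.refactor.importutils.module_imports
    mi = rope.refactor.importutils.module_imports.ModuleImports(pycore, pymodule)

    # after handle_long_imports()
    from rope.refactor.importutils import module_imports
    mi = module_imports.ModuleImports(pycore, pymodule)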
- - """ - - def __init__(self, import_info, start_line, end_line, - main_statement=None, blank_lines=0): - self.start_line = start_line - self.end_line = end_line - self.readonly = False - self.main_statement = main_statement - self._import_info = None - self.import_info = import_info - self._is_changed = False - self.new_start = None - self.blank_lines = blank_lines - - def _get_import_info(self): - return self._import_info - - def _set_import_info(self, new_import): - if not self.readonly and \ - new_import is not None and not new_import == self._import_info: - self._is_changed = True - self._import_info = new_import - - import_info = property(_get_import_info, _set_import_info) - - def get_import_statement(self): - if self._is_changed or self.main_statement is None: - return self.import_info.get_import_statement() - else: - return self.main_statement - - def empty_import(self): - self.import_info = ImportInfo.get_empty_import() - - def move(self, lineno, blank_lines=0): - self.new_start = lineno - self.blank_lines = blank_lines - - def get_old_location(self): - return self.start_line, self.end_line - - def get_new_start(self): - return self.new_start - - def is_changed(self): - return self._is_changed or (self.new_start is not None or - self.new_start != self.start_line) - - def accept(self, visitor): - return visitor.dispatch(self) - - -class ImportInfo(object): - - def get_imported_primaries(self, context): - pass - - def get_imported_names(self, context): - return [primary.split('.')[0] - for primary in self.get_imported_primaries(context)] - - def get_import_statement(self): - pass - - def is_empty(self): - pass - - def __hash__(self): - return hash(self.get_import_statement()) - - def _are_name_and_alias_lists_equal(self, list1, list2): - if len(list1) != len(list2): - return False - for pair1, pair2 in list(zip(list1, list2)): - if pair1 != pair2: - return False - return True - - def __eq__(self, obj): - return isinstance(obj, self.__class__) and \ - self.get_import_statement() == obj.get_import_statement() - - def __ne__(self, obj): - return not self.__eq__(obj) - - @staticmethod - def get_empty_import(): - return EmptyImport() - - -class NormalImport(ImportInfo): - - def __init__(self, names_and_aliases): - self.names_and_aliases = names_and_aliases - - def get_imported_primaries(self, context): - result = [] - for name, alias in self.names_and_aliases: - if alias: - result.append(alias) - else: - result.append(name) - return result - - def get_import_statement(self): - result = 'import ' - for name, alias in self.names_and_aliases: - result += name - if alias: - result += ' as ' + alias - result += ', ' - return result[:-2] - - def is_empty(self): - return len(self.names_and_aliases) == 0 - - -class FromImport(ImportInfo): - - def __init__(self, module_name, level, names_and_aliases): - self.module_name = module_name - self.level = level - self.names_and_aliases = names_and_aliases - - def get_imported_primaries(self, context): - if self.names_and_aliases[0][0] == '*': - module = self.get_imported_module(context) - return [name for name in module - if not name.startswith('_')] - result = [] - for name, alias in self.names_and_aliases: - if alias: - result.append(alias) - else: - result.append(name) - return result - - def get_imported_resource(self, context): - """Get the imported resource - - Returns `None` if module was not found. 
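The get_import_statement implementations in this file render import objects back to source text; a few hypothetical examples of what they produce:

    from rope.refactor.importutils.importinfo import NormalImport, FromImport

    NormalImport([('os', None), ('sys', 'system')]).get_import_statement()
    # -> 'import os, sys as system'
    FromImport('collections', 0, [('OrderedDict', 'OD')]).get_import_statement()
    # -> 'from collections import OrderedDict as OD'
    FromImport('utils', 2, [('helper', None)]).get_import_statement()
    # -> 'from ..utils import helper'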
- """ - if self.level == 0: - return context.pycore.find_module( - self.module_name, folder=context.folder) - else: - return context.pycore.find_relative_module( - self.module_name, context.folder, self.level) - - def get_imported_module(self, context): - """Get the imported `PyModule` - - Raises `rope.base.exceptions.ModuleNotFoundError` if module - could not be found. - """ - if self.level == 0: - return context.pycore.get_module( - self.module_name, context.folder) - else: - return context.pycore.get_relative_module( - self.module_name, context.folder, self.level) - - def get_import_statement(self): - result = 'from ' + '.' * self.level + self.module_name + ' import ' - for name, alias in self.names_and_aliases: - result += name - if alias: - result += ' as ' + alias - result += ', ' - return result[:-2] - - def is_empty(self): - return len(self.names_and_aliases) == 0 - - def is_star_import(self): - return len(self.names_and_aliases) > 0 and \ - self.names_and_aliases[0][0] == '*' - - -class EmptyImport(ImportInfo): - - names_and_aliases = [] - - def is_empty(self): - return True - - def get_imported_primaries(self, context): - return [] - - -class ImportContext(object): - - def __init__(self, pycore, folder): - self.pycore = pycore - self.folder = folder diff --git a/pymode/libs3/rope/refactor/importutils/module_imports.py b/pymode/libs3/rope/refactor/importutils/module_imports.py deleted file mode 100644 index cf9004f8..00000000 --- a/pymode/libs3/rope/refactor/importutils/module_imports.py +++ /dev/null @@ -1,451 +0,0 @@ -import functools -import rope.base.pynames -from rope.base import ast, utils -from rope.refactor.importutils import importinfo -from rope.refactor.importutils import actions - - -class ModuleImports(object): - - def __init__(self, pycore, pymodule, import_filter=None): - self.pycore = pycore - self.pymodule = pymodule - self.separating_lines = 0 - self.filter = import_filter - - @property - @utils.saveit - def imports(self): - finder = _GlobalImportFinder(self.pymodule, self.pycore) - result = finder.find_import_statements() - self.separating_lines = finder.get_separating_line_count() - if self.filter is not None: - for import_stmt in result: - if not self.filter(import_stmt): - import_stmt.readonly = True - return result - - def _get_unbound_names(self, defined_pyobject): - visitor = _GlobalUnboundNameFinder(self.pymodule, defined_pyobject) - ast.walk(self.pymodule.get_ast(), visitor) - return visitor.unbound - - def remove_unused_imports(self): - can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) - visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def get_used_imports(self, defined_pyobject): - result = [] - can_select = _OneTimeSelector(self._get_unbound_names(defined_pyobject)) - visitor = actions.FilteringVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - new_import = import_statement.accept(visitor) - if new_import is not None and not new_import.is_empty(): - result.append(new_import) - return result - - def get_changed_source(self): - imports = self.imports - after_removing = self._remove_imports(imports) - imports = [stmt for stmt in imports - if not stmt.import_info.is_empty()] - - first_non_blank = self._first_non_blank_line(after_removing, 0) - first_import = self._first_import_line() - 1 - result = [] - # Writing module docs - 
result.extend(after_removing[first_non_blank:first_import]) - # Writing imports - sorted_imports = sorted(imports, key = functools.cmp_to_key(self._compare_import_locations)) - for stmt in sorted_imports: - start = self._get_import_location(stmt) - if stmt != sorted_imports[0]: - result.append('\n' * stmt.blank_lines) - result.append(stmt.get_import_statement() + '\n') - if sorted_imports and first_non_blank < len(after_removing): - result.append('\n' * self.separating_lines) - - # Writing the body - first_after_imports = self._first_non_blank_line(after_removing, - first_import) - result.extend(after_removing[first_after_imports:]) - return ''.join(result) - - def _get_import_location(self, stmt): - start = stmt.get_new_start() - if start is None: - start = stmt.get_old_location()[0] - return start - - def _compare_import_locations(self, stmt1, stmt2): - def get_location(stmt): - if stmt.get_new_start() is not None: - return stmt.get_new_start() - else: - return stmt.get_old_location()[0] - return get_location(stmt1) - get_location(stmt2) - - def _remove_imports(self, imports): - lines = self.pymodule.source_code.splitlines(True) - after_removing = [] - last_index = 0 - for stmt in imports: - start, end = stmt.get_old_location() - after_removing.extend(lines[last_index:start - 1]) - last_index = end - 1 - for i in range(start, end): - after_removing.append('') - after_removing.extend(lines[last_index:]) - return after_removing - - def _first_non_blank_line(self, lines, lineno): - result = lineno - for line in lines[lineno:]: - if line.strip() == '': - result += 1 - else: - break - return result - - def add_import(self, import_info): - visitor = actions.AddingVisitor(self.pycore, [import_info]) - for import_statement in self.imports: - if import_statement.accept(visitor): - break - else: - lineno = self._get_new_import_lineno() - blanks = self._get_new_import_blanks() - self.imports.append(importinfo.ImportStatement( - import_info, lineno, lineno, - blank_lines=blanks)) - - def _get_new_import_blanks(self): - return 0 - - def _get_new_import_lineno(self): - if self.imports: - return self.imports[-1].end_line - return 1 - - def filter_names(self, can_select): - visitor = actions.RemovingVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def expand_stars(self): - can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule)) - visitor = actions.ExpandStarsVisitor( - self.pycore, self._current_folder(), can_select) - for import_statement in self.imports: - import_statement.accept(visitor) - - def remove_duplicates(self): - added_imports = [] - for import_stmt in self.imports: - visitor = actions.AddingVisitor(self.pycore, - [import_stmt.import_info]) - for added_import in added_imports: - if added_import.accept(visitor): - import_stmt.empty_import() - else: - added_imports.append(import_stmt) - - def get_relative_to_absolute_list(self): - visitor = rope.refactor.importutils.actions.RelativeToAbsoluteVisitor( - self.pycore, self._current_folder()) - for import_stmt in self.imports: - if not import_stmt.readonly: - import_stmt.accept(visitor) - return visitor.to_be_absolute - - def get_self_import_fix_and_rename_list(self): - visitor = rope.refactor.importutils.actions.SelfImportVisitor( - self.pycore, self._current_folder(), self.pymodule.get_resource()) - for import_stmt in self.imports: - if not import_stmt.readonly: - import_stmt.accept(visitor) - return visitor.to_be_fixed, visitor.to_be_renamed 
- - def _current_folder(self): - return self.pymodule.get_resource().parent - - def sort_imports(self): - # IDEA: Sort from import list - visitor = actions.SortingVisitor(self.pycore, self._current_folder()) - for import_statement in self.imports: - import_statement.accept(visitor) - in_projects = sorted(visitor.in_project, key = self._compare_imports) - third_party = sorted(visitor.third_party, key = self._compare_imports) - standards = sorted(visitor.standard, key = self._compare_imports) - future = sorted(visitor.future, key = self._compare_imports) - blank_lines = 0 - last_index = self._first_import_line() - last_index = self._move_imports(future, last_index, 0) - last_index = self._move_imports(standards, last_index, 1) - last_index = self._move_imports(third_party, last_index, 1) - last_index = self._move_imports(in_projects, last_index, 1) - self.separating_lines = 2 - - def _first_import_line(self): - nodes = self.pymodule.get_ast().body - lineno = 0 - if self.pymodule.get_doc() is not None: - lineno = 1 - if len(nodes) > lineno: - lineno = self.pymodule.logical_lines.logical_line_in( - nodes[lineno].lineno)[0] - else: - lineno = self.pymodule.lines.length() - while lineno > 1: - line = self.pymodule.lines.get_line(lineno - 1) - if line.strip() == '': - lineno -= 1 - else: - break - return lineno - - def _compare_imports(self, stmt): - str = stmt.get_import_statement() - return (str.startswith('from '), str) - - def _move_imports(self, imports, index, blank_lines): - if imports: - imports[0].move(index, blank_lines) - index += 1 - if len(imports) > 1: - for stmt in imports[1:]: - stmt.move(index) - index += 1 - return index - - def handle_long_imports(self, maxdots, maxlength): - visitor = actions.LongImportVisitor( - self._current_folder(), self.pycore, maxdots, maxlength) - for import_statement in self.imports: - if not import_statement.readonly: - import_statement.accept(visitor) - for import_info in visitor.new_imports: - self.add_import(import_info) - return visitor.to_be_renamed - - def remove_pyname(self, pyname): - """Removes pyname when imported in ``from mod import x``""" - visitor = actions.RemovePyNameVisitor(self.pycore, self.pymodule, - pyname, self._current_folder()) - for import_stmt in self.imports: - import_stmt.accept(visitor) - - -class _OneTimeSelector(object): - - def __init__(self, names): - self.names = names - self.selected_names = set() - - def __call__(self, imported_primary): - if self._can_name_be_added(imported_primary): - for name in self._get_dotted_tokens(imported_primary): - self.selected_names.add(name) - return True - return False - - def _get_dotted_tokens(self, imported_primary): - tokens = imported_primary.split('.') - for i in range(len(tokens)): - yield '.'.join(tokens[:i + 1]) - - def _can_name_be_added(self, imported_primary): - for name in self._get_dotted_tokens(imported_primary): - if name in self.names and name not in self.selected_names: - return True - return False - - -class _UnboundNameFinder(object): - - def __init__(self, pyobject): - self.pyobject = pyobject - - def _visit_child_scope(self, node): - pyobject = self.pyobject.get_module().get_scope().\ - get_inner_scope_for_line(node.lineno).pyobject - visitor = _LocalUnboundNameFinder(pyobject, self) - for child in ast.get_child_nodes(node): - ast.walk(child, visitor) - - def _FunctionDef(self, node): - self._visit_child_scope(node) - - def _ClassDef(self, node): - self._visit_child_scope(node) - - def _Name(self, node): - if self._get_root()._is_node_interesting(node) and \ - 
not self.is_bound(node.id): - self.add_unbound(node.id) - - def _Attribute(self, node): - result = [] - while isinstance(node, ast.Attribute): - result.append(node.attr) - node = node.value - if isinstance(node, ast.Name): - result.append(node.id) - primary = '.'.join(reversed(result)) - if self._get_root()._is_node_interesting(node) and \ - not self.is_bound(primary): - self.add_unbound(primary) - else: - ast.walk(node, self) - - def _get_root(self): - pass - - def is_bound(self, name, propagated=False): - pass - - def add_unbound(self, name): - pass - - -class _GlobalUnboundNameFinder(_UnboundNameFinder): - - def __init__(self, pymodule, wanted_pyobject): - super(_GlobalUnboundNameFinder, self).__init__(pymodule) - self.unbound = set() - self.names = set() - for name, pyname in pymodule._get_structural_attributes().items(): - if not isinstance(pyname, (rope.base.pynames.ImportedName, - rope.base.pynames.ImportedModule)): - self.names.add(name) - wanted_scope = wanted_pyobject.get_scope() - self.start = wanted_scope.get_start() - self.end = wanted_scope.get_end() + 1 - - def _get_root(self): - return self - - def is_bound(self, primary, propagated=False): - name = primary.split('.')[0] - if name in self.names: - return True - return False - - def add_unbound(self, name): - names = name.split('.') - for i in range(len(names)): - self.unbound.add('.'.join(names[:i + 1])) - - def _is_node_interesting(self, node): - return self.start <= node.lineno < self.end - - -class _LocalUnboundNameFinder(_UnboundNameFinder): - - def __init__(self, pyobject, parent): - super(_LocalUnboundNameFinder, self).__init__(pyobject) - self.parent = parent - - def _get_root(self): - return self.parent._get_root() - - def is_bound(self, primary, propagated=False): - name = primary.split('.')[0] - if propagated: - names = self.pyobject.get_scope().get_propagated_names() - else: - names = self.pyobject.get_scope().get_names() - if name in names or self.parent.is_bound(name, propagated=True): - return True - return False - - def add_unbound(self, name): - self.parent.add_unbound(name) - - -class _GlobalImportFinder(object): - - def __init__(self, pymodule, pycore): - self.current_folder = None - if pymodule.get_resource(): - self.current_folder = pymodule.get_resource().parent - self.pymodule = pymodule - self.pycore = pycore - self.imports = [] - self.pymodule = pymodule - self.lines = self.pymodule.lines - - def visit_import(self, node, end_line): - start_line = node.lineno - import_statement = importinfo.ImportStatement( - importinfo.NormalImport(self._get_names(node.names)), - start_line, end_line, self._get_text(start_line, end_line), - blank_lines=self._count_empty_lines_before(start_line)) - self.imports.append(import_statement) - - def _count_empty_lines_before(self, lineno): - result = 0 - for current in range(lineno - 1, 0, -1): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result - - def _count_empty_lines_after(self, lineno): - result = 0 - for current in range(lineno + 1, self.lines.length()): - line = self.lines.get_line(current) - if line.strip() == '': - result += 1 - else: - break - return result - - def get_separating_line_count(self): - if not self.imports: - return 0 - return self._count_empty_lines_after(self.imports[-1].end_line - 1) - - def _get_text(self, start_line, end_line): - result = [] - for index in range(start_line, end_line): - result.append(self.lines.get_line(index)) - return '\n'.join(result) - - def visit_from(self, node, 
end_line): - level = 0 - if node.level: - level = node.level - import_info = importinfo.FromImport( - node.module or '', # see comment at rope.base.ast.walk - level, self._get_names(node.names)) - start_line = node.lineno - self.imports.append(importinfo.ImportStatement( - import_info, node.lineno, end_line, - self._get_text(start_line, end_line), - blank_lines=self._count_empty_lines_before(start_line))) - - def _get_names(self, alias_names): - result = [] - for alias in alias_names: - result.append((alias.name, alias.asname)) - return result - - def find_import_statements(self): - nodes = self.pymodule.get_ast().body - for index, node in enumerate(nodes): - if isinstance(node, (ast.Import, ast.ImportFrom)): - lines = self.pymodule.logical_lines - end_line = lines.logical_line_in(node.lineno)[1] + 1 - if isinstance(node, ast.Import): - self.visit_import(node, end_line) - if isinstance(node, ast.ImportFrom): - self.visit_from(node, end_line) - return self.imports diff --git a/pymode/libs3/rope/refactor/inline.py b/pymode/libs3/rope/refactor/inline.py deleted file mode 100644 index cfd64a7e..00000000 --- a/pymode/libs3/rope/refactor/inline.py +++ /dev/null @@ -1,615 +0,0 @@ -# Known Bugs when inlining a function/method -# The values passed to function are inlined using _inlined_variable. -# This may cause two problems, illustrated in the examples below -# -# def foo(var1): -# var1 = var1*10 -# return var1 -# -# If a call to foo(20) is inlined, the result of inlined function is 20, -# but it should be 200. -# -# def foo(var1): -# var2 = var1*10 -# return var2 -# -# 2- If a call to foo(10+10) is inlined the result of inlined function is 110 -# but it should be 200. - -import re - -import rope.base.exceptions -import rope.refactor.functionutils -from rope.base import (pynames, pyobjects, codeanalyze, - taskhandle, evaluate, worder, utils) -from rope.base.change import ChangeSet, ChangeContents -from rope.refactor import (occurrences, rename, sourceutils, - importutils, move, change_signature) - -def unique_prefix(): - n = 0 - while True: - yield "__" + str(n) + "__" - n += 1 - -def create_inline(project, resource, offset): - """Create a refactoring object for inlining - - Based on `resource` and `offset` it returns an instance of - `InlineMethod`, `InlineVariable` or `InlineParameter`. - - """ - pycore = project.pycore - pyname = _get_pyname(pycore, resource, offset) - message = 'Inline refactoring should be performed on ' \ - 'a method, local variable or parameter.' 
- if pyname is None: - raise rope.base.exceptions.RefactoringError(message) - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - if isinstance(pyname, pynames.AssignedName): - return InlineVariable(project, resource, offset) - if isinstance(pyname, pynames.ParameterName): - return InlineParameter(project, resource, offset) - if isinstance(pyname.get_object(), pyobjects.PyFunction): - return InlineMethod(project, resource, offset) - else: - raise rope.base.exceptions.RefactoringError(message) - - -class _Inliner(object): - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - self.pyname = _get_pyname(self.pycore, resource, offset) - range_finder = worder.Worder(resource.read()) - self.region = range_finder.get_primary_range(offset) - self.name = range_finder.get_word_at(offset) - self.offset = offset - self.original = resource - - def get_changes(self, *args, **kwds): - pass - - def get_kind(self): - """Return either 'variable', 'method' or 'parameter'""" - - -class InlineMethod(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineMethod, self).__init__(*args, **kwds) - self.pyfunction = self.pyname.get_object() - self.pymodule = self.pyfunction.get_module() - self.resource = self.pyfunction.get_module().get_resource() - self.occurrence_finder = occurrences.create_finder( - self.pycore, self.name, self.pyname) - self.normal_generator = _DefinitionGenerator(self.project, - self.pyfunction) - self._init_imports() - - def _init_imports(self): - body = sourceutils.get_body(self.pyfunction) - body, imports = move.moving_code_with_imports( - self.pycore, self.resource, body) - self.imports = imports - self.others_generator = _DefinitionGenerator( - self.project, self.pyfunction, body=body) - - def _get_scope_range(self): - scope = self.pyfunction.get_scope() - lines = self.pymodule.lines - logicals = self.pymodule.logical_lines - start_line = scope.get_start() - if self.pyfunction.decorators: - decorators = self.pyfunction.decorators - if hasattr(decorators[0], 'lineno'): - start_line = decorators[0].lineno - start_offset = lines.get_line_start(start_line) - end_offset = min(lines.get_line_end(scope.end) + 1, - len(self.pymodule.source_code)) - return (start_offset, end_offset) - - def get_changes(self, remove=True, only_current=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - If `remove` is `False` the definition will not be removed. If - `only_current` is `True`, the the current occurrence will be - inlined, only. 
- """ - changes = ChangeSet('Inline method <%s>' % self.name) - if resources is None: - resources = self.pycore.get_python_files() - if only_current: - resources = [self.original] - if remove: - resources.append(self.resource) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - for file in resources: - job_set.started_job(file.path) - if file == self.resource: - changes.add_change(self._defining_file_changes( - changes, remove=remove, only_current=only_current)) - else: - aim = None - if only_current and self.original == file: - aim = self.offset - handle = _InlineFunctionCallsForModuleHandle( - self.pycore, file, self.others_generator, aim) - result = move.ModuleSkipRenamer( - self.occurrence_finder, file, handle).get_changed_module() - if result is not None: - result = _add_imports(self.pycore, result, - file, self.imports) - if remove: - result = _remove_from(self.pycore, self.pyname, - result, file) - changes.add_change(ChangeContents(file, result)) - job_set.finished_job() - return changes - - def _get_removed_range(self): - scope = self.pyfunction.get_scope() - lines = self.pymodule.lines - logical = self.pymodule.logical_lines - start_line = scope.get_start() - start, end = self._get_scope_range() - end_line = scope.get_end() - for i in range(end_line + 1, lines.length()): - if lines.get_line(i).strip() == '': - end_line = i - else: - break - end = min(lines.get_line_end(end_line) + 1, - len(self.pymodule.source_code)) - return (start, end) - - def _defining_file_changes(self, changes, remove, only_current): - start_offset, end_offset = self._get_removed_range() - aim = None - if only_current: - if self.resource == self.original: - aim = self.offset - else: - # we don't want to change any of them - aim = len(self.resource.read()) + 100 - handle = _InlineFunctionCallsForModuleHandle( - self.pycore, self.resource, - self.normal_generator, aim_offset=aim) - replacement = None - if remove: - replacement = self._get_method_replacement() - result = move.ModuleSkipRenamer( - self.occurrence_finder, self.resource, handle, start_offset, - end_offset, replacement).get_changed_module() - return ChangeContents(self.resource, result) - - def _get_method_replacement(self): - if self._is_the_last_method_of_a_class(): - indents = sourceutils.get_indents( - self.pymodule.lines, self.pyfunction.get_scope().get_start()) - return ' ' * indents + 'pass\n' - return '' - - def _is_the_last_method_of_a_class(self): - pyclass = self.pyfunction.parent - if not isinstance(pyclass, pyobjects.PyClass): - return False - class_start, class_end = sourceutils.get_body_region(pyclass) - source = self.pymodule.source_code - lines = self.pymodule.lines - func_start, func_end = self._get_scope_range() - if source[class_start:func_start].strip() == '' and \ - source[func_end:class_end].strip() == '': - return True - return False - - def get_kind(self): - return 'method' - - -class InlineVariable(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineVariable, self).__init__(*args, **kwds) - self.pymodule = self.pyname.get_definition_location()[0] - self.resource = self.pymodule.get_resource() - self._check_exceptional_conditions() - self._init_imports() - - def _check_exceptional_conditions(self): - if len(self.pyname.assignments) != 1: - raise rope.base.exceptions.RefactoringError( - 'Local variable should be assigned once for inlining.') - - def get_changes(self, remove=True, only_current=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - 
if rename._is_local(self.pyname): - resources = [self.resource] - else: - resources = self.pycore.get_python_files() - if only_current: - resources = [self.original] - if remove and self.original != self.resource: - resources.append(self.resource) - changes = ChangeSet('Inline variable <%s>' % self.name) - jobset = task_handle.create_jobset('Calculating changes', - len(resources)) - - for resource in resources: - jobset.started_job(resource.path) - if resource == self.resource: - source = self._change_main_module(remove, only_current) - changes.add_change(ChangeContents(self.resource, source)) - else: - result = self._change_module(resource, remove, only_current) - if result is not None: - result = _add_imports(self.pycore, result, - resource, self.imports) - changes.add_change(ChangeContents(resource, result)) - jobset.finished_job() - return changes - - def _change_main_module(self, remove, only_current): - region = None - if only_current and self.original == self.resource: - region = self.region - return _inline_variable(self.pycore, self.pymodule, self.pyname, - self.name, remove=remove, region=region) - - def _init_imports(self): - vardef = _getvardef(self.pymodule, self.pyname) - self.imported, self.imports = move.moving_code_with_imports( - self.pycore, self.resource, vardef) - - def _change_module(self, resource, remove, only_current): - filters = [occurrences.NoImportsFilter(), - occurrences.PyNameFilter(self.pyname)] - if only_current and resource == self.original: - def check_aim(occurrence): - start, end = occurrence.get_primary_range() - if self.offset < start or end < self.offset: - return False - filters.insert(0, check_aim) - finder = occurrences.Finder(self.pycore, self.name, filters=filters) - changed = rename.rename_in_module( - finder, self.imported, resource=resource, replace_primary=True) - if changed and remove: - changed = _remove_from(self.pycore, self.pyname, changed, resource) - return changed - - def get_kind(self): - return 'variable' - - -class InlineParameter(_Inliner): - - def __init__(self, *args, **kwds): - super(InlineParameter, self).__init__(*args, **kwds) - resource, offset = self._function_location() - index = self.pyname.index - self.changers = [change_signature.ArgumentDefaultInliner(index)] - self.signature = change_signature.ChangeSignature(self.project, - resource, offset) - - def _function_location(self): - pymodule, lineno = self.pyname.get_definition_location() - resource = pymodule.get_resource() - start = pymodule.lines.get_line_start(lineno) - word_finder = worder.Worder(pymodule.source_code) - offset = word_finder.find_function_offset(start) - return resource, offset - - def get_changes(self, **kwds): - """Get the changes needed by this refactoring - - See `rope.refactor.change_signature.ChangeSignature.get_changes()` - for arguments. 
- """ - return self.signature.get_changes(self.changers, **kwds) - - def get_kind(self): - return 'parameter' - - -def _join_lines(lines): - definition_lines = [] - for unchanged_line in lines: - line = unchanged_line.strip() - if line.endswith('\\'): - line = line[:-1].strip() - definition_lines.append(line) - joined = ' '.join(definition_lines) - return joined - - -class _DefinitionGenerator(object): - unique_prefix = unique_prefix() - def __init__(self, project, pyfunction, body=None): - self.pycore = project.pycore - self.pyfunction = pyfunction - self.pymodule = pyfunction.get_module() - self.resource = self.pymodule.get_resource() - self.definition_info = self._get_definition_info() - self.definition_params = self._get_definition_params() - self._calculated_definitions = {} - if body is not None: - self.body = body - else: - self.body = sourceutils.get_body(self.pyfunction) - - def _get_definition_info(self): - return rope.refactor.functionutils.DefinitionInfo.read(self.pyfunction) - - def _get_definition_params(self): - definition_info = self.definition_info - paramdict = dict([pair for pair in definition_info.args_with_defaults]) - if definition_info.args_arg is not None or \ - definition_info.keywords_arg is not None: - raise rope.base.exceptions.RefactoringError( - 'Cannot inline functions with list and keyword arguements.') - if self.pyfunction.get_kind() == 'classmethod': - paramdict[definition_info.args_with_defaults[0][0]] = \ - self.pyfunction.parent.get_name() - return paramdict - - def get_function_name(self): - return self.pyfunction.get_name() - - def get_definition(self, primary, pyname, call, host_vars=[],returns=False): - # caching already calculated definitions - return self._calculate_definition(primary, pyname, call, - host_vars, returns) - - def _calculate_header(self, primary, pyname, call): - # A header is created which initializes parameters - # to the values passed to the function. 
- call_info = rope.refactor.functionutils.CallInfo.read( - primary, pyname, self.definition_info, call) - paramdict = self.definition_params - mapping = rope.refactor.functionutils.ArgumentMapping( - self.definition_info, call_info) - for param_name, value in mapping.param_dict.items(): - paramdict[param_name] = value - header = '' - to_be_inlined = [] - mod = self.pycore.get_string_module(self.body) - all_names = mod.get_scope().get_names() - assigned_names = [name for name in all_names if - isinstance(all_names[name], rope.base.pynamesdef.AssignedName)] - for name, value in paramdict.items(): - if name != value and value is not None: - header += name + ' = ' + value.replace('\n', ' ') + '\n' - to_be_inlined.append(name) - return header, to_be_inlined - - def _calculate_definition(self, primary, pyname, call, host_vars, returns): - - header, to_be_inlined = self._calculate_header(primary, pyname, call) - - source = header + self.body - mod = self.pycore.get_string_module(source) - name_dict = mod.get_scope().get_names() - all_names = [x for x in name_dict if - not isinstance(name_dict[x], rope.base.builtins.BuiltinName)] - - # If there is a name conflict, all variable names - # inside the inlined function are renamed - if len(set(all_names).intersection(set(host_vars))) > 0: - - prefix = _DefinitionGenerator.unique_prefix.next() - guest = self.pycore.get_string_module(source, self.resource) - - to_be_inlined = [prefix+item for item in to_be_inlined] - for item in all_names: - pyname = guest[item] - occurrence_finder = occurrences.create_finder( - self.pycore, item, pyname) - source = rename.rename_in_module(occurrence_finder, - prefix+item, pymodule=guest) - guest = self.pycore.get_string_module(source, self.resource) - - #parameters not reassigned inside the functions are now inlined. 
-        for name in to_be_inlined:
-            pymodule = self.pycore.get_string_module(source, self.resource)
-            pyname = pymodule[name]
-            source = _inline_variable(self.pycore, pymodule, pyname, name)
-
-        return self._replace_returns_with(source, returns)
-
-    def _replace_returns_with(self, source, returns):
-        result = []
-        returned = None
-        last_changed = 0
-        for match in _DefinitionGenerator._get_return_pattern().finditer(source):
-            for key, value in match.groupdict().items():
-                if value and key == 'return':
-                    result.append(source[last_changed:match.start('return')])
-                    if returns:
-                        self._check_nothing_after_return(source,
-                                                         match.end('return'))
-                        returned = _join_lines(
-                            source[match.end('return'): len(source)].splitlines())
-                        last_changed = len(source)
-                    else:
-                        current = match.end('return')
-                        while current < len(source) and source[current] in ' \t':
-                            current += 1
-                        last_changed = current
-                        if current == len(source) or source[current] == '\n':
-                            result.append('pass')
-        result.append(source[last_changed:])
-        return ''.join(result), returned
-
-    def _check_nothing_after_return(self, source, offset):
-        lines = codeanalyze.SourceLinesAdapter(source)
-        lineno = lines.get_line_number(offset)
-        logical_lines = codeanalyze.LogicalLineFinder(lines)
-        lineno = logical_lines.logical_line_in(lineno)[1]
-        if source[lines.get_line_end(lineno):len(source)].strip() != '':
-            raise rope.base.exceptions.RefactoringError(
-                'Cannot inline functions with statements after return statement.')
-
-    @classmethod
-    def _get_return_pattern(cls):
-        if not hasattr(cls, '_return_pattern'):
-            def named_pattern(name, list_):
-                return "(?P<%s>" % name + "|".join(list_) + ")"
-            comment_pattern = named_pattern('comment', [r'#[^\n]*'])
-            string_pattern = named_pattern('string',
-                                           [codeanalyze.get_string_pattern()])
-            return_pattern = r'\b(?P<return>return)\b'
-            cls._return_pattern = re.compile(comment_pattern + "|" +
-                                             string_pattern + "|" +
-                                             return_pattern)
-        return cls._return_pattern
-
-
-class _InlineFunctionCallsForModuleHandle(object):
-
-    def __init__(self, pycore, resource,
-                 definition_generator, aim_offset=None):
-        """Inlines occurrences
-
-        If `aim` is not `None` only the occurrences that intersect
-        `aim` offset will be inlined.
-
-        """
-        self.pycore = pycore
-        self.generator = definition_generator
-        self.resource = resource
-        self.aim = aim_offset
-
-    def occurred_inside_skip(self, change_collector, occurrence):
-        if not occurrence.is_defined():
-            raise rope.base.exceptions.RefactoringError(
-                'Cannot inline functions that reference themselves')
-
-    def occurred_outside_skip(self, change_collector, occurrence):
-        start, end = occurrence.get_primary_range()
-        # we remove out of date imports later
-        if occurrence.is_in_import_statement():
-            return
-        # the function is referenced outside an import statement
-        if not occurrence.is_called():
-            raise rope.base.exceptions.RefactoringError(
-                'Reference to inlining function other than function call'
-                ' in <file: %s, offset: %d>' % (self.resource.path, start))
-        if self.aim is not None and (self.aim < start or self.aim > end):
-            return
-        end_parens = self._find_end_parens(self.source, end - 1)
-        lineno = self.lines.get_line_number(start)
-        start_line, end_line = self.pymodule.logical_lines.\
-            logical_line_in(lineno)
-        line_start = self.lines.get_line_start(start_line)
-        line_end = self.lines.get_line_end(end_line)
-
-
-        returns = self.source[line_start:start].strip() != '' or \
-                  self.source[end_parens:line_end].strip() != ''
-        indents = sourceutils.get_indents(self.lines, start_line)
-        primary, pyname = occurrence.get_primary_and_pyname()
-
-        host = self.pycore.resource_to_pyobject(self.resource)
-        scope = host.scope.get_inner_scope_for_line(lineno)
-        definition, returned = self.generator.get_definition(
-            primary, pyname, self.source[start:end_parens], scope.get_names(), returns=returns)
-
-        end = min(line_end + 1, len(self.source))
-        change_collector.add_change(line_start, end,
-            sourceutils.fix_indentation(definition, indents))
-        if returns:
-            name = returned
-            if name is None:
-                name = 'None'
-            change_collector.add_change(
-                line_end, end, self.source[line_start:start] + name +
-                self.source[end_parens:end])
-
-    def _find_end_parens(self, source, offset):
-        finder = worder.Worder(source)
-        return finder.get_word_parens_range(offset)[1]
-
-    @property
-    @utils.saveit
-    def pymodule(self):
-        return self.pycore.resource_to_pyobject(self.resource)
-
-    @property
-    @utils.saveit
-    def source(self):
-        if self.resource is not None:
-            return self.resource.read()
-        else:
-            return self.pymodule.source_code
-
-    @property
-    @utils.saveit
-    def lines(self):
-        return self.pymodule.lines
-
-
-def _inline_variable(pycore, pymodule, pyname, name,
-                     remove=True, region=None):
-    definition = _getvardef(pymodule, pyname)
-    start, end = _assigned_lineno(pymodule, pyname)
-
-    occurrence_finder = occurrences.create_finder(pycore, name, pyname)
-    changed_source = rename.rename_in_module(
-        occurrence_finder, definition, pymodule=pymodule,
-        replace_primary=True, writes=False, region=region)
-    if changed_source is None:
-        changed_source = pymodule.source_code
-    if remove:
-        lines = codeanalyze.SourceLinesAdapter(changed_source)
-        source = changed_source[:lines.get_line_start(start)] + \
-                 changed_source[lines.get_line_end(end) + 1:]
-    else:
-        source = changed_source
-    return source
-
-def _getvardef(pymodule, pyname):
-    assignment = pyname.assignments[0]
-    lines = pymodule.lines
-    start, end = _assigned_lineno(pymodule, pyname)
-    definition_with_assignment = _join_lines(
-        [lines.get_line(n) for n in range(start, end + 1)])
-    if assignment.levels:
-        raise rope.base.exceptions.RefactoringError(
-            'Cannot inline tuple assignments.')
-    definition = definition_with_assignment[definition_with_assignment.\
-        index('=')
+ 1:].strip() - return definition - -def _assigned_lineno(pymodule, pyname): - definition_line = pyname.assignments[0].ast_node.lineno - return pymodule.logical_lines.logical_line_in(definition_line) - -def _add_imports(pycore, source, resource, imports): - if not imports: - return source - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) - for import_info in imports: - module_import.add_import(import_info) - source = module_import.get_changed_source() - pymodule = pycore.get_string_module(source, resource) - import_tools = importutils.ImportTools(pycore) - return import_tools.organize_imports(pymodule, unused=False, sort=False) - -def _get_pyname(pycore, resource, offset): - pymodule = pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(pymodule, offset) - if isinstance(pyname, pynames.ImportedName): - pyname = pyname._get_imported_pyname() - return pyname - -def _remove_from(pycore, pyname, source, resource): - pymodule = pycore.get_string_module(source, resource) - module_import = importutils.get_module_imports(pycore, pymodule) - module_import.remove_pyname(pyname) - return module_import.get_changed_source() diff --git a/pymode/libs3/rope/refactor/introduce_factory.py b/pymode/libs3/rope/refactor/introduce_factory.py deleted file mode 100644 index 5a885587..00000000 --- a/pymode/libs3/rope/refactor/introduce_factory.py +++ /dev/null @@ -1,133 +0,0 @@ -import rope.base.exceptions -import rope.base.pyobjects -from rope.base import taskhandle, evaluate -from rope.base.change import (ChangeSet, ChangeContents) -from rope.refactor import rename, occurrences, sourceutils, importutils - - -class IntroduceFactory(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.offset = offset - - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.old_pyname = evaluate.eval_location(this_pymodule, offset) - if self.old_pyname is None or not isinstance(self.old_pyname.get_object(), - rope.base.pyobjects.PyClass): - raise rope.base.exceptions.RefactoringError( - 'Introduce factory should be performed on a class.') - self.old_name = self.old_pyname.get_object().get_name() - self.pymodule = self.old_pyname.get_object().get_module() - self.resource = self.pymodule.get_resource() - - def get_changes(self, factory_name, global_factory=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes this refactoring makes - - `factory_name` indicates the name of the factory function to - be added. If `global_factory` is `True` the factory will be - global otherwise a static method is added to the class. - - `resources` can be a list of `rope.base.resource.File`\s that - this refactoring should be applied on; if `None` all python - files in the project are searched. 
- - """ - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Introduce factory method <%s>' % factory_name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - self._change_module(resources, changes, factory_name, - global_factory, job_set) - return changes - - def get_name(self): - """Return the name of the class""" - return self.old_name - - def _change_module(self, resources, changes, - factory_name, global_, job_set): - if global_: - replacement = '__rope_factory_%s_' % factory_name - else: - replacement = self._new_function_name(factory_name, global_) - - for file_ in resources: - job_set.started_job(file_.path) - if file_ == self.resource: - self._change_resource(changes, factory_name, global_) - job_set.finished_job() - continue - changed_code = self._rename_occurrences(file_, replacement, - global_) - if changed_code is not None: - if global_: - new_pymodule = self.pycore.get_string_module(changed_code, - self.resource) - modname = self.pycore.modname(self.resource) - changed_code, imported = importutils.add_import( - self.pycore, new_pymodule, modname, factory_name) - changed_code = changed_code.replace(replacement, imported) - changes.add_change(ChangeContents(file_, changed_code)) - job_set.finished_job() - - def _change_resource(self, changes, factory_name, global_): - class_scope = self.old_pyname.get_object().get_scope() - source_code = self._rename_occurrences( - self.resource, self._new_function_name(factory_name, - global_), global_) - if source_code is None: - source_code = self.pymodule.source_code - else: - self.pymodule = self.pycore.get_string_module( - source_code, resource=self.resource) - lines = self.pymodule.lines - start = self._get_insertion_offset(class_scope, lines) - result = source_code[:start] - result += self._get_factory_method(lines, class_scope, - factory_name, global_) - result += source_code[start:] - changes.add_change(ChangeContents(self.resource, result)) - - def _get_insertion_offset(self, class_scope, lines): - start_line = class_scope.get_end() - if class_scope.get_scopes(): - start_line = class_scope.get_scopes()[-1].get_end() - start = lines.get_line_end(start_line) + 1 - return start - - def _get_factory_method(self, lines, class_scope, - factory_name, global_): - unit_indents = ' ' * sourceutils.get_indent(self.pycore) - if global_: - if self._get_scope_indents(lines, class_scope) > 0: - raise rope.base.exceptions.RefactoringError( - 'Cannot make global factory method for nested classes.') - return ('\ndef %s(*args, **kwds):\n%sreturn %s(*args, **kwds)\n' % - (factory_name, unit_indents, self.old_name)) - unindented_factory = \ - ('@staticmethod\ndef %s(*args, **kwds):\n' % factory_name + - '%sreturn %s(*args, **kwds)\n' % (unit_indents, self.old_name)) - indents = self._get_scope_indents(lines, class_scope) + \ - sourceutils.get_indent(self.pycore) - return '\n' + sourceutils.indent_lines(unindented_factory, indents) - - def _get_scope_indents(self, lines, scope): - return sourceutils.get_indents(lines, scope.get_start()) - - def _new_function_name(self, factory_name, global_): - if global_: - return factory_name - else: - return self.old_name + '.' 
+ factory_name - - def _rename_occurrences(self, file_, changed_name, global_factory): - finder = occurrences.create_finder(self.pycore, self.old_name, - self.old_pyname, only_calls=True) - result = rename.rename_in_module(finder, changed_name, resource=file_, - replace_primary=global_factory) - return result - -IntroduceFactoryRefactoring = IntroduceFactory diff --git a/pymode/libs3/rope/refactor/introduce_parameter.py b/pymode/libs3/rope/refactor/introduce_parameter.py deleted file mode 100644 index 312c61aa..00000000 --- a/pymode/libs3/rope/refactor/introduce_parameter.py +++ /dev/null @@ -1,95 +0,0 @@ -import rope.base.change -from rope.base import exceptions, evaluate, worder, codeanalyze -from rope.refactor import functionutils, sourceutils, occurrences - - -class IntroduceParameter(object): - """Introduce parameter refactoring - - This refactoring adds a new parameter to a function and replaces - references to an expression in it with the new parameter. - - The parameter finding part is different from finding similar - pieces in extract refactorings. In this refactoring parameters - are found based on the object they reference to. For instance - in:: - - class A(object): - var = None - - class B(object): - a = A() - - b = B() - a = b.a - - def f(a): - x = b.a.var + a.var - - using this refactoring on ``a.var`` with ``p`` as the new - parameter name, will result in:: - - def f(p=a.var): - x = p + p - - """ - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - scope = self.pymodule.get_scope().get_inner_scope_for_offset(offset) - if scope.get_kind() != 'Function': - raise exceptions.RefactoringError( - 'Introduce parameter should be performed inside functions') - self.pyfunction = scope.pyobject - self.name, self.pyname = self._get_name_and_pyname() - if self.pyname is None: - raise exceptions.RefactoringError( - 'Cannot find the definition of <%s>' % self.name) - - def _get_primary(self): - word_finder = worder.Worder(self.resource.read()) - return word_finder.get_primary_at(self.offset) - - def _get_name_and_pyname(self): - return (worder.get_name_at(self.resource, self.offset), - evaluate.eval_location(self.pymodule, self.offset)) - - def get_changes(self, new_parameter): - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - definition_info.args_with_defaults.append((new_parameter, - self._get_primary())) - collector = codeanalyze.ChangeCollector(self.resource.read()) - header_start, header_end = self._get_header_offsets() - body_start, body_end = sourceutils.get_body_region(self.pyfunction) - collector.add_change(header_start, header_end, - definition_info.to_string()) - self._change_function_occurances(collector, body_start, - body_end, new_parameter) - changes = rope.base.change.ChangeSet('Introduce parameter <%s>' % - new_parameter) - change = rope.base.change.ChangeContents(self.resource, - collector.get_changed()) - changes.add_change(change) - return changes - - def _get_header_offsets(self): - lines = self.pymodule.lines - start_line = self.pyfunction.get_scope().get_start() - end_line = self.pymodule.logical_lines.\ - logical_line_in(start_line)[1] - start = lines.get_line_start(start_line) - end = lines.get_line_end(end_line) - start = self.pymodule.source_code.find('def', start) + 4 - end = self.pymodule.source_code.rfind(':', start, end) - return start, end - - def _change_function_occurances(self, 
collector, function_start, - function_end, new_name): - finder = occurrences.create_finder(self.pycore, self.name, self.pyname) - for occurrence in finder.find_occurrences(resource=self.resource): - start, end = occurrence.get_primary_range() - if function_start <= start < function_end: - collector.add_change(start, end, new_name) diff --git a/pymode/libs3/rope/refactor/localtofield.py b/pymode/libs3/rope/refactor/localtofield.py deleted file mode 100644 index 391fcac9..00000000 --- a/pymode/libs3/rope/refactor/localtofield.py +++ /dev/null @@ -1,50 +0,0 @@ -from rope.base import pynames, evaluate, exceptions, worder -from rope.refactor.rename import Rename - - -class LocalToField(object): - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - self.resource = resource - self.offset = offset - - def get_changes(self): - name = worder.get_name_at(self.resource, self.offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - pyname = evaluate.eval_location(this_pymodule, self.offset) - if not self._is_a_method_local(pyname): - raise exceptions.RefactoringError( - 'Convert local variable to field should be performed on \n' - 'a local variable of a method.') - - pymodule, lineno = pyname.get_definition_location() - function_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - # Not checking redefinition - #self._check_redefinition(name, function_scope) - - new_name = self._get_field_name(function_scope.pyobject, name) - changes = Rename(self.project, self.resource, self.offset).\ - get_changes(new_name, resources=[self.resource]) - return changes - - def _check_redefinition(self, name, function_scope): - class_scope = function_scope.parent - if name in class_scope.pyobject: - raise exceptions.RefactoringError( - 'The field %s already exists' % name) - - def _get_field_name(self, pyfunction, name): - self_name = pyfunction.get_param_names()[0] - new_name = self_name + '.' 
+ name - return new_name - - def _is_a_method_local(self, pyname): - pymodule, lineno = pyname.get_definition_location() - holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno) - parent = holding_scope.parent - return isinstance(pyname, pynames.AssignedName) and \ - pyname in list(holding_scope.get_names().values()) and \ - holding_scope.get_kind() == 'Function' and \ - parent is not None and parent.get_kind() == 'Class' diff --git a/pymode/libs3/rope/refactor/method_object.py b/pymode/libs3/rope/refactor/method_object.py deleted file mode 100644 index b3dd6bdd..00000000 --- a/pymode/libs3/rope/refactor/method_object.py +++ /dev/null @@ -1,87 +0,0 @@ -import warnings - -from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze -from rope.refactor import sourceutils, occurrences, rename - - -class MethodObject(object): - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None or not isinstance(pyname.get_object(), - pyobjects.PyFunction): - raise exceptions.RefactoringError( - 'Replace method with method object refactoring should be ' - 'performed on a function.') - self.pyfunction = pyname.get_object() - self.pymodule = self.pyfunction.get_module() - self.resource = self.pymodule.get_resource() - - def get_new_class(self, name): - body = sourceutils.fix_indentation( - self._get_body(), sourceutils.get_indent(self.pycore) * 2) - return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \ - (name, self._get_init(), - ' ' * sourceutils.get_indent(self.pycore), body) - - def get_changes(self, classname=None, new_class_name=None): - if new_class_name is not None: - warnings.warn( - 'new_class_name parameter is deprecated; use classname', - DeprecationWarning, stacklevel=2) - classname = new_class_name - collector = codeanalyze.ChangeCollector(self.pymodule.source_code) - start, end = sourceutils.get_body_region(self.pyfunction) - indents = sourceutils.get_indents( - self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) - new_contents = ' ' * indents + 'return %s(%s)()\n' % \ - (classname, ', '.join(self._get_parameter_names())) - collector.add_change(start, end, new_contents) - insertion = self._get_class_insertion_point() - collector.add_change(insertion, insertion, - '\n\n' + self.get_new_class(classname)) - changes = change.ChangeSet('Replace method with method object refactoring') - changes.add_change(change.ChangeContents(self.resource, - collector.get_changed())) - return changes - - def _get_class_insertion_point(self): - current = self.pyfunction - while current.parent != self.pymodule: - current = current.parent - end = self.pymodule.lines.get_line_end(current.get_scope().get_end()) - return min(end + 1, len(self.pymodule.source_code)) - - def _get_body(self): - body = sourceutils.get_body(self.pyfunction) - for param in self._get_parameter_names(): - body = param + ' = None\n' + body - pymod = self.pycore.get_string_module(body, self.resource) - pyname = pymod[param] - finder = occurrences.create_finder(self.pycore, param, pyname) - result = rename.rename_in_module(finder, 'self.' 
+ param, - pymodule=pymod) - body = result[result.index('\n') + 1:] - return body - - def _get_init(self): - params = self._get_parameter_names() - indents = ' ' * sourceutils.get_indent(self.pycore) - if not params: - return '' - header = indents + 'def __init__(self' - body = '' - for arg in params: - new_name = arg - if arg == 'self': - new_name = 'host' - header += ', %s' % new_name - body += indents * 2 + 'self.%s = %s\n' % (arg, new_name) - header += '):' - return '%s\n%s\n' % (header, body) - - def _get_parameter_names(self): - return self.pyfunction.get_param_names() diff --git a/pymode/libs3/rope/refactor/move.py b/pymode/libs3/rope/refactor/move.py deleted file mode 100644 index eade323b..00000000 --- a/pymode/libs3/rope/refactor/move.py +++ /dev/null @@ -1,628 +0,0 @@ -"""A module containing classes for move refactoring - -`create_move()` is a factory for creating move refactoring objects -based on inputs. - -""" -from rope.base import pyobjects, codeanalyze, exceptions, pynames, taskhandle, evaluate, worder -from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor import importutils, rename, occurrences, sourceutils, functionutils - - -def create_move(project, resource, offset=None): - """A factory for creating Move objects - - Based on `resource` and `offset`, return one of `MoveModule`, - `MoveGlobal` or `MoveMethod` for performing move refactoring. - - """ - if offset is None: - return MoveModule(project, resource) - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError( - 'Move only works on classes, functions, modules and methods.') - pyobject = pyname.get_object() - if isinstance(pyobject, pyobjects.PyModule) or \ - isinstance(pyobject, pyobjects.PyPackage): - return MoveModule(project, pyobject.get_resource()) - if isinstance(pyobject, pyobjects.PyFunction) and \ - isinstance(pyobject.parent, pyobjects.PyClass): - return MoveMethod(project, resource, offset) - if isinstance(pyobject, pyobjects.PyDefinedObject) and \ - isinstance(pyobject.parent, pyobjects.PyModule): - return MoveGlobal(project, resource, offset) - raise exceptions.RefactoringError( - 'Move only works on global classes/functions, modules and methods.') - - -class MoveMethod(object): - """For moving methods - - It makes a new method in the destination class and changes - the body of the old method to call the new method. You can - inline the old method to change all of its occurrences. - - """ - - def __init__(self, project, resource, offset): - self.project = project - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - self.method_name = worder.get_name_at(resource, offset) - self.pyfunction = pyname.get_object() - if self.pyfunction.get_kind() != 'method': - raise exceptions.RefactoringError('Only normal methods' - ' can be moved.') - - def get_changes(self, dest_attr, new_name=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Return the changes needed for this refactoring - - Parameters: - - - `dest_attr`: the name of the destination attribute - - `new_name`: the name of the new method; if `None` uses - the old name - - `resources` can be a list of `rope.base.resources.File`\s to - apply this refactoring on. If `None`, the restructuring - will be applied to all python files. 
- - """ - changes = ChangeSet('Moving method <%s>' % self.method_name) - if resources is None: - resources = self.pycore.get_python_files() - if new_name is None: - new_name = self.get_method_name() - resource1, start1, end1, new_content1 = \ - self._get_changes_made_by_old_class(dest_attr, new_name) - collector1 = codeanalyze.ChangeCollector(resource1.read()) - collector1.add_change(start1, end1, new_content1) - - resource2, start2, end2, new_content2 = \ - self._get_changes_made_by_new_class(dest_attr, new_name) - if resource1 == resource2: - collector1.add_change(start2, end2, new_content2) - else: - collector2 = codeanalyze.ChangeCollector(resource2.read()) - collector2.add_change(start2, end2, new_content2) - result = collector2.get_changed() - import_tools = importutils.ImportTools(self.pycore) - new_imports = self._get_used_imports(import_tools) - if new_imports: - goal_pymodule = self.pycore.get_string_module(result, - resource2) - result = _add_imports_to_module( - import_tools, goal_pymodule, new_imports) - if resource2 in resources: - changes.add_change(ChangeContents(resource2, result)) - - if resource1 in resources: - changes.add_change(ChangeContents(resource1, - collector1.get_changed())) - return changes - - def get_method_name(self): - return self.method_name - - def _get_used_imports(self, import_tools): - return importutils.get_imports(self.pycore, self.pyfunction) - - def _get_changes_made_by_old_class(self, dest_attr, new_name): - pymodule = self.pyfunction.get_module() - indents = self._get_scope_indents(self.pyfunction) - body = 'return self.%s.%s(%s)\n' % (dest_attr, new_name, - self._get_passed_arguments_string()) - region = sourceutils.get_body_region(self.pyfunction) - return (pymodule.get_resource(), region[0], region[1], - sourceutils.fix_indentation(body, indents)) - - def _get_scope_indents(self, pyobject): - pymodule = pyobject.get_module() - return sourceutils.get_indents( - pymodule.lines, pyobject.get_scope().get_start()) + \ - sourceutils.get_indent(self.pycore) - - def _get_changes_made_by_new_class(self, dest_attr, new_name): - old_pyclass = self.pyfunction.parent - if dest_attr not in old_pyclass: - raise exceptions.RefactoringError( - 'Destination attribute <%s> not found' % dest_attr) - pyclass = old_pyclass[dest_attr].get_object().get_type() - if not isinstance(pyclass, pyobjects.PyClass): - raise exceptions.RefactoringError( - 'Unknown class type for attribute <%s>' % dest_attr) - pymodule = pyclass.get_module() - resource = pyclass.get_module().get_resource() - start, end = sourceutils.get_body_region(pyclass) - pre_blanks = '\n' - if pymodule.source_code[start:end].strip() != 'pass': - pre_blanks = '\n\n' - start = end - indents = self._get_scope_indents(pyclass) - body = pre_blanks + sourceutils.fix_indentation( - self.get_new_method(new_name), indents) - return resource, start, end, body - - def get_new_method(self, name): - return '%s\n%s' % ( - self._get_new_header(name), - sourceutils.fix_indentation(self._get_body(), - sourceutils.get_indent(self.pycore))) - - def _get_unchanged_body(self): - return sourceutils.get_body(self.pyfunction) - - def _get_body(self, host='host'): - self_name = self._get_self_name() - body = self_name + ' = None\n' + self._get_unchanged_body() - pymodule = self.pycore.get_string_module(body) - finder = occurrences.create_finder( - self.pycore, self_name, pymodule[self_name]) - result = rename.rename_in_module(finder, host, pymodule=pymodule) - if result is None: - result = body - return result[result.index('\n') 
+ 1:] - - def _get_self_name(self): - return self.pyfunction.get_param_names()[0] - - def _get_new_header(self, name): - header = 'def %s(self' % name - if self._is_host_used(): - header += ', host' - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - others = definition_info.arguments_to_string(1) - if others: - header += ', ' + others - return header + '):' - - def _get_passed_arguments_string(self): - result = '' - if self._is_host_used(): - result = 'self' - definition_info = functionutils.DefinitionInfo.read(self.pyfunction) - others = definition_info.arguments_to_string(1) - if others: - if result: - result += ', ' - result += others - return result - - def _is_host_used(self): - return self._get_body('__old_self') != self._get_unchanged_body() - - -class MoveGlobal(object): - """For moving global function and classes""" - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - this_pymodule = self.pycore.resource_to_pyobject(resource) - self.old_pyname = evaluate.eval_location(this_pymodule, offset) - self.old_name = self.old_pyname.get_object().get_name() - pymodule = self.old_pyname.get_object().get_module() - self.source = pymodule.get_resource() - self.tools = _MoveTools(self.pycore, self.source, - self.old_pyname, self.old_name) - self.import_tools = self.tools.import_tools - self._check_exceptional_conditions() - - def _check_exceptional_conditions(self): - if self.old_pyname is None or \ - not isinstance(self.old_pyname.get_object(), pyobjects.PyDefinedObject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a class/function.') - moving_pyobject = self.old_pyname.get_object() - if not self._is_global(moving_pyobject): - raise exceptions.RefactoringError( - 'Move refactoring should be performed on a global class/function.') - - def _is_global(self, pyobject): - return pyobject.get_scope().parent == pyobject.get_module().get_scope() - - def get_changes(self, dest, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.pycore.get_python_files() - if dest is None or not dest.exists(): - raise exceptions.RefactoringError( - 'Move destination does not exist.') - if dest.is_folder() and dest.has_child('__init__.py'): - dest = dest.get_child('__init__.py') - if dest.is_folder(): - raise exceptions.RefactoringError( - 'Move destination for non-modules should not be folders.') - if self.source == dest: - raise exceptions.RefactoringError( - 'Moving global elements to the same module.') - return self._calculate_changes(dest, resources, task_handle) - - def _calculate_changes(self, dest, resources, task_handle): - changes = ChangeSet('Moving global <%s>' % self.old_name) - job_set = task_handle.create_jobset('Collecting Changes', - len(resources)) - for file_ in resources: - job_set.started_job(file_.path) - if file_ == self.source: - changes.add_change(self._source_module_changes(dest)) - elif file_ == dest: - changes.add_change(self._dest_module_changes(dest)) - elif self.tools.occurs_in_module(resource=file_): - pymodule = self.pycore.resource_to_pyobject(file_) - # Changing occurrences - placeholder = '__rope_renaming_%s_' % self.old_name - source = self.tools.rename_in_module(placeholder, - resource=file_) - should_import = source is not None - # Removing out of date imports - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) - # Adding new import - if should_import: - pymodule = 
self.tools.new_pymodule(pymodule, source) - source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) - source = source.replace(placeholder, imported) - source = self.tools.new_source(pymodule, source) - if source != file_.read(): - changes.add_change(ChangeContents(file_, source)) - job_set.finished_job() - return changes - - def _source_module_changes(self, dest): - placeholder = '__rope_moving_%s_' % self.old_name - handle = _ChangeMoveOccurrencesHandle(placeholder) - occurrence_finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname) - start, end = self._get_moving_region() - renamer = ModuleSkipRenamer(occurrence_finder, self.source, - handle, start, end) - source = renamer.get_changed_module() - if handle.occurred: - pymodule = self.pycore.get_string_module(source, self.source) - # Adding new import - source, imported = importutils.add_import( - self.pycore, pymodule, self._new_modname(dest), self.old_name) - source = source.replace(placeholder, imported) - return ChangeContents(self.source, source) - - def _new_modname(self, dest): - return self.pycore.modname(dest) - - def _dest_module_changes(self, dest): - # Changing occurrences - pymodule = self.pycore.resource_to_pyobject(dest) - source = self.tools.rename_in_module(self.old_name, pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - - moving, imports = self._get_moving_element_with_imports() - source = self.tools.remove_old_imports(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - pymodule, has_changed = self._add_imports2(pymodule, imports) - - module_with_imports = self.import_tools.module_imports(pymodule) - source = pymodule.source_code - lineno = 0 - if module_with_imports.imports: - lineno = module_with_imports.imports[-1].end_line - 1 - else: - while lineno < pymodule.lines.length() and \ - pymodule.lines.get_line(lineno + 1).lstrip().startswith('#'): - lineno += 1 - if lineno > 0: - cut = pymodule.lines.get_line_end(lineno) + 1 - result = source[:cut] + '\n\n' + moving + source[cut:] - else: - result = moving + source - # Organizing imports - source = result - pymodule = self.pycore.get_string_module(source, dest) - source = self.import_tools.organize_imports(pymodule, sort=False, - unused=False) - return ChangeContents(dest, source) - - def _get_moving_element_with_imports(self): - return moving_code_with_imports( - self.pycore, self.source, self._get_moving_element()) - - def _get_module_with_imports(self, source_code, resource): - pymodule = self.pycore.get_string_module(source_code, resource) - return self.import_tools.module_imports(pymodule) - - def _get_moving_element(self): - start, end = self._get_moving_region() - moving = self.source.read()[start:end] - return moving.rstrip() + '\n' - - def _get_moving_region(self): - pymodule = self.pycore.resource_to_pyobject(self.source) - lines = pymodule.lines - scope = self.old_pyname.get_object().get_scope() - start = lines.get_line_start(scope.get_start()) - end_line = scope.get_end() - while end_line < lines.length() and \ - lines.get_line(end_line + 1).strip() == '': - end_line += 1 - end = min(lines.get_line_end(end_line) + 1, len(pymodule.source_code)) - return start, end - - def _add_imports2(self, pymodule, new_imports): - source = self.tools.add_imports(pymodule, new_imports) - if source is None: - return pymodule, False - else: - resource = pymodule.get_resource() - pymodule = self.pycore.get_string_module(source, resource) - return pymodule, True - - 
-class MoveModule(object): - """For moving modules and packages""" - - def __init__(self, project, resource): - self.project = project - self.pycore = project.pycore - if not resource.is_folder() and resource.name == '__init__.py': - resource = resource.parent - if resource.is_folder() and not resource.has_child('__init__.py'): - raise exceptions.RefactoringError( - 'Cannot move non-package folder.') - dummy_pymodule = self.pycore.get_string_module('') - self.old_pyname = pynames.ImportedModule(dummy_pymodule, - resource=resource) - self.source = self.old_pyname.get_object().get_resource() - if self.source.is_folder(): - self.old_name = self.source.name - else: - self.old_name = self.source.name[:-3] - self.tools = _MoveTools(self.pycore, self.source, - self.old_pyname, self.old_name) - self.import_tools = self.tools.import_tools - - def get_changes(self, dest, resources=None, - task_handle=taskhandle.NullTaskHandle()): - moving_pyobject = self.old_pyname.get_object() - if resources is None: - resources = self.pycore.get_python_files() - if dest is None or not dest.is_folder(): - raise exceptions.RefactoringError( - 'Move destination for modules should be packages.') - return self._calculate_changes(dest, resources, task_handle) - - def _calculate_changes(self, dest, resources, task_handle): - changes = ChangeSet('Moving module <%s>' % self.old_name) - job_set = task_handle.create_jobset('Collecting changes', - len(resources)) - for module in resources: - job_set.started_job(module.path) - if module == self.source: - self._change_moving_module(changes, dest) - else: - source = self._change_occurrences_in_module(dest, - resource=module) - if source is not None: - changes.add_change(ChangeContents(module, source)) - job_set.finished_job() - if self.project == self.source.project: - changes.add_change(MoveResource(self.source, dest.path)) - return changes - - def _new_modname(self, dest): - destname = self.pycore.modname(dest) - if destname: - return destname + '.' 
+ self.old_name - return self.old_name - - def _new_import(self, dest): - return importutils.NormalImport([(self._new_modname(dest), None)]) - - def _change_moving_module(self, changes, dest): - if not self.source.is_folder(): - pymodule = self.pycore.resource_to_pyobject(self.source) - source = self.import_tools.relatives_to_absolutes(pymodule) - pymodule = self.tools.new_pymodule(pymodule, source) - source = self._change_occurrences_in_module(dest, pymodule) - source = self.tools.new_source(pymodule, source) - if source != self.source.read(): - changes.add_change(ChangeContents(self.source, source)) - - def _change_occurrences_in_module(self, dest, pymodule=None, - resource=None): - if not self.tools.occurs_in_module(pymodule=pymodule, - resource=resource): - return - if pymodule is None: - pymodule = self.pycore.resource_to_pyobject(resource) - new_name = self._new_modname(dest) - new_import = self._new_import(dest) - source = self.tools.rename_in_module( - new_name, imports=True, pymodule=pymodule, resource=resource) - should_import = self.tools.occurs_in_module( - pymodule=pymodule, resource=resource, imports=False) - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.remove_old_imports(pymodule) - if should_import: - pymodule = self.tools.new_pymodule(pymodule, source) - source = self.tools.add_imports(pymodule, [new_import]) - source = self.tools.new_source(pymodule, source) - if source != pymodule.resource.read(): - return source - - -class _ChangeMoveOccurrencesHandle(object): - - def __init__(self, new_name): - self.new_name = new_name - self.occurred = False - - def occurred_inside_skip(self, change_collector, occurrence): - pass - - def occurred_outside_skip(self, change_collector, occurrence): - start, end = occurrence.get_primary_range() - change_collector.add_change(start, end, self.new_name) - self.occurred = True - - -class _MoveTools(object): - - def __init__(self, pycore, source, pyname, old_name): - self.pycore = pycore - self.source = source - self.old_pyname = pyname - self.old_name = old_name - self.import_tools = importutils.ImportTools(self.pycore) - - def remove_old_imports(self, pymodule): - old_source = pymodule.source_code - module_with_imports = self.import_tools.module_imports(pymodule) - class CanSelect(object): - changed = False - old_name = self.old_name - old_pyname = self.old_pyname - def __call__(self, name): - try: - if name == self.old_name and \ - pymodule[name].get_object() == \ - self.old_pyname.get_object(): - self.changed = True - return False - except exceptions.AttributeNotFoundError: - pass - return True - can_select = CanSelect() - module_with_imports.filter_names(can_select) - new_source = module_with_imports.get_changed_source() - if old_source != new_source: - return new_source - - def rename_in_module(self, new_name, pymodule=None, - imports=False, resource=None): - occurrence_finder = self._create_finder(imports) - source = rename.rename_in_module( - occurrence_finder, new_name, replace_primary=True, - pymodule=pymodule, resource=resource) - return source - - def occurs_in_module(self, pymodule=None, resource=None, imports=True): - finder = self._create_finder(imports) - for occurrence in finder.find_occurrences(pymodule=pymodule, - resource=resource): - return True - return False - - def _create_finder(self, imports): - return occurrences.create_finder(self.pycore, self.old_name, - self.old_pyname, imports=imports) - - def new_pymodule(self, pymodule, source): - if source is not None: - return 
self.pycore.get_string_module( - source, pymodule.get_resource()) - return pymodule - - def new_source(self, pymodule, source): - if source is None: - return pymodule.source_code - return source - - def add_imports(self, pymodule, new_imports): - return _add_imports_to_module(self.import_tools, pymodule, new_imports) - - -def _add_imports_to_module(import_tools, pymodule, new_imports): - module_with_imports = import_tools.module_imports(pymodule) - for new_import in new_imports: - module_with_imports.add_import(new_import) - return module_with_imports.get_changed_source() - - -def moving_code_with_imports(pycore, resource, source): - import_tools = importutils.ImportTools(pycore) - pymodule = pycore.get_string_module(source, resource) - origin = pycore.resource_to_pyobject(resource) - - imports = [] - for stmt in import_tools.module_imports(origin).imports: - imports.append(stmt.import_info) - - back_names = [] - for name in origin: - if name not in pymodule: - back_names.append(name) - imports.append(import_tools.get_from_import(resource, back_names)) - - source = _add_imports_to_module(import_tools, pymodule, imports) - pymodule = pycore.get_string_module(source, resource) - - source = import_tools.relatives_to_absolutes(pymodule) - pymodule = pycore.get_string_module(source, resource) - source = import_tools.organize_imports(pymodule, selfs=False) - pymodule = pycore.get_string_module(source, resource) - - # extracting imports after changes - module_imports = import_tools.module_imports(pymodule) - imports = [import_stmt.import_info - for import_stmt in module_imports.imports] - start = 1 - if module_imports.imports: - start = module_imports.imports[-1].end_line - lines = codeanalyze.SourceLinesAdapter(source) - while start < lines.length() and not lines.get_line(start).strip(): - start += 1 - moving = source[lines.get_line_start(start):] - return moving, imports - - -class ModuleSkipRenamerHandle(object): - - def occurred_outside_skip(self, change_collector, occurrence): - pass - - def occurred_inside_skip(self, change_collector, occurrence): - pass - - -class ModuleSkipRenamer(object): - """Rename occurrences in a module - - This class can be used when you want to treat a region in a file - separately from other parts when renaming. - - """ - - def __init__(self, occurrence_finder, resource, handle=None, - skip_start=0, skip_end=0, replacement=''): - """Constructor - - if replacement is `None` the region is not changed. Otherwise - it is replaced with `replacement`. 
- - """ - self.occurrence_finder = occurrence_finder - self.resource = resource - self.skip_start = skip_start - self.skip_end = skip_end - self.replacement = replacement - self.handle = handle - if self.handle is None: - self.handle = ModuleSkipHandle() - - def get_changed_module(self): - source = self.resource.read() - change_collector = codeanalyze.ChangeCollector(source) - if self.replacement is not None: - change_collector.add_change(self.skip_start, self.skip_end, - self.replacement) - for occurrence in self.occurrence_finder.find_occurrences(self.resource): - start, end = occurrence.get_primary_range() - if self.skip_start <= start < self.skip_end: - self.handle.occurred_inside_skip(change_collector, occurrence) - else: - self.handle.occurred_outside_skip(change_collector, occurrence) - result = change_collector.get_changed() - if result is not None and result != source: - return result diff --git a/pymode/libs3/rope/refactor/multiproject.py b/pymode/libs3/rope/refactor/multiproject.py deleted file mode 100644 index 6a85d2a2..00000000 --- a/pymode/libs3/rope/refactor/multiproject.py +++ /dev/null @@ -1,78 +0,0 @@ -"""This module can be used for performing cross-project refactorings - -See the "cross-project refactorings" section of ``docs/library.txt`` -file. - -""" - -from rope.base import resources, project, libutils - - -class MultiProjectRefactoring(object): - - def __init__(self, refactoring, projects, addpath=True): - """Create a multiproject proxy for the main refactoring - - `projects` are other project. - - """ - self.refactoring = refactoring - self.projects = projects - self.addpath = addpath - - def __call__(self, project, *args, **kwds): - """Create the refactoring""" - return _MultiRefactoring(self.refactoring, self.projects, - self.addpath, project, *args, **kwds) - - -class _MultiRefactoring(object): - - def __init__(self, refactoring, other_projects, addpath, - project, *args, **kwds): - self.refactoring = refactoring - self.projects = [project] + other_projects - for other_project in other_projects: - for folder in self.project.pycore.get_source_folders(): - other_project.get_prefs().add('python_path', folder.real_path) - self.refactorings = [] - for other in self.projects: - args, kwds = self._resources_for_args(other, args, kwds) - self.refactorings.append( - self.refactoring(other, *args, **kwds)) - - def get_all_changes(self, *args, **kwds): - """Get a project to changes dict""" - result = [] - for project, refactoring in zip(self.projects, self.refactorings): - args, kwds = self._resources_for_args(project, args, kwds) - result.append((project, refactoring.get_changes(*args, **kwds))) - return result - - def __getattr__(self, name): - return getattr(self.main_refactoring, name) - - def _resources_for_args(self, project, args, kwds): - newargs = [self._change_project_resource(project, arg) for arg in args] - newkwds = dict((name, self._change_project_resource(project, value)) - for name, value in kwds.items()) - return newargs, newkwds - - def _change_project_resource(self, project, obj): - if isinstance(obj, resources.Resource) and \ - obj.project != project: - return libutils.path_to_resource(project, obj.real_path) - return obj - - @property - def project(self): - return self.projects[0] - - @property - def main_refactoring(self): - return self.refactorings[0] - - -def perform(project_changes): - for project, changes in project_changes: - project.do(changes) diff --git a/pymode/libs3/rope/refactor/occurrences.py b/pymode/libs3/rope/refactor/occurrences.py 
deleted file mode 100644 index 2808ed2c..00000000 --- a/pymode/libs3/rope/refactor/occurrences.py +++ /dev/null @@ -1,334 +0,0 @@ -import re - -import rope.base.pynames -from rope.base import pynames, pyobjects, codeanalyze, evaluate, exceptions, utils, worder - - -class Finder(object): - """For finding occurrences of a name - - The constructor takes a `filters` argument. It should be a list - of functions that take a single argument. For each possible - occurrence, these functions are called in order with the an - instance of `Occurrence`: - - * If it returns `None` other filters are tried. - * If it returns `True`, the occurrence will be a match. - * If it returns `False`, the occurrence will be skipped. - * If all of the filters return `None`, it is skipped also. - - """ - - def __init__(self, pycore, name, filters=[lambda o: True], docs=False): - self.pycore = pycore - self.name = name - self.docs = docs - self.filters = filters - self._textual_finder = _TextualFinder(name, docs=docs) - - def find_occurrences(self, resource=None, pymodule=None): - """Generate `Occurrence` instances""" - tools = _OccurrenceToolsCreator(self.pycore, resource=resource, - pymodule=pymodule, docs=self.docs) - for offset in self._textual_finder.find_offsets(tools.source_code): - occurrence = Occurrence(tools, offset) - for filter in self.filters: - result = filter(occurrence) - if result is None: - continue - if result: - yield occurrence - break - - -def create_finder(pycore, name, pyname, only_calls=False, imports=True, - unsure=None, docs=False, instance=None, in_hierarchy=False): - """A factory for `Finder` - - Based on the arguments it creates a list of filters. `instance` - argument is needed only when you want implicit interfaces to be - considered. - - """ - pynames = set([pyname]) - filters = [] - if only_calls: - filters.append(CallsFilter()) - if not imports: - filters.append(NoImportsFilter()) - if isinstance(instance, rope.base.pynames.ParameterName): - for pyobject in instance.get_objects(): - try: - pynames.add(pyobject[name]) - except exceptions.AttributeNotFoundError: - pass - for pyname in pynames: - filters.append(PyNameFilter(pyname)) - if in_hierarchy: - filters.append(InHierarchyFilter(pyname)) - if unsure: - filters.append(UnsureFilter(unsure)) - return Finder(pycore, name, filters=filters, docs=docs) - - -class Occurrence(object): - - def __init__(self, tools, offset): - self.tools = tools - self.offset = offset - self.resource = tools.resource - - @utils.saveit - def get_word_range(self): - return self.tools.word_finder.get_word_range(self.offset) - - @utils.saveit - def get_primary_range(self): - return self.tools.word_finder.get_primary_range(self.offset) - - @utils.saveit - def get_pyname(self): - try: - return self.tools.name_finder.get_pyname_at(self.offset) - except exceptions.BadIdentifierError: - pass - - @utils.saveit - def get_primary_and_pyname(self): - try: - return self.tools.name_finder.get_primary_and_pyname_at(self.offset) - except exceptions.BadIdentifierError: - pass - - @utils.saveit - def is_in_import_statement(self): - return (self.tools.word_finder.is_from_statement(self.offset) or - self.tools.word_finder.is_import_statement(self.offset)) - - def is_called(self): - return self.tools.word_finder.is_a_function_being_called(self.offset) - - def is_defined(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) - - def is_a_fixed_primary(self): - return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) or \ 
- self.tools.word_finder.is_a_name_after_from_import(self.offset) - - def is_written(self): - return self.tools.word_finder.is_assigned_here(self.offset) - - def is_unsure(self): - return unsure_pyname(self.get_pyname()) - - @property - @utils.saveit - def lineno(self): - offset = self.get_word_range()[0] - return self.tools.pymodule.lines.get_line_number(offset) - - -def same_pyname(expected, pyname): - """Check whether `expected` and `pyname` are the same""" - if expected is None or pyname is None: - return False - if expected == pyname: - return True - if type(expected) not in (pynames.ImportedModule, pynames.ImportedName) and \ - type(pyname) not in (pynames.ImportedModule, pynames.ImportedName): - return False - return expected.get_definition_location() == pyname.get_definition_location() and \ - expected.get_object() == pyname.get_object() - -def unsure_pyname(pyname, unbound=True): - """Return `True` if we don't know what this name references""" - if pyname is None: - return True - if unbound and not isinstance(pyname, pynames.UnboundName): - return False - if pyname.get_object() == pyobjects.get_unknown(): - return True - - -class PyNameFilter(object): - """For finding occurrences of a name""" - - def __init__(self, pyname): - self.pyname = pyname - - def __call__(self, occurrence): - if same_pyname(self.pyname, occurrence.get_pyname()): - return True - - -class InHierarchyFilter(object): - """For finding occurrences of a name""" - - def __init__(self, pyname, implementations_only=False): - self.pyname = pyname - self.impl_only = implementations_only - self.pyclass = self._get_containing_class(pyname) - if self.pyclass is not None: - self.name = pyname.get_object().get_name() - self.roots = self._get_root_classes(self.pyclass, self.name) - else: - self.roots = None - - def __call__(self, occurrence): - if self.roots is None: - return - pyclass = self._get_containing_class(occurrence.get_pyname()) - if pyclass is not None: - roots = self._get_root_classes(pyclass, self.name) - if self.roots.intersection(roots): - return True - - def _get_containing_class(self, pyname): - if isinstance(pyname, pynames.DefinedName): - scope = pyname.get_object().get_scope() - parent = scope.parent - if parent is not None and parent.get_kind() == 'Class': - return parent.pyobject - - def _get_root_classes(self, pyclass, name): - if self.impl_only and pyclass == self.pyclass: - return set([pyclass]) - result = set() - for superclass in pyclass.get_superclasses(): - if name in superclass: - result.update(self._get_root_classes(superclass, name)) - if not result: - return set([pyclass]) - return result - - -class UnsureFilter(object): - - def __init__(self, unsure): - self.unsure = unsure - - def __call__(self, occurrence): - if occurrence.is_unsure() and self.unsure(occurrence): - return True - - -class NoImportsFilter(object): - - def __call__(self, occurrence): - if occurrence.is_in_import_statement(): - return False - - -class CallsFilter(object): - - def __call__(self, occurrence): - if not occurrence.is_called(): - return False - - -class _TextualFinder(object): - - def __init__(self, name, docs=False): - self.name = name - self.docs = docs - self.comment_pattern = _TextualFinder.any('comment', [r'#[^\n]*']) - self.string_pattern = _TextualFinder.any( - 'string', [codeanalyze.get_string_pattern()]) - self.pattern = self._get_occurrence_pattern(self.name) - - def find_offsets(self, source): - if not self._fast_file_query(source): - return - if self.docs: - searcher = self._normal_search - else: - 
searcher = self._re_search - for matched in searcher(source): - yield matched - - def _re_search(self, source): - for match in self.pattern.finditer(source): - for key, value in match.groupdict().items(): - if value and key == 'occurrence': - yield match.start(key) - - def _normal_search(self, source): - current = 0 - while True: - try: - found = source.index(self.name, current) - current = found + len(self.name) - if (found == 0 or not self._is_id_char(source[found - 1])) and \ - (current == len(source) or not self._is_id_char(source[current])): - yield found - except ValueError: - break - - def _is_id_char(self, c): - return c.isalnum() or c == '_' - - def _fast_file_query(self, source): - try: - source.index(self.name) - return True - except ValueError: - return False - - def _get_source(self, resource, pymodule): - if resource is not None: - return resource.read() - else: - return pymodule.source_code - - def _get_occurrence_pattern(self, name): - occurrence_pattern = _TextualFinder.any('occurrence', - ['\\b' + name + '\\b']) - pattern = re.compile(occurrence_pattern + '|' + self.comment_pattern + - '|' + self.string_pattern) - return pattern - - @staticmethod - def any(name, list_): - return '(?P<%s>' % name + '|'.join(list_) + ')' - - -class _OccurrenceToolsCreator(object): - - def __init__(self, pycore, resource=None, pymodule=None, docs=False): - self.pycore = pycore - self.__resource = resource - self.__pymodule = pymodule - self.docs = docs - - @property - @utils.saveit - def name_finder(self): - return evaluate.ScopeNameFinder(self.pymodule) - - @property - @utils.saveit - def source_code(self): - if self.__resource is not None: - return self.resource.read() - else: - return self.pymodule.source_code - - @property - @utils.saveit - def word_finder(self): - return worder.Worder(self.source_code, self.docs) - - @property - @utils.saveit - def resource(self): - if self.__resource is not None: - return self.__resource - if self.__pymodule is not None: - return self.__pymodule.resource - - @property - @utils.saveit - def pymodule(self): - if self.__pymodule is not None: - return self.__pymodule - return self.pycore.resource_to_pyobject(self.resource) diff --git a/pymode/libs3/rope/refactor/patchedast.py b/pymode/libs3/rope/refactor/patchedast.py deleted file mode 100644 index 042b33dd..00000000 --- a/pymode/libs3/rope/refactor/patchedast.py +++ /dev/null @@ -1,734 +0,0 @@ -import collections -import re -import warnings - -from rope.base import ast, codeanalyze, exceptions - - -def get_patched_ast(source, sorted_children=False): - """Adds ``region`` and ``sorted_children`` fields to nodes - - Adds ``sorted_children`` field only if `sorted_children` is True. - - """ - return patch_ast(ast.parse(source), source, sorted_children) - - -def patch_ast(node, source, sorted_children=False): - """Patches the given node - - After calling, each node in `node` will have a new field named - `region` that is a tuple containing the start and end offsets - of the code that generated it. - - If `sorted_children` is true, a `sorted_children` field will - be created for each node, too. It is a list containing child - nodes as well as whitespaces and comments that occur between - them. 
- - """ - if hasattr(node, 'region'): - return node - walker = _PatchingASTWalker(source, children=sorted_children) - ast.call_for_nodes(node, walker) - return node - - -def node_region(patched_ast_node): - """Get the region of a patched ast node""" - return patched_ast_node.region - - -def write_ast(patched_ast_node): - """Extract source form a patched AST node with `sorted_children` field - - If the node is patched with sorted_children turned off you can use - `node_region` function for obtaining code using module source code. - """ - result = [] - for child in patched_ast_node.sorted_children: - if isinstance(child, ast.AST): - result.append(write_ast(child)) - else: - result.append(child) - return ''.join(result) - - -class MismatchedTokenError(exceptions.RopeError): - pass - - -class _PatchingASTWalker(object): - - def __init__(self, source, children=False): - self.source = _Source(source) - self.children = children - self.lines = codeanalyze.SourceLinesAdapter(source) - self.children_stack = [] - - Number = object() - String = object() - - def __call__(self, node): - method = getattr(self, '_' + node.__class__.__name__, None) - if method is not None: - return method(node) - # ???: Unknown node; what should we do here? - warnings.warn('Unknown node type <%s>; please report!' - % node.__class__.__name__, RuntimeWarning) - node.region = (self.source.offset, self.source.offset) - if self.children: - node.sorted_children = ast.get_children(node) - - def _handle(self, node, base_children, eat_parens=False, eat_spaces=False): - if hasattr(node, 'region'): - # ???: The same node was seen twice; what should we do? - warnings.warn( - 'Node <%s> has been already patched; please report!' % - node.__class__.__name__, RuntimeWarning) - return - base_children = collections.deque(base_children) - self.children_stack.append(base_children) - children = collections.deque() - formats = [] - suspected_start = self.source.offset - start = suspected_start - first_token = True - while base_children: - child = base_children.popleft() - if child is None: - continue - offset = self.source.offset - if isinstance(child, ast.arg): - region = self.source.consume(child.arg) - child = self.source[region[0]:region[1]] - token_start = offset - elif isinstance(child, ast.AST): - ast.call_for_nodes(child, self) - token_start = child.region[0] - else: - if child is self.String: - region = self.source.consume_string( - end=self._find_next_statement_start()) - elif child is self.Number: - region = self.source.consume_number() - elif child == '!=': - # INFO: This has been added to handle deprecated ``<>`` - region = self.source.consume_not_equal() - else: - region = self.source.consume(child) - child = self.source[region[0]:region[1]] - token_start = region[0] - if not first_token: - formats.append(self.source[offset:token_start]) - if self.children: - children.append(self.source[offset:token_start]) - else: - first_token = False - start = token_start - if self.children: - children.append(child) - start = self._handle_parens(children, start, formats) - if eat_parens: - start = self._eat_surrounding_parens( - children, suspected_start, start) - if eat_spaces: - if self.children: - children.appendleft(self.source[0:start]) - end_spaces = self.source[self.source.offset:] - self.source.consume(end_spaces) - if self.children: - children.append(end_spaces) - start = 0 - if self.children: - node.sorted_children = children - node.region = (start, self.source.offset) - self.children_stack.pop() - - def _handle_parens(self, children, 
start, formats): - """Changes `children` and returns new start""" - opens, closes = self._count_needed_parens(formats) - old_end = self.source.offset - new_end = None - for i in range(closes): - new_end = self.source.consume(')')[1] - if new_end is not None: - if self.children: - children.append(self.source[old_end:new_end]) - new_start = start - for i in range(opens): - new_start = self.source.rfind_token('(', 0, new_start) - if new_start != start: - if self.children: - children.appendleft(self.source[new_start:start]) - start = new_start - return start - - def _eat_surrounding_parens(self, children, suspected_start, start): - index = self.source.rfind_token('(', suspected_start, start) - if index is not None: - old_start = start - old_offset = self.source.offset - start = index - if self.children: - children.appendleft(self.source[start + 1:old_start]) - children.appendleft('(') - token_start, token_end = self.source.consume(')') - if self.children: - children.append(self.source[old_offset:token_start]) - children.append(')') - return start - - def _count_needed_parens(self, children): - start = 0 - opens = 0 - for child in children: - if not isinstance(child, str): - continue - if child == '' or child[0] in '\'"': - continue - index = 0 - while index < len(child): - if child[index] == ')': - if opens > 0: - opens -= 1 - else: - start += 1 - if child[index] == '(': - opens += 1 - if child[index] == '#': - try: - index = child.index('\n', index) - except ValueError: - break - index += 1 - return start, opens - - def _find_next_statement_start(self): - for children in reversed(self.children_stack): - for child in children: - if isinstance(child, ast.stmt): - return child.col_offset \ - + self.lines.get_line_start(child.lineno) - return len(self.source.source) - - _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', 'Mult': '*', - 'Div': '/', 'Mod': '%', 'Pow': '**', 'LShift': '<<', - 'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', 'BitXor': '^', - 'FloorDiv': '//', 'Invert': '~', 'Not': 'not', 'UAdd': '+', - 'USub': '-', 'Eq': '==', 'NotEq': '!=', 'Lt': '<', - 'LtE': '<=', 'Gt': '>', 'GtE': '>=', 'Is': 'is', - 'IsNot': 'is not', 'In': 'in', 'NotIn': 'not in'} - - def _get_op(self, node): - return self._operators[node.__class__.__name__].split(' ') - - def _Attribute(self, node): - self._handle(node, [node.value, '.', node.attr]) - - def _Assert(self, node): - children = ['assert', node.test] - if node.msg: - children.append(',') - children.append(node.msg) - self._handle(node, children) - - def _Assign(self, node): - children = self._child_nodes(node.targets, '=') - children.append('=') - children.append(node.value) - self._handle(node, children) - - def _AugAssign(self, node): - children = [node.target] - children.extend(self._get_op(node.op)) - children.extend(['=', node.value]) - self._handle(node, children) - - def _Repr(self, node): - self._handle(node, ['`', node.value, '`']) - - def _BinOp(self, node): - children = [node.left] + self._get_op(node.op) + [node.right] - self._handle(node, children) - - def _BoolOp(self, node): - self._handle(node, self._child_nodes(node.values, - self._get_op(node.op)[0])) - - def _Break(self, node): - self._handle(node, ['break']) - - def _Call(self, node): - children = [node.func, '('] - args = list(node.args) + node.keywords - children.extend(self._child_nodes(args, ',')) - if node.starargs is not None: - if args: - children.append(',') - children.extend(['*', node.starargs]) - if node.kwargs is not None: - if args or node.starargs is not 
None: - children.append(',') - children.extend(['**', node.kwargs]) - children.append(')') - self._handle(node, children) - - def _ClassDef(self, node): - children = [] - if getattr(node, 'decorator_list', None): - for decorator in node.decorator_list: - children.append('@') - children.append(decorator) - children.extend(['class', node.name]) - if node.bases: - children.append('(') - children.extend(self._child_nodes(node.bases, ',')) - children.append(')') - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _Compare(self, node): - children = [] - children.append(node.left) - for op, expr in zip(node.ops, node.comparators): - children.extend(self._get_op(op)) - children.append(expr) - self._handle(node, children) - - def _Delete(self, node): - self._handle(node, ['del'] + self._child_nodes(node.targets, ',')) - - def _Num(self, node): - self._handle(node, [self.Number]) - - def _Str(self, node): - self._handle(node, [self.String]) - - def _Continue(self, node): - self._handle(node, ['continue']) - - def _Dict(self, node): - children = [] - children.append('{') - if node.keys: - for index, (key, value) in enumerate(list(zip(node.keys, node.values))): - children.extend([key, ':', value]) - if index < len(node.keys) - 1: - children.append(',') - children.append('}') - self._handle(node, children) - - def _Ellipsis(self, node): - self._handle(node, ['...']) - - def _Expr(self, node): - self._handle(node, [node.value]) - - def _Exec(self, node): - children = [] - children.extend(['exec', node.body]) - if node.globals: - children.extend(['in', node.globals]) - if node.locals: - children.extend([',', node.locals]) - self._handle(node, children) - - def _ExtSlice(self, node): - children = [] - for index, dim in enumerate(node.dims): - if index > 0: - children.append(',') - children.append(dim) - self._handle(node, children) - - def _For(self, node): - children = ['for', node.target, 'in', node.iter, ':'] - children.extend(node.body) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _ImportFrom(self, node): - children = ['from'] - if node.level: - children.append('.' 
* node.level) - children.extend([node.module or '', # see comment at rope.base.ast.walk - 'import']) - children.extend(self._child_nodes(node.names, ',')) - self._handle(node, children) - - def _alias(self, node): - children = [node.name] - if node.asname: - children.extend(['as', node.asname]) - self._handle(node, children) - - def _FunctionDef(self, node): - children = [] - try: - decorators = getattr(node, 'decorator_list') - except AttributeError: - decorators = getattr(node, 'decorators', None) - if decorators: - for decorator in decorators: - children.append('@') - children.append(decorator) - children.extend(['def', node.name, '(', node.args]) - children.extend([')', ':']) - children.extend(node.body) - self._handle(node, children) - - def _arguments(self, node): - children = [] - args = list(node.args) - defaults = [None] * (len(args) - len(node.defaults)) + list(node.defaults) - for index, (arg, default) in enumerate(list(zip(args, defaults))): - if index > 0: - children.append(',') - self._add_args_to_children(children, arg, default) - if node.vararg is not None: - if args: - children.append(',') - children.extend(['*', node.vararg]) - if node.kwarg is not None: - if args or node.vararg is not None: - children.append(',') - children.extend(['**', node.kwarg]) - self._handle(node, children) - - def _add_args_to_children(self, children, arg, default): - if isinstance(arg, (list, tuple)): - self._add_tuple_parameter(children, arg) - else: - children.append(arg) - if default is not None: - children.append('=') - children.append(default) - - def _add_tuple_parameter(self, children, arg): - children.append('(') - for index, token in enumerate(arg): - if index > 0: - children.append(',') - if isinstance(token, (list, tuple)): - self._add_tuple_parameter(children, token) - else: - children.append(token) - children.append(')') - - def _GeneratorExp(self, node): - children = [node.elt] - children.extend(node.generators) - self._handle(node, children, eat_parens=True) - - def _comprehension(self, node): - children = ['for', node.target, 'in', node.iter] - if node.ifs: - for if_ in node.ifs: - children.append('if') - children.append(if_) - self._handle(node, children) - - def _Global(self, node): - children = self._child_nodes(node.names, ',') - children.insert(0, 'global') - self._handle(node, children) - - def _If(self, node): - if self._is_elif(node): - children = ['elif'] - else: - children = ['if'] - children.extend([node.test, ':']) - children.extend(node.body) - if node.orelse: - if len(node.orelse) == 1 and self._is_elif(node.orelse[0]): - pass - else: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _is_elif(self, node): - if not isinstance(node, ast.If): - return False - offset = self.lines.get_line_start(node.lineno) + node.col_offset - word = self.source[offset:offset + 4] - # XXX: This is a bug; the offset does not point to the first - alt_word = self.source[offset - 5:offset - 1] - return 'elif' in (word, alt_word) - - def _IfExp(self, node): - return self._handle(node, [node.body, 'if', node.test, - 'else', node.orelse]) - - def _Import(self, node): - children = ['import'] - children.extend(self._child_nodes(node.names, ',')) - self._handle(node, children) - - def _keyword(self, node): - self._handle(node, [node.arg, '=', node.value]) - - def _Lambda(self, node): - self._handle(node, ['lambda', node.args, ':', node.body]) - - def _List(self, node): - self._handle(node, ['['] + self._child_nodes(node.elts, ',') + [']']) - - 
def _ListComp(self, node): - children = ['[', node.elt] - children.extend(node.generators) - children.append(']') - self._handle(node, children) - - def _Module(self, node): - self._handle(node, list(node.body), eat_spaces=True) - - def _Name(self, node): - self._handle(node, [node.id]) - - def _Pass(self, node): - self._handle(node, ['pass']) - - def _Print(self, node): - children = ['print'] - if node.dest: - children.extend(['>>', node.dest]) - if node.values: - children.append(',') - children.extend(self._child_nodes(node.values, ',')) - if not node.nl: - children.append(',') - self._handle(node, children) - - def _Raise(self, node): - children = ['raise'] - if node.cause: - children.append(node.cause) - if node.exc: - children.append(node.exc) - self._handle(node, children) - - def _Return(self, node): - children = ['return'] - if node.value: - children.append(node.value) - self._handle(node, children) - - def _Sliceobj(self, node): - children = [] - for index, slice in enumerate(node.nodes): - if index > 0: - children.append(':') - if slice: - children.append(slice) - self._handle(node, children) - - def _Index(self, node): - self._handle(node, [node.value]) - - def _Subscript(self, node): - self._handle(node, [node.value, '[', node.slice, ']']) - - def _Slice(self, node): - children = [] - if node.lower: - children.append(node.lower) - children.append(':') - if node.upper: - children.append(node.upper) - if node.step: - children.append(':') - children.append(node.step) - self._handle(node, children) - - def _TryFinally(self, node): - children = [] - if len(node.body) != 1 or not isinstance(node.body[0], ast.TryExcept): - children.extend(['try', ':']) - children.extend(node.body) - children.extend(['finally', ':']) - children.extend(node.finalbody) - self._handle(node, children) - - def _TryExcept(self, node): - children = ['try', ':'] - children.extend(node.body) - children.extend(node.handlers) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _ExceptHandler(self, node): - self._excepthandler(node) - - def _excepthandler(self, node): - children = ['except'] - if node.type: - children.append(node.type) - if node.name: - children.extend(['as', node.name]) - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _Tuple(self, node): - if node.elts: - self._handle(node, self._child_nodes(node.elts, ','), - eat_parens=True) - else: - self._handle(node, ['(', ')']) - - def _UnaryOp(self, node): - children = self._get_op(node.op) - children.append(node.operand) - self._handle(node, children) - - def _Yield(self, node): - children = ['yield'] - if node.value: - children.append(node.value) - self._handle(node, children) - - def _While(self, node): - children = ['while', node.test, ':'] - children.extend(node.body) - if node.orelse: - children.extend(['else', ':']) - children.extend(node.orelse) - self._handle(node, children) - - def _With(self, node): - children = ['with', node.context_expr] - if node.optional_vars: - children.extend(['as', node.optional_vars]) - children.append(':') - children.extend(node.body) - self._handle(node, children) - - def _child_nodes(self, nodes, separator): - children = [] - for index, child in enumerate(nodes): - children.append(child) - if index < len(nodes) - 1: - children.append(separator) - return children - - -class _Source(object): - - def __init__(self, source): - self.source = source - self.offset = 0 - - def consume(self, token): - try: - while 
True: - new_offset = self.source.index(token, self.offset) - if self._good_token(token, new_offset): - break - else: - self._skip_comment() - except (ValueError, TypeError): - raise MismatchedTokenError( - 'Token <%s> at %s cannot be matched' % - (token, self._get_location())) - self.offset = new_offset + len(token) - return (new_offset, self.offset) - - def consume_string(self, end=None): - if _Source._string_pattern is None: - original = codeanalyze.get_string_pattern() - pattern = r'(%s)((\s|\\\n|#[^\n]*\n)*(%s))*' % \ - (original, original) - _Source._string_pattern = re.compile(pattern) - repattern = _Source._string_pattern - return self._consume_pattern(repattern, end) - - def consume_number(self): - if _Source._number_pattern is None: - _Source._number_pattern = re.compile( - self._get_number_pattern()) - repattern = _Source._number_pattern - return self._consume_pattern(repattern) - - def consume_not_equal(self): - if _Source._not_equals_pattern is None: - _Source._not_equals_pattern = re.compile(r'<>|!=') - repattern = _Source._not_equals_pattern - return self._consume_pattern(repattern) - - def _good_token(self, token, offset, start=None): - """Checks whether consumed token is in comments""" - if start is None: - start = self.offset - try: - comment_index = self.source.rindex('#', start, offset) - except ValueError: - return True - try: - new_line_index = self.source.rindex('\n', start, offset) - except ValueError: - return False - return comment_index < new_line_index - - def _skip_comment(self): - self.offset = self.source.index('\n', self.offset + 1) - - def _get_location(self): - lines = self.source[:self.offset].split('\n') - return (len(lines), len(lines[-1])) - - def _consume_pattern(self, repattern, end=None): - while True: - if end is None: - end = len(self.source) - match = repattern.search(self.source, self.offset, end) - if self._good_token(match.group(), match.start()): - break - else: - self._skip_comment() - self.offset = match.end() - return match.start(), match.end() - - def till_token(self, token): - new_offset = self.source.index(token, self.offset) - return self[self.offset:new_offset] - - def rfind_token(self, token, start, end): - index = start - while True: - try: - index = self.source.rindex(token, start, end) - if self._good_token(token, index, start=start): - return index - else: - end = index - except ValueError: - return None - - def from_offset(self, offset): - return self[offset:self.offset] - - def find_backwards(self, pattern, offset): - return self.source.rindex(pattern, 0, offset) - - def __getitem__(self, index): - return self.source[index] - - def __getslice__(self, i, j): - return self.source[i:j] - - def _get_number_pattern(self): - # HACK: It is merely an approaximation and does the job - integer = r'(0|0x)?[\da-fA-F]+[lL]?' - return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d*)?[jJ]?' 
% integer - - _string_pattern = None - _number_pattern = None - _not_equals_pattern = None diff --git a/pymode/libs3/rope/refactor/rename.py b/pymode/libs3/rope/refactor/rename.py deleted file mode 100644 index f61e4c40..00000000 --- a/pymode/libs3/rope/refactor/rename.py +++ /dev/null @@ -1,216 +0,0 @@ -import warnings - -from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze -from rope.base.change import ChangeSet, ChangeContents, MoveResource -from rope.refactor import occurrences, sourceutils - - -class Rename(object): - """A class for performing rename refactoring - - It can rename everything: classes, functions, modules, packages, - methods, variables and keyword arguments. - - """ - - def __init__(self, project, resource, offset=None): - """If `offset` is None, the `resource` itself will be renamed""" - self.project = project - self.pycore = project.pycore - self.resource = resource - if offset is not None: - self.old_name = worder.get_name_at(self.resource, offset) - this_pymodule = self.pycore.resource_to_pyobject(self.resource) - self.old_instance, self.old_pyname = \ - evaluate.eval_location2(this_pymodule, offset) - if self.old_pyname is None: - raise exceptions.RefactoringError( - 'Rename refactoring should be performed' - ' on resolvable python identifiers.') - else: - if not resource.is_folder() and resource.name == '__init__.py': - resource = resource.parent - dummy_pymodule = self.pycore.get_string_module('') - self.old_instance = None - self.old_pyname = pynames.ImportedModule(dummy_pymodule, - resource=resource) - if resource.is_folder(): - self.old_name = resource.name - else: - self.old_name = resource.name[:-3] - - def get_old_name(self): - return self.old_name - - def get_changes(self, new_name, in_file=None, in_hierarchy=False, - unsure=None, docs=False, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes needed for this refactoring - - Parameters: - - - `in_hierarchy`: when renaming a method this keyword forces - to rename all matching methods in the hierarchy - - `docs`: when `True` rename refactoring will rename - occurrences in comments and strings where the name is - visible. Setting it will make renames faster, too. - - `unsure`: decides what to do about unsure occurrences. - If `None`, they are ignored. Otherwise `unsure` is - called with an instance of `occurrence.Occurrence` as - parameter. If it returns `True`, the occurrence is - considered to be a match. - - `resources` can be a list of `rope.base.resources.File`\s to - apply this refactoring on. If `None`, the restructuring - will be applied to all python files. - - `in_file`: this argument has been deprecated; use - `resources` instead. - - """ - if unsure in (True, False): - warnings.warn( - 'unsure parameter should be a function that returns ' - 'True or False', DeprecationWarning, stacklevel=2) - def unsure_func(value=unsure): - return value - unsure = unsure_func - if in_file is not None: - warnings.warn( - '`in_file` argument has been deprecated; use `resources` ' - 'instead. 
', DeprecationWarning, stacklevel=2) - if in_file: - resources = [self.resource] - if _is_local(self.old_pyname): - resources = [self.resource] - if resources is None: - resources = self.pycore.get_python_files() - changes = ChangeSet('Renaming <%s> to <%s>' % - (self.old_name, new_name)) - finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, unsure=unsure, - docs=docs, instance=self.old_instance, - in_hierarchy=in_hierarchy and self.is_method()) - job_set = task_handle.create_jobset('Collecting Changes', len(resources)) - for file_ in resources: - job_set.started_job(file_.path) - new_content = rename_in_module(finder, new_name, resource=file_) - if new_content is not None: - changes.add_change(ChangeContents(file_, new_content)) - job_set.finished_job() - if self._is_renaming_a_module(): - resource = self.old_pyname.get_object().get_resource() - if self._is_allowed_to_move(resources, resource): - self._rename_module(resource, new_name, changes) - return changes - - def _is_allowed_to_move(self, resources, resource): - if resource.is_folder(): - try: - return resource.get_child('__init__.py') in resources - except exceptions.ResourceNotFoundError: - return False - else: - return resource in resources - - def _is_renaming_a_module(self): - if isinstance(self.old_pyname.get_object(), pyobjects.AbstractModule): - return True - return False - - def is_method(self): - pyname = self.old_pyname - return isinstance(pyname, pynames.DefinedName) and \ - isinstance(pyname.get_object(), pyobjects.PyFunction) and \ - isinstance(pyname.get_object().parent, pyobjects.PyClass) - - def _rename_module(self, resource, new_name, changes): - if not resource.is_folder(): - new_name = new_name + '.py' - parent_path = resource.parent.path - if parent_path == '': - new_location = new_name - else: - new_location = parent_path + '/' + new_name - changes.add_change(MoveResource(resource, new_location)) - - -class ChangeOccurrences(object): - """A class for changing the occurrences of a name in a scope - - This class replaces the occurrences of a name. Note that it only - changes the scope containing the offset passed to the constructor. - What's more it does not have any side-effects. That is for - example changing occurrences of a module does not rename the - module; it merely replaces the occurrences of that module in a - scope with the given expression. This class is useful for - performing many custom refactorings. 
- - """ - - def __init__(self, project, resource, offset): - self.pycore = project.pycore - self.resource = resource - self.offset = offset - self.old_name = worder.get_name_at(resource, offset) - self.pymodule = self.pycore.resource_to_pyobject(self.resource) - self.old_pyname = evaluate.eval_location(self.pymodule, offset) - - def get_old_name(self): - word_finder = worder.Worder(self.resource.read()) - return word_finder.get_primary_at(self.offset) - - def _get_scope_offset(self): - lines = self.pymodule.lines - scope = self.pymodule.get_scope().\ - get_inner_scope_for_line(lines.get_line_number(self.offset)) - start = lines.get_line_start(scope.get_start()) - end = lines.get_line_end(scope.get_end()) - return start, end - - def get_changes(self, new_name, only_calls=False, reads=True, writes=True): - changes = ChangeSet('Changing <%s> occurrences to <%s>' % - (self.old_name, new_name)) - scope_start, scope_end = self._get_scope_offset() - finder = occurrences.create_finder( - self.pycore, self.old_name, self.old_pyname, - imports=False, only_calls=only_calls) - new_contents = rename_in_module( - finder, new_name, pymodule=self.pymodule, replace_primary=True, - region=(scope_start, scope_end), reads=reads, writes=writes) - if new_contents is not None: - changes.add_change(ChangeContents(self.resource, new_contents)) - return changes - - -def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None, - replace_primary=False, region=None, reads=True, writes=True): - """Returns the changed source or `None` if there is no changes""" - if resource is not None: - source_code = resource.read() - else: - source_code = pymodule.source_code - change_collector = codeanalyze.ChangeCollector(source_code) - for occurrence in occurrences_finder.find_occurrences(resource, pymodule): - if replace_primary and occurrence.is_a_fixed_primary(): - continue - if replace_primary: - start, end = occurrence.get_primary_range() - else: - start, end = occurrence.get_word_range() - if (not reads and not occurrence.is_written()) or \ - (not writes and occurrence.is_written()): - continue - if region is None or region[0] <= start < region[1]: - change_collector.add_change(start, end, new_name) - return change_collector.get_changed() - -def _is_local(pyname): - module, lineno = pyname.get_definition_location() - if lineno is None: - return False - scope = module.get_scope().get_inner_scope_for_line(lineno) - if isinstance(pyname, pynames.DefinedName) and \ - scope.get_kind() in ('Function', 'Class'): - scope = scope.parent - return scope.get_kind() == 'Function' and \ - pyname in list(scope.get_names().values()) and \ - isinstance(pyname, pynames.AssignedName) diff --git a/pymode/libs3/rope/refactor/restructure.py b/pymode/libs3/rope/refactor/restructure.py deleted file mode 100644 index 1573c2fe..00000000 --- a/pymode/libs3/rope/refactor/restructure.py +++ /dev/null @@ -1,307 +0,0 @@ -import warnings - -from rope.base import change, taskhandle, builtins, ast, codeanalyze -from rope.refactor import patchedast, similarfinder, sourceutils -from rope.refactor.importutils import module_imports - - -class Restructure(object): - """A class to perform python restructurings - - A restructuring transforms pieces of code matching `pattern` to - `goal`. In the `pattern` wildcards can appear. Wildcards match - some piece of code based on their kind and arguments that are - passed to them through `args`. - - `args` is a dictionary of wildcard names to wildcard arguments. 
- If the argument is a tuple, the first item of the tuple is - considered to be the name of the wildcard to use; otherwise the - "default" wildcard is used. For getting the list arguments a - wildcard supports, see the pydoc of the wildcard. (see - `rope.refactor.wildcard.DefaultWildcard` for the default - wildcard.) - - `wildcards` is the list of wildcard types that can appear in - `pattern`. See `rope.refactor.wildcards`. If a wildcard does not - specify its kind (by using a tuple in args), the wildcard named - "default" is used. So there should be a wildcard with "default" - name in `wildcards`. - - `imports` is the list of imports that changed modules should - import. Note that rope handles duplicate imports and does not add - the import if it already appears. - - Example #1:: - - pattern ${pyobject}.get_attribute(${name}) - goal ${pyobject}[${name}] - args pyobject: instance=rope.base.pyobjects.PyObject - - Example #2:: - - pattern ${name} in ${pyobject}.get_attributes() - goal ${name} in {pyobject} - args pyobject: instance=rope.base.pyobjects.PyObject - - Example #3:: - - pattern ${pycore}.create_module(${project}.root, ${name}) - goal generate.create_module(${project}, ${name}) - - imports - from rope.contrib import generate - - args - pycore: type=rope.base.pycore.PyCore - project: type=rope.base.project.Project - - Example #4:: - - pattern ${pow}(${param1}, ${param2}) - goal ${param1} ** ${param2} - args pow: name=mod.pow, exact - - Example #5:: - - pattern ${inst}.longtask(${p1}, ${p2}) - goal - ${inst}.subtask1(${p1}) - ${inst}.subtask2(${p2}) - args - inst: type=mod.A,unsure - - """ - - def __init__(self, project, pattern, goal, args=None, - imports=None, wildcards=None): - """Construct a restructuring - - See class pydoc for more info about the arguments. - - """ - self.pycore = project.pycore - self.pattern = pattern - self.goal = goal - self.args = args - if self.args is None: - self.args = {} - self.imports = imports - if self.imports is None: - self.imports = [] - self.wildcards = wildcards - self.template = similarfinder.CodeTemplate(self.goal) - - def get_changes(self, checks=None, imports=None, resources=None, - task_handle=taskhandle.NullTaskHandle()): - """Get the changes needed by this restructuring - - `resources` can be a list of `rope.base.resources.File`\s to - apply the restructuring on. If `None`, the restructuring will - be applied to all python files. - - `checks` argument has been deprecated. Use the `args` argument - of the constructor. The usage of:: - - strchecks = {'obj1.type': 'mod.A', 'obj2': 'mod.B', - 'obj3.object': 'mod.C'} - checks = restructuring.make_checks(strchecks) - - can be replaced with:: - - args = {'obj1': 'type=mod.A', 'obj2': 'name=mod.B', - 'obj3': 'object=mod.C'} - - where obj1, obj2 and obj3 are wildcard names that appear - in restructuring pattern. 
- - """ - if checks is not None: - warnings.warn( - 'The use of checks parameter is deprecated; ' - 'use the args parameter of the constructor instead.', - DeprecationWarning, stacklevel=2) - for name, value in checks.items(): - self.args[name] = similarfinder._pydefined_to_str(value) - if imports is not None: - warnings.warn( - 'The use of imports parameter is deprecated; ' - 'use imports parameter of the constructor, instead.', - DeprecationWarning, stacklevel=2) - self.imports = imports - changes = change.ChangeSet('Restructuring <%s> to <%s>' % - (self.pattern, self.goal)) - if resources is not None: - files = [resource for resource in resources - if self.pycore.is_python_file(resource)] - else: - files = self.pycore.get_python_files() - job_set = task_handle.create_jobset('Collecting Changes', len(files)) - for resource in files: - job_set.started_job(resource.path) - pymodule = self.pycore.resource_to_pyobject(resource) - finder = similarfinder.SimilarFinder(pymodule, - wildcards=self.wildcards) - matches = list(finder.get_matches(self.pattern, self.args)) - computer = self._compute_changes(matches, pymodule) - result = computer.get_changed() - if result is not None: - imported_source = self._add_imports(resource, result, - self.imports) - changes.add_change(change.ChangeContents(resource, - imported_source)) - job_set.finished_job() - return changes - - def _compute_changes(self, matches, pymodule): - return _ChangeComputer( - pymodule.source_code, pymodule.get_ast(), - pymodule.lines, self.template, matches) - - def _add_imports(self, resource, source, imports): - if not imports: - return source - import_infos = self._get_import_infos(resource, imports) - pymodule = self.pycore.get_string_module(source, resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) - for import_info in import_infos: - imports.add_import(import_info) - return imports.get_changed_source() - - def _get_import_infos(self, resource, imports): - pymodule = self.pycore.get_string_module('\n'.join(imports), - resource) - imports = module_imports.ModuleImports(self.pycore, pymodule) - return [imports.import_info - for imports in imports.imports] - - def make_checks(self, string_checks): - """Convert str to str dicts to str to PyObject dicts - - This function is here to ease writing a UI. 
- - """ - checks = {} - for key, value in string_checks.items(): - is_pyname = not key.endswith('.object') and \ - not key.endswith('.type') - evaluated = self._evaluate(value, is_pyname=is_pyname) - if evaluated is not None: - checks[key] = evaluated - return checks - - def _evaluate(self, code, is_pyname=True): - attributes = code.split('.') - pyname = None - if attributes[0] in ('__builtin__', '__builtins__'): - class _BuiltinsStub(object): - def get_attribute(self, name): - return builtins.builtins[name] - pyobject = _BuiltinsStub() - else: - pyobject = self.pycore.get_module(attributes[0]) - for attribute in attributes[1:]: - pyname = pyobject[attribute] - if pyname is None: - return None - pyobject = pyname.get_object() - return pyname if is_pyname else pyobject - - -def replace(code, pattern, goal): - """used by other refactorings""" - finder = similarfinder.RawSimilarFinder(code) - matches = list(finder.get_matches(pattern)) - ast = patchedast.get_patched_ast(code) - lines = codeanalyze.SourceLinesAdapter(code) - template = similarfinder.CodeTemplate(goal) - computer = _ChangeComputer(code, ast, lines, template, matches) - result = computer.get_changed() - if result is None: - return code - return result - - -class _ChangeComputer(object): - - def __init__(self, code, ast, lines, goal, matches): - self.source = code - self.goal = goal - self.matches = matches - self.ast = ast - self.lines = lines - self.matched_asts = {} - self._nearest_roots = {} - if self._is_expression(): - for match in self.matches: - self.matched_asts[match.ast] = match - - def get_changed(self): - if self._is_expression(): - result = self._get_node_text(self.ast) - if result == self.source: - return None - return result - else: - collector = codeanalyze.ChangeCollector(self.source) - last_end = -1 - for match in self.matches: - start, end = match.get_region() - if start < last_end: - if not self._is_expression(): - continue - last_end = end - replacement = self._get_matched_text(match) - collector.add_change(start, end, replacement) - return collector.get_changed() - - def _is_expression(self): - return self.matches and isinstance(self.matches[0], - similarfinder.ExpressionMatch) - - def _get_matched_text(self, match): - mapping = {} - for name in self.goal.get_names(): - node = match.get_ast(name) - if node is None: - raise similarfinder.BadNameInCheckError( - 'Unknown name <%s>' % name) - force = self._is_expression() and match.ast == node - mapping[name] = self._get_node_text(node, force) - unindented = self.goal.substitute(mapping) - return self._auto_indent(match.get_region()[0], unindented) - - def _get_node_text(self, node, force=False): - if not force and node in self.matched_asts: - return self._get_matched_text(self.matched_asts[node]) - start, end = patchedast.node_region(node) - main_text = self.source[start:end] - collector = codeanalyze.ChangeCollector(main_text) - for node in self._get_nearest_roots(node): - sub_start, sub_end = patchedast.node_region(node) - collector.add_change(sub_start - start, sub_end - start, - self._get_node_text(node)) - result = collector.get_changed() - if result is None: - return main_text - return result - - def _auto_indent(self, offset, text): - lineno = self.lines.get_line_number(offset) - indents = sourceutils.get_indents(self.lines, lineno) - result = [] - for index, line in enumerate(text.splitlines(True)): - if index != 0 and line.strip(): - result.append(' ' * indents) - result.append(line) - return ''.join(result) - - def _get_nearest_roots(self, node): - 
if node not in self._nearest_roots: - result = [] - for child in ast.get_child_nodes(node): - if child in self.matched_asts: - result.append(child) - else: - result.extend(self._get_nearest_roots(child)) - self._nearest_roots[node] = result - return self._nearest_roots[node] diff --git a/pymode/libs3/rope/refactor/similarfinder.py b/pymode/libs3/rope/refactor/similarfinder.py deleted file mode 100644 index 70ae7e15..00000000 --- a/pymode/libs3/rope/refactor/similarfinder.py +++ /dev/null @@ -1,362 +0,0 @@ -"""This module can be used for finding similar code""" -import re - -import rope.refactor.wildcards -from rope.base import codeanalyze, evaluate, exceptions, ast, builtins -from rope.refactor import (patchedast, sourceutils, occurrences, - wildcards, importutils) - - -class BadNameInCheckError(exceptions.RefactoringError): - pass - - -class SimilarFinder(object): - """`SimilarFinder` can be used to find similar pieces of code - - See the notes in the `rope.refactor.restructure` module for more - info. - - """ - - def __init__(self, pymodule, wildcards=None): - """Construct a SimilarFinder""" - self.source = pymodule.source_code - self.raw_finder = RawSimilarFinder( - pymodule.source_code, pymodule.get_ast(), self._does_match) - self.pymodule = pymodule - if wildcards is None: - self.wildcards = {} - for wildcard in [rope.refactor.wildcards. - DefaultWildcard(pymodule.pycore.project)]: - self.wildcards[wildcard.get_name()] = wildcard - else: - self.wildcards = wildcards - - def get_matches(self, code, args={}, start=0, end=None): - self.args = args - if end is None: - end = len(self.source) - skip_region = None - if 'skip' in args.get('', {}): - resource, region = args['']['skip'] - if resource == self.pymodule.get_resource(): - skip_region = region - return self.raw_finder.get_matches(code, start=start, end=end, - skip=skip_region) - - def get_match_regions(self, *args, **kwds): - for match in self.get_matches(*args, **kwds): - yield match.get_region() - - def _does_match(self, node, name): - arg = self.args.get(name, '') - kind = 'default' - if isinstance(arg, (tuple, list)): - kind = arg[0] - arg = arg[1] - suspect = wildcards.Suspect(self.pymodule, node, name) - return self.wildcards[kind].matches(suspect, arg) - - -class RawSimilarFinder(object): - """A class for finding similar expressions and statements""" - - def __init__(self, source, node=None, does_match=None): - if node is None: - node = ast.parse(source) - if does_match is None: - self.does_match = self._simple_does_match - else: - self.does_match = does_match - self._init_using_ast(node, source) - - def _simple_does_match(self, node, name): - return isinstance(node, (ast.expr, ast.Name)) - - def _init_using_ast(self, node, source): - self.source = source - self._matched_asts = {} - if not hasattr(node, 'region'): - patchedast.patch_ast(node, source) - self.ast = node - - def get_matches(self, code, start=0, end=None, skip=None): - """Search for `code` in source and return a list of `Match`\es - - `code` can contain wildcards. ``${name}`` matches normal - names and ``${?name} can match any expression. You can use - `Match.get_ast()` for getting the node that has matched a - given pattern. 
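As a rough sketch of the ``${name}`` wildcard syntax described above, RawSimilarFinder can also be driven directly; the source snippet and wildcard names below are invented, the calls are the ones defined in this file::

    # Hedged sketch of the wildcard matching documented above; the source
    # string and the ${inst}/${arg} wildcard names are made up.
    from rope.refactor import similarfinder

    source = "total = obj.get_attribute('size')\n"
    finder = similarfinder.RawSimilarFinder(source)
    for match in finder.get_matches("${inst}.get_attribute(${arg})"):
        start, end = match.get_region()   # character offsets of the match
        print(source[start:end])          # -> "obj.get_attribute('size')"
        print(match.get_ast('inst'))      # AST node bound to the ${inst} wildcard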
- - """ - if end is None: - end = len(self.source) - for match in self._get_matched_asts(code): - match_start, match_end = match.get_region() - if start <= match_start and match_end <= end: - if skip is not None and (skip[0] < match_end and - skip[1] > match_start): - continue - yield match - - def _get_matched_asts(self, code): - if code not in self._matched_asts: - wanted = self._create_pattern(code) - matches = _ASTMatcher(self.ast, wanted, - self.does_match).find_matches() - self._matched_asts[code] = matches - return self._matched_asts[code] - - def _create_pattern(self, expression): - expression = self._replace_wildcards(expression) - node = ast.parse(expression) - # Getting Module.Stmt.nodes - nodes = node.body - if len(nodes) == 1 and isinstance(nodes[0], ast.Expr): - # Getting Discard.expr - wanted = nodes[0].value - else: - wanted = nodes - return wanted - - def _replace_wildcards(self, expression): - ropevar = _RopeVariable() - template = CodeTemplate(expression) - mapping = {} - for name in template.get_names(): - mapping[name] = ropevar.get_var(name) - return template.substitute(mapping) - - -class _ASTMatcher(object): - - def __init__(self, body, pattern, does_match): - """Searches the given pattern in the body AST. - - body is an AST node and pattern can be either an AST node or - a list of ASTs nodes - """ - self.body = body - self.pattern = pattern - self.matches = None - self.ropevar = _RopeVariable() - self.matches_callback = does_match - - def find_matches(self): - if self.matches is None: - self.matches = [] - ast.call_for_nodes(self.body, self._check_node, recursive=True) - return self.matches - - def _check_node(self, node): - if isinstance(self.pattern, list): - self._check_statements(node) - else: - self._check_expression(node) - - def _check_expression(self, node): - mapping = {} - if self._match_nodes(self.pattern, node, mapping): - self.matches.append(ExpressionMatch(node, mapping)) - - def _check_statements(self, node): - for child in ast.get_children(node): - if isinstance(child, (list, tuple)): - self.__check_stmt_list(child) - - def __check_stmt_list(self, nodes): - for index in range(len(nodes)): - if len(nodes) - index >= len(self.pattern): - current_stmts = nodes[index:index + len(self.pattern)] - mapping = {} - if self._match_stmts(current_stmts, mapping): - self.matches.append(StatementMatch(current_stmts, mapping)) - - def _match_nodes(self, expected, node, mapping): - if isinstance(expected, ast.Name): - if self.ropevar.is_var(expected.id): - return self._match_wildcard(expected, node, mapping) - if not isinstance(expected, ast.AST): - return expected == node - if expected.__class__ != node.__class__: - return False - - children1 = self._get_children(expected) - children2 = self._get_children(node) - if len(children1) != len(children2): - return False - for child1, child2 in zip(children1, children2): - if isinstance(child1, ast.AST): - if not self._match_nodes(child1, child2, mapping): - return False - elif isinstance(child1, (list, tuple)): - if not isinstance(child2, (list, tuple)) or \ - len(child1) != len(child2): - return False - for c1, c2 in zip(child1, child2): - if not self._match_nodes(c1, c2, mapping): - return False - else: - if child1 != child2: - return False - return True - - def _get_children(self, node): - """Return not `ast.expr_context` children of `node`""" - children = ast.get_children(node) - return [child for child in children - if not isinstance(child, ast.expr_context)] - - def _match_stmts(self, current_stmts, mapping): - if 
len(current_stmts) != len(self.pattern): - return False - for stmt, expected in zip(current_stmts, self.pattern): - if not self._match_nodes(expected, stmt, mapping): - return False - return True - - def _match_wildcard(self, node1, node2, mapping): - name = self.ropevar.get_base(node1.id) - if name not in mapping: - if self.matches_callback(node2, name): - mapping[name] = node2 - return True - return False - else: - return self._match_nodes(mapping[name], node2, {}) - - -class Match(object): - - def __init__(self, mapping): - self.mapping = mapping - - def get_region(self): - """Returns match region""" - - def get_ast(self, name): - """Return the ast node that has matched rope variables""" - return self.mapping.get(name, None) - - -class ExpressionMatch(Match): - - def __init__(self, ast, mapping): - super(ExpressionMatch, self).__init__(mapping) - self.ast = ast - - def get_region(self): - return self.ast.region - - -class StatementMatch(Match): - - def __init__(self, ast_list, mapping): - super(StatementMatch, self).__init__(mapping) - self.ast_list = ast_list - - def get_region(self): - return self.ast_list[0].region[0], self.ast_list[-1].region[1] - - -class CodeTemplate(object): - - def __init__(self, template): - self.template = template - self._find_names() - - def _find_names(self): - self.names = {} - for match in CodeTemplate._get_pattern().finditer(self.template): - if 'name' in match.groupdict() and \ - match.group('name') is not None: - start, end = match.span('name') - name = self.template[start + 2:end - 1] - if name not in self.names: - self.names[name] = [] - self.names[name].append((start, end)) - - def get_names(self): - return list(self.names.keys()) - - def substitute(self, mapping): - collector = codeanalyze.ChangeCollector(self.template) - for name, occurrences in self.names.items(): - for region in occurrences: - collector.add_change(region[0], region[1], mapping[name]) - result = collector.get_changed() - if result is None: - return self.template - return result - - _match_pattern = None - - @classmethod - def _get_pattern(cls): - if cls._match_pattern is None: - pattern = codeanalyze.get_comment_pattern() + '|' + \ - codeanalyze.get_string_pattern() + '|' + \ - r'(?P\$\{[^\s\$\}]*\})' - cls._match_pattern = re.compile(pattern) - return cls._match_pattern - - -class _RopeVariable(object): - """Transform and identify rope inserted wildcards""" - - _normal_prefix = '__rope__variable_normal_' - _any_prefix = '__rope__variable_any_' - - def get_var(self, name): - if name.startswith('?'): - return self._get_any(name) - else: - return self._get_normal(name) - - def is_var(self, name): - return self._is_normal(name) or self._is_var(name) - - def get_base(self, name): - if self._is_normal(name): - return name[len(self._normal_prefix):] - if self._is_var(name): - return '?' 
+ name[len(self._any_prefix):] - - def _get_normal(self, name): - return self._normal_prefix + name - - def _get_any(self, name): - return self._any_prefix + name[1:] - - def _is_normal(self, name): - return name.startswith(self._normal_prefix) - - def _is_var(self, name): - return name.startswith(self._any_prefix) - - -def make_pattern(code, variables): - variables = set(variables) - collector = codeanalyze.ChangeCollector(code) - def does_match(node, name): - return isinstance(node, ast.Name) and node.id == name - finder = RawSimilarFinder(code, does_match=does_match) - for variable in variables: - for match in finder.get_matches('${%s}' % variable): - start, end = match.get_region() - collector.add_change(start, end, '${%s}' % variable) - result = collector.get_changed() - return result if result is not None else code - - -def _pydefined_to_str(pydefined): - address = [] - if isinstance(pydefined, (builtins.BuiltinClass, builtins.BuiltinFunction)): - return '__builtins__.' + pydefined.get_name() - else: - while pydefined.parent is not None: - address.insert(0, pydefined.get_name()) - pydefined = pydefined.parent - module_name = pydefined.pycore.modname(pydefined.resource) - return '.'.join(module_name.split('.') + address) diff --git a/pymode/libs3/rope/refactor/sourceutils.py b/pymode/libs3/rope/refactor/sourceutils.py deleted file mode 100644 index f64213db..00000000 --- a/pymode/libs3/rope/refactor/sourceutils.py +++ /dev/null @@ -1,92 +0,0 @@ -from rope.base import ast, codeanalyze - - -def get_indents(lines, lineno): - return codeanalyze.count_line_indents(lines.get_line(lineno)) - - -def find_minimum_indents(source_code): - result = 80 - lines = source_code.split('\n') - for line in lines: - if line.strip() == '': - continue - result = min(result, codeanalyze.count_line_indents(line)) - return result - - -def indent_lines(source_code, amount): - if amount == 0: - return source_code - lines = source_code.splitlines(True) - result = [] - for l in lines: - if l.strip() == '': - result.append('\n') - continue - if amount < 0: - indents = codeanalyze.count_line_indents(l) - result.append(max(0, indents + amount) * ' ' + l.lstrip()) - else: - result.append(' ' * amount + l) - return ''.join(result) - - -def fix_indentation(code, new_indents): - """Change the indentation of `code` to `new_indents`""" - min_indents = find_minimum_indents(code) - return indent_lines(code, new_indents - min_indents) - - -def add_methods(pymodule, class_scope, methods_sources): - source_code = pymodule.source_code - lines = pymodule.lines - insertion_line = class_scope.get_end() - if class_scope.get_scopes(): - insertion_line = class_scope.get_scopes()[-1].get_end() - insertion_offset = lines.get_line_end(insertion_line) - methods = '\n\n' + '\n\n'.join(methods_sources) - indented_methods = fix_indentation( - methods, get_indents(lines, class_scope.get_start()) + - get_indent(pymodule.pycore)) - result = [] - result.append(source_code[:insertion_offset]) - result.append(indented_methods) - result.append(source_code[insertion_offset:]) - return ''.join(result) - - -def get_body(pyfunction): - """Return unindented function body""" - scope = pyfunction.get_scope() - pymodule = pyfunction.get_module() - start, end = get_body_region(pyfunction) - return fix_indentation(pymodule.source_code[start:end], 0) - - -def get_body_region(defined): - """Return the start and end offsets of function body""" - scope = defined.get_scope() - pymodule = defined.get_module() - lines = pymodule.lines - node = defined.get_ast() - 
start_line = node.lineno - if defined.get_doc() is None: - start_line = node.body[0].lineno - elif len(node.body) > 1: - start_line = node.body[1].lineno - start = lines.get_line_start(start_line) - scope_start = pymodule.logical_lines.logical_line_in(scope.start) - if scope_start[1] >= start_line: - # a one-liner! - # XXX: what if colon appears in a string - start = pymodule.source_code.index(':', start) + 1 - while pymodule.source_code[start].isspace(): - start += 1 - end = min(lines.get_line_end(scope.end) + 1, len(pymodule.source_code)) - return start, end - - -def get_indent(pycore): - project = pycore.project - return project.prefs.get('indent_size', 4) diff --git a/pymode/libs3/rope/refactor/suites.py b/pymode/libs3/rope/refactor/suites.py deleted file mode 100644 index d955c819..00000000 --- a/pymode/libs3/rope/refactor/suites.py +++ /dev/null @@ -1,142 +0,0 @@ -from rope.base import ast - - -def find_visible(node, lines): - """Return the line which is visible from all `lines`""" - root = ast_suite_tree(node) - return find_visible_for_suite(root, lines) - - -def find_visible_for_suite(root, lines): - if len(lines) == 1: - return lines[0] - line1 = lines[0] - line2 = find_visible_for_suite(root, lines[1:]) - suite1 = root.find_suite(line1) - suite2 = root.find_suite(line2) - def valid(suite): - return suite is not None and not suite.ignored - if valid(suite1) and not valid(suite2): - return line1 - if not valid(suite1) and valid(suite2): - return line2 - if not valid(suite1) and not valid(suite2): - return None - while suite1 != suite2 and suite1.parent != suite2.parent: - if suite1._get_level() < suite2._get_level(): - line2 = suite2.get_start() - suite2 = suite2.parent - elif suite1._get_level() > suite2._get_level(): - line1 = suite1.get_start() - suite1 = suite1.parent - else: - line1 = suite1.get_start() - line2 = suite2.get_start() - suite1 = suite1.parent - suite2 = suite2.parent - if suite1 == suite2: - return min(line1, line2) - return min(suite1.get_start(), suite2.get_start()) - - -def ast_suite_tree(node): - if hasattr(node, 'lineno'): - lineno = node.lineno - else: - lineno = 1 - return Suite(node.body, lineno) - - -class Suite(object): - - def __init__(self, child_nodes, lineno, parent=None, ignored=False): - self.parent = parent - self.lineno = lineno - self.child_nodes = child_nodes - self._children = None - self.ignored = ignored - - def get_start(self): - if self.parent is None: - if self.child_nodes: - return self.local_start() - else: - return 1 - return self.lineno - - def get_children(self): - if self._children is None: - walker = _SuiteWalker(self) - for child in self.child_nodes: - ast.walk(child, walker) - self._children = walker.suites - return self._children - - def local_start(self): - return self.child_nodes[0].lineno - - def local_end(self): - end = self.child_nodes[-1].lineno - if self.get_children(): - end = max(end, self.get_children()[-1].local_end()) - return end - - def find_suite(self, line): - if line is None: - return None - for child in self.get_children(): - if child.local_start() <= line <= child.local_end(): - return child.find_suite(line) - return self - - def _get_level(self): - if self.parent is None: - return 0 - return self.parent._get_level() + 1 - - -class _SuiteWalker(object): - - def __init__(self, suite): - self.suite = suite - self.suites = [] - - def _If(self, node): - self._add_if_like_node(node) - - def _For(self, node): - self._add_if_like_node(node) - - def _While(self, node): - self._add_if_like_node(node) - - def 
_With(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - - def _TryFinally(self, node): - if len(node.finalbody) == 1 and \ - isinstance(node.body[0], ast.TryExcept): - self._TryExcept(node.body[0]) - else: - self.suites.append(Suite(node.body, node.lineno, self.suite)) - self.suites.append(Suite(node.finalbody, node.lineno, self.suite)) - - def _TryExcept(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - for handler in node.handlers: - self.suites.append(Suite(handler.body, node.lineno, self.suite)) - if node.orelse: - self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - - def _add_if_like_node(self, node): - self.suites.append(Suite(node.body, node.lineno, self.suite)) - if node.orelse: - self.suites.append(Suite(node.orelse, node.lineno, self.suite)) - - def _FunctionDef(self, node): - self.suites.append(Suite(node.body, node.lineno, - self.suite, ignored=True)) - - def _ClassDef(self, node): - self.suites.append(Suite(node.body, node.lineno, - self.suite, ignored=True)) diff --git a/pymode/libs3/rope/refactor/topackage.py b/pymode/libs3/rope/refactor/topackage.py deleted file mode 100644 index b7113979..00000000 --- a/pymode/libs3/rope/refactor/topackage.py +++ /dev/null @@ -1,32 +0,0 @@ -import rope.refactor.importutils -from rope.base.change import ChangeSet, ChangeContents, MoveResource, CreateFolder - - -class ModuleToPackage(object): - - def __init__(self, project, resource): - self.project = project - self.pycore = project.pycore - self.resource = resource - - def get_changes(self): - changes = ChangeSet('Transform <%s> module to package' % - self.resource.path) - new_content = self._transform_relatives_to_absolute(self.resource) - if new_content is not None: - changes.add_change(ChangeContents(self.resource, new_content)) - parent = self.resource.parent - name = self.resource.name[:-3] - changes.add_change(CreateFolder(parent, name)) - parent_path = parent.path + '/' - if not parent.path: - parent_path = '' - new_path = parent_path + '%s/__init__.py' % name - if self.resource.project == self.project: - changes.add_change(MoveResource(self.resource, new_path)) - return changes - - def _transform_relatives_to_absolute(self, resource): - pymodule = self.pycore.resource_to_pyobject(resource) - import_tools = rope.refactor.importutils.ImportTools(self.pycore) - return import_tools.relatives_to_absolutes(pymodule) diff --git a/pymode/libs3/rope/refactor/usefunction.py b/pymode/libs3/rope/refactor/usefunction.py deleted file mode 100644 index b0621525..00000000 --- a/pymode/libs3/rope/refactor/usefunction.py +++ /dev/null @@ -1,171 +0,0 @@ -from rope.base import (change, taskhandle, evaluate, - exceptions, pyobjects, pynames, ast) -from rope.refactor import restructure, sourceutils, similarfinder, importutils - - -class UseFunction(object): - """Try to use a function wherever possible""" - - def __init__(self, project, resource, offset): - self.project = project - self.offset = offset - this_pymodule = project.pycore.resource_to_pyobject(resource) - pyname = evaluate.eval_location(this_pymodule, offset) - if pyname is None: - raise exceptions.RefactoringError('Unresolvable name selected') - self.pyfunction = pyname.get_object() - if not isinstance(self.pyfunction, pyobjects.PyFunction) or \ - not isinstance(self.pyfunction.parent, pyobjects.PyModule): - raise exceptions.RefactoringError( - 'Use function works for global functions, only.') - self.resource = self.pyfunction.get_module().get_resource() - 
self._check_returns() - - def _check_returns(self): - node = self.pyfunction.get_ast() - if _yield_count(node): - raise exceptions.RefactoringError('Use function should not ' - 'be used on generators.') - returns = _return_count(node) - if returns > 1: - raise exceptions.RefactoringError('usefunction: Function has more ' - 'than one return statement.') - if returns == 1 and not _returns_last(node): - raise exceptions.RefactoringError('usefunction: return should ' - 'be the last statement.') - - def get_changes(self, resources=None, - task_handle=taskhandle.NullTaskHandle()): - if resources is None: - resources = self.project.pycore.get_python_files() - changes = change.ChangeSet('Using function <%s>' % - self.pyfunction.get_name()) - if self.resource in resources: - newresources = list(resources) - newresources.remove(self.resource) - for c in self._restructure(newresources, task_handle).changes: - changes.add_change(c) - if self.resource in resources: - for c in self._restructure([self.resource], task_handle, - others=False).changes: - changes.add_change(c) - return changes - - def get_function_name(self): - return self.pyfunction.get_name() - - def _restructure(self, resources, task_handle, others=True): - body = self._get_body() - pattern = self._make_pattern() - goal = self._make_goal(import_=others) - imports = None - if others: - imports = ['import %s' % self._module_name()] - - body_region = sourceutils.get_body_region(self.pyfunction) - args_value = {'skip': (self.resource, body_region)} - args = {'': args_value} - - restructuring = restructure.Restructure( - self.project, pattern, goal, args=args, imports=imports) - return restructuring.get_changes(resources=resources, - task_handle=task_handle) - - def _find_temps(self): - return find_temps(self.project, self._get_body()) - - def _module_name(self): - return self.project.pycore.modname(self.resource) - - def _make_pattern(self): - params = self.pyfunction.get_param_names() - body = self._get_body() - body = restructure.replace(body, 'return', 'pass') - wildcards = list(params) - wildcards.extend(self._find_temps()) - if self._does_return(): - if self._is_expression(): - replacement = '${%s}' % self._rope_returned - else: - replacement = '%s = ${%s}' % (self._rope_result, - self._rope_returned) - body = restructure.replace( - body, 'return ${%s}' % self._rope_returned, - replacement) - wildcards.append(self._rope_result) - return similarfinder.make_pattern(body, wildcards) - - def _get_body(self): - return sourceutils.get_body(self.pyfunction) - - def _make_goal(self, import_=False): - params = self.pyfunction.get_param_names() - function_name = self.pyfunction.get_name() - if import_: - function_name = self._module_name() + '.' 
+ function_name - goal = '%s(%s)' % (function_name, - ', ' .join(('${%s}' % p) for p in params)) - if self._does_return() and not self._is_expression(): - goal = '${%s} = %s' % (self._rope_result, goal) - return goal - - def _does_return(self): - body = self._get_body() - removed_return = restructure.replace(body, 'return ${result}', '') - return removed_return != body - - def _is_expression(self): - return len(self.pyfunction.get_ast().body) == 1 - - _rope_result = '_rope__result' - _rope_returned = '_rope__returned' - - -def find_temps(project, code): - code = 'def f():\n' + sourceutils.indent_lines(code, 4) - pymodule = project.pycore.get_string_module(code) - result = [] - function_scope = pymodule.get_scope().get_scopes()[0] - for name, pyname in function_scope.get_names().items(): - if isinstance(pyname, pynames.AssignedName): - result.append(name) - return result - - -def _returns_last(node): - return node.body and isinstance(node.body[-1], ast.Return) - -def _yield_count(node): - visitor = _ReturnOrYieldFinder() - visitor.start_walking(node) - return visitor.yields - -def _return_count(node): - visitor = _ReturnOrYieldFinder() - visitor.start_walking(node) - return visitor.returns - -class _ReturnOrYieldFinder(object): - - def __init__(self): - self.returns = 0 - self.yields = 0 - - def _Return(self, node): - self.returns += 1 - - def _Yield(self, node): - self.yields += 1 - - def _FunctionDef(self, node): - pass - - def _ClassDef(self, node): - pass - - def start_walking(self, node): - nodes = [node] - if isinstance(node, ast.FunctionDef): - nodes = ast.get_child_nodes(node) - for child in nodes: - ast.walk(child, self) diff --git a/pymode/libs3/rope/refactor/wildcards.py b/pymode/libs3/rope/refactor/wildcards.py deleted file mode 100644 index 6c487a2a..00000000 --- a/pymode/libs3/rope/refactor/wildcards.py +++ /dev/null @@ -1,176 +0,0 @@ -from rope.base import ast, evaluate, builtins, pyobjects -from rope.refactor import patchedast, occurrences - - -class Wildcard(object): - - def get_name(self): - """Return the name of this wildcard""" - - def matches(self, suspect, arg): - """Return `True` if `suspect` matches this wildcard""" - - -class Suspect(object): - - def __init__(self, pymodule, node, name): - self.name = name - self.pymodule = pymodule - self.node = node - - -class DefaultWildcard(object): - """The default restructuring wildcard - - The argument passed to this wildcard is in the - ``key1=value1,key2=value2,...`` format. 
Possible keys are: - - * name - for checking the reference - * type - for checking the type - * object - for checking the object - * instance - for checking types but similar to builtin isinstance - * exact - matching only occurrences with the same name as the wildcard - * unsure - matching unsure occurrences - - """ - - def __init__(self, project): - self.project = project - - def get_name(self): - return 'default' - - def matches(self, suspect, arg=''): - args = parse_arg(arg) - - if not self._check_exact(args, suspect): - return False - if not self._check_object(args, suspect): - return False - return True - - def _check_object(self, args, suspect): - kind = None - expected = None - unsure = args.get('unsure', False) - for check in ['name', 'object', 'type', 'instance']: - if check in args: - kind = check - expected = args[check] - if expected is not None: - checker = _CheckObject(self.project, expected, - kind, unsure=unsure) - return checker(suspect.pymodule, suspect.node) - return True - - def _check_exact(self, args, suspect): - node = suspect.node - if args.get('exact'): - if not isinstance(node, ast.Name) or not node.id == suspect.name: - return False - else: - if not isinstance(node, ast.expr): - return False - return True - - -def parse_arg(arg): - if isinstance(arg, dict): - return arg - result = {} - tokens = arg.split(',') - for token in tokens: - if '=' in token: - parts = token.split('=', 1) - result[parts[0].strip()] = parts[1].strip() - else: - result[token.strip()] = True - return result - - -class _CheckObject(object): - - def __init__(self, project, expected, kind='object', unsure=False): - self.project = project - self.kind = kind - self.unsure = unsure - self.expected = self._evaluate(expected) - - def __call__(self, pymodule, node): - pyname = self._evaluate_node(pymodule, node) - if pyname is None or self.expected is None: - return self.unsure - if self._unsure_pyname(pyname, unbound=self.kind=='name'): - return True - if self.kind == 'name': - return self._same_pyname(self.expected, pyname) - else: - pyobject = pyname.get_object() - if self.kind == 'object': - objects = [pyobject] - if self.kind == 'type': - objects = [pyobject.get_type()] - if self.kind == 'instance': - objects = [pyobject] - objects.extend(self._get_super_classes(pyobject)) - objects.extend(self._get_super_classes(pyobject.get_type())) - for pyobject in objects: - if self._same_pyobject(self.expected.get_object(), pyobject): - return True - return False - - def _get_super_classes(self, pyobject): - result = [] - if isinstance(pyobject, pyobjects.AbstractClass): - for superclass in pyobject.get_superclasses(): - result.append(superclass) - result.extend(self._get_super_classes(superclass)) - return result - - def _same_pyobject(self, expected, pyobject): - return expected == pyobject - - def _same_pyname(self, expected, pyname): - return occurrences.same_pyname(expected, pyname) - - def _unsure_pyname(self, pyname, unbound=True): - return self.unsure and occurrences.unsure_pyname(pyname, unbound) - - def _split_name(self, name): - parts = name.split('.') - expression, kind = parts[0], parts[-1] - if len(parts) == 1: - kind = 'name' - return expression, kind - - def _evaluate_node(self, pymodule, node): - scope = pymodule.get_scope().get_inner_scope_for_line(node.lineno) - expression = node - if isinstance(expression, ast.Name) and \ - isinstance(expression.ctx, ast.Store): - start, end = patchedast.node_region(expression) - text = pymodule.source_code[start:end] - return evaluate.eval_str(scope, 
text) - else: - return evaluate.eval_node(scope, expression) - - def _evaluate(self, code): - attributes = code.split('.') - pyname = None - if attributes[0] in ('__builtin__', '__builtins__'): - class _BuiltinsStub(object): - def get_attribute(self, name): - return builtins.builtins[name] - def __getitem__(self, name): - return builtins.builtins[name] - def __contains__(self, name): - return name in builtins.builtins - pyobject = _BuiltinsStub() - else: - pyobject = self.project.pycore.get_module(attributes[0]) - for attribute in attributes[1:]: - pyname = pyobject[attribute] - if pyname is None: - return None - pyobject = pyname.get_object() - return pyname diff --git a/pymode/rope.py b/pymode/rope.py index df02b9c8..3a20c267 100644 --- a/pymode/rope.py +++ b/pymode/rope.py @@ -1,4 +1,5 @@ -""" Rope support in pymode. """ +"""Integration with Rope library.""" + from __future__ import absolute_import, print_function import os.path @@ -6,7 +7,7 @@ import site import sys -from rope.base import project, libutils, exceptions, change, worder # noqa +from rope.base import project, libutils, exceptions, change, worder, pycore from rope.base.fscommands import FileSystemCommands # noqa from rope.base.taskhandle import TaskHandle # noqa from rope.contrib import autoimport as rope_autoimport, codeassist, findit, generate # noqa @@ -16,7 +17,7 @@ def look_ropeproject(path): - """ Search for ropeproject in current and parent dirs. + """Search for ropeproject in current and parent dirs. :return str|None: A finded path @@ -73,7 +74,7 @@ def complete(dot=False): cline = env.current.line[:col] env.debug('dot completion', cline) - if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith('\.'): + if FROM_RE.match(cline) or cline.endswith('..') or cline.endswith('\.'): # noqa return env.stop("") proposals = get_proporsals(source, offset, dot=dot) @@ -105,7 +106,7 @@ def get_proporsals(source, offset, base='', dot=False): :return str: """ - with RopeContext() as ctx: + with RopeContext() as ctx: # noqa try: proposals = codeassist.code_assist( @@ -346,7 +347,10 @@ class RopeContext(object): """ A context manager to have a rope project context. """ - def __init__(self, path, project_path): + projects = {} + resource = {} + + def __init__(self, path=None, project_path=None): """ Init Rope context. """ self.path = path @@ -913,3 +917,19 @@ def _insert_import(name, module, ctx): progress = ProgressHandler('Apply changes ...') ctx.project.do(changes, task_handle=progress.handle) reload_changes(changes) + + +# Monkey patch Rope +def find_source_folders(self, folder): + """Look only python files an packages.""" + for resource in folder.get_folders(): + if self._is_package(resource): # noqa + return [folder] + + for resource in folder.get_files(): + if resource.name.endswith('.py'): + return [folder] + + return [] + +pycore.PyCore._find_source_folders = find_source_folders # noqa diff --git a/pymode/utils.py b/pymode/utils.py index eb947b88..1ce2e343 100644 --- a/pymode/utils.py +++ b/pymode/utils.py @@ -6,7 +6,7 @@ from contextlib import contextmanager import vim # noqa -from ._compat import StringIO, PY2 +from ._compat import StringIO DEBUG = int(vim.eval('g:pymode_debug')) @@ -37,4 +37,3 @@ def patch_paths(): Load required modules from the plugin's sources. 
""" sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs')) - sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'libs2' if PY2 else 'libs3')) From 55b878d1bcc5990aa2d33123fd1db3fdeebb212a Mon Sep 17 00:00:00 2001 From: Bryce Guinta Date: Thu, 22 Dec 2016 05:41:19 -0700 Subject: [PATCH 012/246] Add a 'frequent problems' section into README.rst to avoid non-issue issues being opened. --- README.rst | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/README.rst b/README.rst index a47d460e..867029d5 100644 --- a/README.rst +++ b/README.rst @@ -163,6 +163,22 @@ You can override the default key bindings by redefining them in your `.vimrc`, f let g:pymode_doc_bind = "" +Frequent Problems +================= + +Read this section before opening an issue on the tracker. + +Python 3 Syntax +--------------- + +By default python-mode uses python 2 syntax checking. To enable python 3 +syntax checking (e.g. for async) add:: + + let g:pymode_python = 'python3' + +To your vimrc or exrc file + + Documentation ============= From 9155371c0fbfcf68e4e83628b03794cdb64e1a56 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:36:42 +0300 Subject: [PATCH 013/246] Setup bumpversion. --- .bumpversion.cfg | 10 ++++++++++ Makefile | 25 +++++++++++++++++++++++++ plugin/pymode.vim | 1 - 3 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 .bumpversion.cfg diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 00000000..c6665ff2 --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,10 @@ +[bumpversion] +commit = True +current_version = 0.8.1 +files = plugin/pymode.vim +tag = True +tag_name = {new_version} + +[bumpversion:file:.envfile] +search = Version: {current_version} +replace = Version: {new_version} diff --git a/Makefile b/Makefile index e27a8785..7706091b 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,31 @@ clean: rm -rf $(CURDIR)/build rm -rf *.deb +VERSION?=minor +# target: release - Bump version +release: + git fetch origin + git checkout master + git rebase + git merge develop + bumpversion $(VERSION) + git checkout develop + git rebase + git merge master + git push origin develop master + git push --tags + +.PHONY: minor +minor: release + +.PHONY: patch +patch: + make release VERSION=patch + +.PHONY: major +major: + make release VERSION=major + # Temporary disable rope tests on Travis .PHONY: travis travis: diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 26541ae2..788b216d 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -314,4 +314,3 @@ endif command! PymodeVersion echomsg "Pymode version: " . g:pymode_version . " interpreter: " . g:pymode_python . " lint: " . g:pymode_lint . " rope: " . g:pymode_rope augroup pymode - From 17b097d37faa9ecfdf5f152a34f0d2a86fed14f3 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:40:16 +0300 Subject: [PATCH 014/246] Setup bumpversion. --- .bumpversion.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index c6665ff2..7200f866 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -5,6 +5,6 @@ files = plugin/pymode.vim tag = True tag_name = {new_version} -[bumpversion:file:.envfile] +[bumpversion:file:docs/pymode.txt] search = Version: {current_version} replace = Version: {new_version} From b5c9c3fc8578f774b47087e84827247baf995ee4 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:43:16 +0300 Subject: [PATCH 015/246] Setup bumpversion. 
--- .bumpversion.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 7200f866..e1157721 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -5,6 +5,6 @@ files = plugin/pymode.vim tag = True tag_name = {new_version} -[bumpversion:file:docs/pymode.txt] +[bumpversion:file:doc/pymode.txt] search = Version: {current_version} replace = Version: {new_version} From 2f850dcf506af85be22365aebda6ff737e5f004e Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:43:54 +0300 Subject: [PATCH 016/246] =?UTF-8?q?Bump=20version:=200.8.1=20=E2=86=92=200?= =?UTF-8?q?.9.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 3 ++- doc/pymode.txt | 2 +- plugin/pymode.vim | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e1157721..260501bb 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,6 @@ [bumpversion] commit = True -current_version = 0.8.1 +current_version = 0.9.0 files = plugin/pymode.vim tag = True tag_name = {new_version} @@ -8,3 +8,4 @@ tag_name = {new_version} [bumpversion:file:doc/pymode.txt] search = Version: {current_version} replace = Version: {new_version} + diff --git a/doc/pymode.txt b/doc/pymode.txt index 1cb91fdc..060cc7a7 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -6,7 +6,7 @@ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ - Version: 0.8.1 + Version: 0.9.0 ============================================================================== CONTENTS *pymode-contents* diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e8bf71f0..895153a0 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.8.1" +let g:pymode_version = "0.9.0" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() From eeb45fa722b488987121f155b53f5d7bdaa5a335 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:43:16 +0300 Subject: [PATCH 017/246] Setup bumpversion. --- .bumpversion.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 7200f866..e1157721 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -5,6 +5,6 @@ files = plugin/pymode.vim tag = True tag_name = {new_version} -[bumpversion:file:docs/pymode.txt] +[bumpversion:file:doc/pymode.txt] search = Version: {current_version} replace = Version: {new_version} From 67d4b9441b55dc4ae6e6e81058398ec2491bbf0a Mon Sep 17 00:00:00 2001 From: mattdodge Date: Fri, 23 Dec 2016 10:15:55 -0800 Subject: [PATCH 018/246] Allow virtual environments to be absolute paths --- autoload/pymode/virtualenv.vim | 4 ++-- doc/pymode.txt | 4 ++-- pymode/virtualenv.py | 5 +++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/autoload/pymode/virtualenv.vim b/autoload/pymode/virtualenv.vim index e8207b04..7401e94b 100644 --- a/autoload/pymode/virtualenv.vim +++ b/autoload/pymode/virtualenv.vim @@ -11,7 +11,7 @@ fun! pymode#virtualenv#init() "{{{ endfunction "}}} -fun! pymode#virtualenv#activate(relpath) "{{{ - let g:pymode_virtualenv_path = getcwd() . '/' . a:relpath +fun! 
pymode#virtualenv#activate(path) "{{{ + let g:pymode_virtualenv_path = a:path call pymode#virtualenv#init() endfunction "}}} diff --git a/doc/pymode.txt b/doc/pymode.txt index 060cc7a7..1d7bb24f 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -212,8 +212,8 @@ Bind keys to show documentation for current word (selection) *pymode-virtualenv* Commands: -*:PymodeVirtualenv* -- Activate virtualenv (path is related to -current working directory) +*:PymodeVirtualenv* -- Activate virtualenv (path can be absolute or +relative to current working directory) Enable automatic virtualenv detection *'g:pymode_virtualenv'* > diff --git a/pymode/virtualenv.py b/pymode/virtualenv.py index 063a1c71..bf8f55fc 100644 --- a/pymode/virtualenv.py +++ b/pymode/virtualenv.py @@ -15,6 +15,11 @@ def enable_virtualenv(): """ path = env.var('g:pymode_virtualenv_path') + # Normalize path to be an absolute path + # If an absolute path is provided, that path will be returned, otherwise + # the returned path will be an absolute path but computed relative + # to the current working directory + path = os.path.abspath(path) enabled = env.var('g:pymode_virtualenv_enabled') if path == enabled: env.message('Virtualenv %s already enabled.' % path) From 05a0e8553f97716a7846d2a83861f650942af8e6 Mon Sep 17 00:00:00 2001 From: mattdodge Date: Fri, 23 Dec 2016 10:49:46 -0800 Subject: [PATCH 019/246] Add basic unit test with a TODO --- t/virtualenv.vim | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 t/virtualenv.vim diff --git a/t/virtualenv.vim b/t/virtualenv.vim new file mode 100644 index 00000000..fcac026f --- /dev/null +++ b/t/virtualenv.vim @@ -0,0 +1,28 @@ +describe 'pymode-virtualenv' + + before + source plugin/pymode.vim + set filetype=python + end + + after + bd! + end + + " TODO: How can we mock the virtualenv activation to check that the + " proper path is set to pymode_virtualenv_enabled? Right now, the + " python function enable_virtualenv gets called but fails when trying + " to actually activate so the env.let never gets called + + it 'accepts relative paths' + call pymode#virtualenv#activate("sample/relative/path") + " Our path variable is the path argument + Expect g:pymode_virtualenv_path == "sample/relative/path" + end + + it 'accepts absolute paths' + call pymode#virtualenv#activate("/sample/absolute/path") + " Our path variable is the path argument + Expect g:pymode_virtualenv_path == "/sample/absolute/path" + end +end From f7ccee54743800ef2aab264dafeed3d5a4ec1fbd Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Mon, 26 Dec 2016 09:44:27 +0300 Subject: [PATCH 020/246] Update authors. 
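The os.path.abspath() normalization introduced above is what lets both call styles exercised by the new tests work; a quick illustration with invented directory names::

    # Illustration of the normalization added in the virtualenv patch above;
    # the printed values depend on the current working directory.
    import os

    print(os.getcwd())                        # say this prints '/home/user/project'
    print(os.path.abspath('env/py3'))         # relative arg  -> '/home/user/project/env/py3'
    print(os.path.abspath('/opt/venvs/py3'))  # absolute arg  -> returned unchanged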
--- AUTHORS | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index fedbb7c8..cc3de277 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,6 +1,7 @@ -Maintainer: +Maintainers: * Kirill Klenov +* Bryce Guinta (https://github.com/brycepg) Contributors: @@ -31,6 +32,7 @@ Contributors: * Lawrence Akka (https://github.com/lawrenceakka); * Lowe Thiderman (http://github.com/thiderman); * Martin Brochhaus (http://github.com/mbrochh); +* Matt Dodge (https://github.com/mattdodge); * Matthew Moses (http://github.com/mlmoses); * Maxim (https://github.com/mpyatishev); * Mel Boyce (http://github.com/syngin); From 1b30f1f546b0eeb1ae8d6b1b6d36ebfca4cb8a70 Mon Sep 17 00:00:00 2001 From: Semyon Maryasin Date: Mon, 23 Jan 2017 04:50:44 +0300 Subject: [PATCH 021/246] Do support underscore in numbers, pep515 (py3.6) --- syntax/python.vim | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/syntax/python.vim b/syntax/python.vim index aebb15ad..608d887f 100644 --- a/syntax/python.vim +++ b/syntax/python.vim @@ -250,16 +250,16 @@ endif " Numbers {{{ " =========== - syn match pythonHexError "\<0[xX]\x*[g-zG-Z]\x*[lL]\=\>" display - syn match pythonHexNumber "\<0[xX]\x\+[lL]\=\>" display - syn match pythonOctNumber "\<0[oO]\o\+[lL]\=\>" display - syn match pythonBinNumber "\<0[bB][01]\+[lL]\=\>" display - syn match pythonNumber "\<\d\+[lLjJ]\=\>" display - syn match pythonFloat "\.\d\+\([eE][+-]\=\d\+\)\=[jJ]\=\>" display - syn match pythonFloat "\<\d\+[eE][+-]\=\d\+[jJ]\=\>" display - syn match pythonFloat "\<\d\+\.\d*\([eE][+-]\=\d\+\)\=[jJ]\=" display - syn match pythonOctError "\<0[oO]\=\o*[8-9]\d*[lL]\=\>" display - syn match pythonBinError "\<0[bB][01]*[2-9]\d*[lL]\=\>" display + syn match pythonHexError "\<0[xX][0-9a-fA-F_]*[g-zG-Z][0-9a-fA-F_]*[lL]\=\>" display + syn match pythonHexNumber "\<0[xX][0-9a-fA-F_]\+[lL]\=\>" display + syn match pythonOctNumber "\<0[oO][0-7_]\+[lL]\=\>" display + syn match pythonBinNumber "\<0[bB][01_]\+[lL]\=\>" display + syn match pythonNumber "\<[0-9_]\+[lLjJ]\=\>" display + syn match pythonFloat "\.[0-9_]\+\([eE][+-]\=[0-9_]\+\)\=[jJ]\=\>" display + syn match pythonFloat "\<[0-9_]\+[eE][+-]\=[0-9_]\+[jJ]\=\>" display + syn match pythonFloat "\<[0-9_]\+\.[0-9_]*\([eE][+-]\=[0-9_]\+\)\=[jJ]\=" display + syn match pythonOctError "\<0[oO]\=[0-7_]*[8-9][0-9_]*[lL]\=\>" display + syn match pythonBinError "\<0[bB][01_]*[2-9][0-9_]*[lL]\=\>" display " }}} From b44f3642373536978d693c0ee77b52e5c3d24a4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A1=D0=B5=D0=BC=D1=91=D0=BD=20=D0=9C=D0=B0=D1=80=D1=8C?= =?UTF-8?q?=D1=8F=D1=81=D0=B8=D0=BD?= Date: Mon, 23 Jan 2017 06:14:01 +0400 Subject: [PATCH 022/246] Update owner name in links in readme After migrating this repo from `klen` to `python-mode` account, Travis build status indicator ceased to work. This commit fixes that, together with streamlining other links. --- README.rst | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/README.rst b/README.rst index 867029d5..0706f0c6 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,8 @@ |logo| Python-mode, Python in VIM ################################# -.. image:: https://travis-ci.org/klen/python-mode.png?branch=develop - :target: https://travis-ci.org/klen/python-mode +.. 
image:: https://travis-ci.org/python-mode/python-mode.png?branch=develop + :target: https://travis-ci.org/python-mode/python-mode ----- @@ -13,9 +13,9 @@ ----- | -| Src: https://github.com/klen/python-mode +| Src: https://github.com/python-mode/python-mode | Homepage: https://klen.github.io/python-mode/ -| Docs: https://github.com/klen/python-mode/blob/develop/doc/pymode.txt +| Docs: https://github.com/python-mode/python-mode/blob/develop/doc/pymode.txt | Python-mode is a vim plugin that helps you to create python code very quickly @@ -74,7 +74,7 @@ Using pathogen (recommended) % cd ~/.vim % mkdir -p bundle && cd bundle - % git clone https://github.com/klen/python-mode.git + % git clone https://github.com/python-mode/python-mode.git - Enable `pathogen `_ in your ``~/.vimrc``: :: @@ -93,7 +93,7 @@ Manually -------- :: - % git clone https://github.com/klen/python-mode.git + % git clone https://github.com/python-mode/python-mode.git % cd python-mode % cp -R * ~/.vim @@ -144,7 +144,7 @@ plugin. 2. Type `:PymodeTroubleshooting` And fix any warnings or copy the output and send it to me. (For example, by -creating a `new github issue `_ +creating a `new github issue `_ if one does not already exist for the problem). @@ -190,7 +190,7 @@ Bugtracker If you have any suggestions, bug reports or annoyances please report them to the issue tracker -at https://github.com/klen/python-mode/issues +at https://github.com/python-mode/python-mode/issues Contributing @@ -202,7 +202,7 @@ Contributing Also see the `AUTHORS` file. Development of python-mode happens at github: -https://github.com/klen/python-mode +https://github.com/python-mode/python-mode Please make a pull request to `development` branch and add yourself to `AUTHORS`. @@ -210,25 +210,25 @@ Please make a pull request to `development` branch and add yourself to Source Links =================== - `doc/pymode.txt - `__ + `__ -- ``:help pymode`` - `plugin/pymode.vim - `__ + `__ -- python-mode VIM plugin - `syntax/python.vim - `__ + `__ -- python-mode ``python.vim`` VIM syntax - `syntax/pyrex.vim - `__ + `__ -- ``pyrex.vim`` VIM syntax (pyrex, Cython) - `t/ - `__ + `__ -- ``*.vim`` more python-mode VIM configuration - `pymode/ - `__ + `__ -- ``*.py`` -- python-mode Python module - `pymode/libs/ - `__ + `__ -- ``*.py`` -- `Python Libraries <#python-libraries>`__ @@ -236,7 +236,7 @@ Python Libraries ------------------ Vendored Python modules are located mostly in -`pymode/libs/ `__. +`pymode/libs/ `__. ====== @@ -364,4 +364,4 @@ My address is here: "Russia, 143500, MO, Istra, pos. Severny 8-3" to "Kirill Kle .. _pythonvim: http://www.hlabs.spb.ru/vim/python.vim .. _pep8_: http://github.com/jcrocholl/pep8 .. _pep8indent: http://github.com/hynek/vim-python-pep8-indent -.. |logo| image:: https://raw.github.com/klen/python-mode/develop/logo.png +.. 
|logo| image:: https://raw.github.com/python-mode/python-mode/develop/logo.png From 72bbe35974b170c62887f7b7a767e8620dc624fa Mon Sep 17 00:00:00 2001 From: monteiro Date: Mon, 6 Feb 2017 12:19:24 -0200 Subject: [PATCH 023/246] fixed folding for inline def statements --- autoload/pymode/folding.vim | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index b8362722..5a692ab6 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -8,7 +8,7 @@ let s:def_regex = g:pymode_folding_regex let s:blank_regex = '^\s*$' " Spyder, a very popular IDE for python has a template which includes " '@author:' ; thus the regex below. -let s:decorator_regex = '^\s*@\(author:\)\@!' +let s:decorator_regex = '^\s*@\(author:\)\@!' let s:doc_begin_regex = '^\s*[uU]\=\%("""\|''''''\)' let s:doc_end_regex = '\%("""\|''''''\)\s*$' " This one is needed for the while loop to count for opening and closing @@ -200,7 +200,7 @@ fun! s:BlockStart(lnum) "{{{ " Now find the class/def one shiftwidth lower than the start of the " aforementioned indent block. - if next_stmt_at_def_indent && next_stmt_at_def_indent < a:lnum + if next_stmt_at_def_indent && a:lnum <= next_stmt_at_def_indent let max_indent = max([indent(next_stmt_at_def_indent) - &shiftwidth, 0]) else let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) @@ -211,6 +211,14 @@ endfunction "}}} fun! s:BlockEnd(lnum) "{{{ " Note: Make sure to reset cursor position after using this function. call cursor(a:lnum, 0) + " Regex translation: + " \v: very magic + " \s: any space char + " {...}: zero to more as many as possible + " \S: non whitespace + " index [0]: gets the line returned by searchpos + " flag 'n': do not move cursor + " flag 'W': don't wrap around the end of the file return searchpos('\v^\s{,'.indent('.').'}\S', 'nW')[0] - 1 endfunction "}}} @@ -227,7 +235,7 @@ function! s:Is_opening_folding(lnum) "{{{ for i in range(1, a:lnum) let i_line = getline(i) - if i_line =~ s:doc_line_regex + if i_line =~ s:doc_line_regex " echom "case 00 on line " . i continue endif @@ -252,7 +260,7 @@ function! s:Is_opening_folding(lnum) "{{{ elseif i_line =~ s:doc_general_regex " echom "extra docstrings on line " . i let extra_docstrings = extra_docstrings + 1 - endif + endif endfor if fmod(number_of_folding, 2) == 1 "If odd then it is an opening From a8dcbc8805bf743455809df1f7c17d11e92fe38a Mon Sep 17 00:00:00 2001 From: monteiro Date: Mon, 6 Feb 2017 12:32:59 -0200 Subject: [PATCH 024/246] improved folding of various cases --- autoload/pymode/folding.vim | 10 +--------- plugin/pymode.vim | 6 +++--- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 5a692ab6..218520f0 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -200,7 +200,7 @@ fun! s:BlockStart(lnum) "{{{ " Now find the class/def one shiftwidth lower than the start of the " aforementioned indent block. - if next_stmt_at_def_indent && a:lnum <= next_stmt_at_def_indent + if next_stmt_at_def_indent && next_stmt_at_def_indent < a:lnum let max_indent = max([indent(next_stmt_at_def_indent) - &shiftwidth, 0]) else let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) @@ -211,14 +211,6 @@ endfunction "}}} fun! s:BlockEnd(lnum) "{{{ " Note: Make sure to reset cursor position after using this function. 
call cursor(a:lnum, 0) - " Regex translation: - " \v: very magic - " \s: any space char - " {...}: zero to more as many as possible - " \S: non whitespace - " index [0]: gets the line returned by searchpos - " flag 'n': do not move cursor - " flag 'W': don't wrap around the end of the file return searchpos('\v^\s{,'.indent('.').'}\S', 'nW')[0] - 1 endfunction "}}} diff --git a/plugin/pymode.vim b/plugin/pymode.vim index 26541ae2..3ac97388 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,5 +1,5 @@ " vi: fdl=1 -let g:pymode_version = "0.8.1" +let g:pymode_version = "0.9.0" com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version com! PymodeTroubleshooting call pymode#troubleshooting#test() @@ -39,7 +39,8 @@ call pymode#default("g:pymode_folding", 1) " Maximum file length to check for nested class/def statements call pymode#default("g:pymode_folding_nest_limit", 1000) " Change for folding customization (by example enable fold for 'if', 'for') -call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\) \w\+') +call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\|async\s\+def\) .\+\(:\s\+\w\)\@!') + " Enable/disable python motion operators call pymode#default("g:pymode_motion", 1) @@ -314,4 +315,3 @@ endif command! PymodeVersion echomsg "Pymode version: " . g:pymode_version . " interpreter: " . g:pymode_python . " lint: " . g:pymode_lint . " rope: " . g:pymode_rope augroup pymode - From 12d0d60e44c9e3adef61e1553980ef730c4096ee Mon Sep 17 00:00:00 2001 From: Semyon Maryasin Date: Sat, 11 Feb 2017 20:04:35 +0300 Subject: [PATCH 025/246] Don't treat lone _ as a digit --- syntax/python.vim | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/syntax/python.vim b/syntax/python.vim index 608d887f..317ef1d9 100644 --- a/syntax/python.vim +++ b/syntax/python.vim @@ -251,13 +251,13 @@ endif " =========== syn match pythonHexError "\<0[xX][0-9a-fA-F_]*[g-zG-Z][0-9a-fA-F_]*[lL]\=\>" display - syn match pythonHexNumber "\<0[xX][0-9a-fA-F_]\+[lL]\=\>" display - syn match pythonOctNumber "\<0[oO][0-7_]\+[lL]\=\>" display - syn match pythonBinNumber "\<0[bB][01_]\+[lL]\=\>" display - syn match pythonNumber "\<[0-9_]\+[lLjJ]\=\>" display - syn match pythonFloat "\.[0-9_]\+\([eE][+-]\=[0-9_]\+\)\=[jJ]\=\>" display - syn match pythonFloat "\<[0-9_]\+[eE][+-]\=[0-9_]\+[jJ]\=\>" display - syn match pythonFloat "\<[0-9_]\+\.[0-9_]*\([eE][+-]\=[0-9_]\+\)\=[jJ]\=" display + syn match pythonHexNumber "\<0[xX][0-9a-fA-F_]*[0-9a-fA-F][0-9a-fA-F_]*[lL]\=\>" display + syn match pythonOctNumber "\<0[oO][0-7_]*[0-7][0-7_]*[lL]\=\>" display + syn match pythonBinNumber "\<0[bB][01_]*[01][01_]*[lL]\=\>" display + syn match pythonNumber "\<[0-9_]*[0-9][0-9_]*[lLjJ]\=\>" display + syn match pythonFloat "\.[0-9_]*[0-9][0-9_]*\([eE][+-]\=[0-9_]*[0-9][0-9_]*\)\=[jJ]\=\>" display + syn match pythonFloat "\<[0-9_]*[0-9][0-9_]*[eE][+-]\=[0-9_]\+[jJ]\=\>" display + syn match pythonFloat "\<[0-9_]*[0-9][0-9_]*\.[0-9_]*\([eE][+-]\=[0-9_]*[0-9][0-9_]*\)\=[jJ]\=" display syn match pythonOctError "\<0[oO]\=[0-7_]*[8-9][0-9_]*[lL]\=\>" display syn match pythonBinError "\<0[bB][01_]*[2-9][0-9_]*[lL]\=\>" display From 9bd0e40d272cca1562db8643234268263fc325f2 Mon Sep 17 00:00:00 2001 From: Semyon Maryasin Date: Sat, 11 Feb 2017 20:05:54 +0300 Subject: [PATCH 026/246] Number may not start with underscore --- syntax/python.vim | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/syntax/python.vim b/syntax/python.vim index 317ef1d9..090e9be1 100644 
--- a/syntax/python.vim +++ b/syntax/python.vim @@ -254,10 +254,10 @@ endif syn match pythonHexNumber "\<0[xX][0-9a-fA-F_]*[0-9a-fA-F][0-9a-fA-F_]*[lL]\=\>" display syn match pythonOctNumber "\<0[oO][0-7_]*[0-7][0-7_]*[lL]\=\>" display syn match pythonBinNumber "\<0[bB][01_]*[01][01_]*[lL]\=\>" display - syn match pythonNumber "\<[0-9_]*[0-9][0-9_]*[lLjJ]\=\>" display + syn match pythonNumber "\<[0-9][0-9_]*[lLjJ]\=\>" display syn match pythonFloat "\.[0-9_]*[0-9][0-9_]*\([eE][+-]\=[0-9_]*[0-9][0-9_]*\)\=[jJ]\=\>" display - syn match pythonFloat "\<[0-9_]*[0-9][0-9_]*[eE][+-]\=[0-9_]\+[jJ]\=\>" display - syn match pythonFloat "\<[0-9_]*[0-9][0-9_]*\.[0-9_]*\([eE][+-]\=[0-9_]*[0-9][0-9_]*\)\=[jJ]\=" display + syn match pythonFloat "\<[0-9][0-9_]*[eE][+-]\=[0-9_]\+[jJ]\=\>" display + syn match pythonFloat "\<[0-9][0-9_]*\.[0-9_]*\([eE][+-]\=[0-9_]*[0-9][0-9_]*\)\=[jJ]\=" display syn match pythonOctError "\<0[oO]\=[0-7_]*[8-9][0-9_]*[lL]\=\>" display syn match pythonBinError "\<0[bB][01_]*[2-9][0-9_]*[lL]\=\>" display From 8587789fe828a22641b79a56e465fea1d4f35ede Mon Sep 17 00:00:00 2001 From: Jacob Niehus Date: Tue, 28 Feb 2017 20:23:39 -0700 Subject: [PATCH 027/246] Make folding much faster in some cases Cache the results of Is_opening_folding once per change so it doesn't loop over the same lines multiple times unnecessarily. --- autoload/pymode/folding.vim | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 218520f0..399748e3 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -216,6 +216,15 @@ endfunction "}}} function! s:Is_opening_folding(lnum) "{{{ " Helper function to see if docstring is opening or closing + + " Cache the result so the loop runs only once per change + if get(b:, 'fold_changenr', -1) == changenr() + return b:fold_cache[a:lnum] "If odd then it is an opening + else + let b:fold_changenr = changenr() + let b:fold_cache = [] + endif + let number_of_folding = 0 " To be analized if odd/even to inform if it is opening or closing. let has_open_docstring = 0 " To inform is already has an open docstring. let extra_docstrings = 0 " To help skipping ''' and """ which are not docstrings @@ -224,7 +233,9 @@ function! s:Is_opening_folding(lnum) "{{{ " not just triple quotes (that could be a regular string). " " Iterater over all lines from the start until current line (inclusive) - for i in range(1, a:lnum) + for i in range(1, line('$')) + call add(b:fold_cache, number_of_folding % 2) + let i_line = getline(i) if i_line =~ s:doc_line_regex @@ -255,11 +266,9 @@ function! s:Is_opening_folding(lnum) "{{{ endif endfor - if fmod(number_of_folding, 2) == 1 "If odd then it is an opening - return 1 - else - return 0 - endif + call add(b:fold_cache, number_of_folding % 2) + + return b:fold_cache[a:lnum] endfunction "}}} " vim: fdm=marker:fdl=0 From 8abc4363b0e6f073a9bc29af17752ea72a8cdf92 Mon Sep 17 00:00:00 2001 From: Colin Kennedy Date: Thu, 18 May 2017 13:29:00 -0800 Subject: [PATCH 028/246] Updated docstring regex to allow folding of raw python strings --- autoload/pymode/folding.vim | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim index 399748e3..3b29aebb 100644 --- a/autoload/pymode/folding.vim +++ b/autoload/pymode/folding.vim @@ -9,12 +9,12 @@ let s:blank_regex = '^\s*$' " Spyder, a very popular IDE for python has a template which includes " '@author:' ; thus the regex below. 
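" For example (sample lines, not taken from any real file): '@property' and
" '    @wraps(f)' still open a decorator fold, while a template line that
" starts with '@author:' is skipped thanks to the \@! negative look-ahead.
" Likewise, the docstring patterns below now accept an optional u/U/r/R
" prefix, so a module opening with r""" folds the same way as one opening
" with a plain """.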
let s:decorator_regex = '^\s*@\(author:\)\@!' -let s:doc_begin_regex = '^\s*[uU]\=\%("""\|''''''\)' +let s:doc_begin_regex = '^\s*[uUrR]\=\%("""\|''''''\)' let s:doc_end_regex = '\%("""\|''''''\)\s*$' " This one is needed for the while loop to count for opening and closing " docstrings. let s:doc_general_regex = '\%("""\|''''''\)' -let s:doc_line_regex = '^\s*[uU]\=\("""\|''''''\).\+\1\s*$' +let s:doc_line_regex = '^\s*[uUrR]\=\("""\|''''''\).\+\1\s*$' let s:symbol = matchstr(&fillchars, 'fold:\zs.') " handles multibyte characters if s:symbol == '' let s:symbol = ' ' @@ -42,7 +42,7 @@ fun! pymode#folding#text() " {{{ let line = substitute(line, '\t', onetab, 'g') let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) - let line = substitute(line, '[uU]\=\%("""\|''''''\)', '', '') + let line = substitute(line, '[uUrR]\=\%("""\|''''''\)', '', '') let fillcharcount = windowwidth - len(line) - len(foldedlinecount) + 1 return line . ' ' . repeat(s:symbol, fillcharcount) . ' ' . foldedlinecount endfunction "}}} From a8b1fe6887fa3ac875524ce907c4c6396135deb5 Mon Sep 17 00:00:00 2001 From: Kirill Klenov Date: Fri, 23 Dec 2016 15:21:09 +0300 Subject: [PATCH 029/246] Update libs. --- pylama.ini | 1 + pymode/libs/_markerlib/__init__.py | 16 - pymode/libs/_markerlib/markers.py | 119 - .../astroid-1.4.9.dist-info/DESCRIPTION.rst | 66 + pymode/libs/astroid-1.4.9.dist-info/INSTALLER | 1 + pymode/libs/astroid-1.4.9.dist-info/METADATA | 84 + pymode/libs/astroid-1.4.9.dist-info/RECORD | 272 ++ .../WHEEL | 3 +- .../astroid-1.4.9.dist-info/metadata.json | 1 + .../astroid-1.4.9.dist-info/top_level.txt | 1 + pymode/libs/astroid/__init__.py | 15 +- pymode/libs/astroid/__pkginfo__.py | 10 +- pymode/libs/astroid/arguments.py | 233 ++ pymode/libs/astroid/as_string.py | 115 +- pymode/libs/astroid/bases.py | 290 +- ...nference.py => brain_builtin_inference.py} | 143 +- pymode/libs/astroid/brain/brain_dateutil.py | 15 + .../astroid/brain/{py2gi.py => brain_gi.py} | 56 +- .../{py2mechanize.py => brain_mechanize.py} | 0 .../brain/{pynose.py => brain_nose.py} | 5 +- pymode/libs/astroid/brain/brain_numpy.py | 62 + pymode/libs/astroid/brain/brain_pytest.py | 76 + pymode/libs/astroid/brain/brain_qt.py | 44 + .../brain/{pysix_moves.py => brain_six.py} | 29 +- pymode/libs/astroid/brain/brain_ssl.py | 65 + pymode/libs/astroid/brain/brain_stdlib.py | 473 +++ pymode/libs/astroid/brain/py2pytest.py | 31 - pymode/libs/astroid/brain/py2qt4.py | 22 - pymode/libs/astroid/brain/py2stdlib.py | 334 -- pymode/libs/astroid/builder.py | 185 +- pymode/libs/astroid/context.py | 81 + pymode/libs/astroid/decorators.py | 75 + pymode/libs/astroid/exceptions.py | 20 + pymode/libs/astroid/inference.py | 320 +- pymode/libs/astroid/inspector.py | 273 -- pymode/libs/astroid/manager.py | 214 +- pymode/libs/astroid/mixins.py | 51 +- pymode/libs/astroid/modutils.py | 153 +- pymode/libs/astroid/node_classes.py | 399 ++- pymode/libs/astroid/nodes.py | 57 +- pymode/libs/astroid/objects.py | 186 + pymode/libs/astroid/protocols.py | 259 +- pymode/libs/astroid/raw_building.py | 40 +- pymode/libs/astroid/rebuilder.py | 681 ++-- pymode/libs/astroid/scoped_nodes.py | 1072 +++--- pymode/libs/astroid/test_utils.py | 25 +- .../_vendor => astroid/tests}/__init__.py | 0 pymode/libs/astroid/tests/resources.py | 72 + .../python2/data/MyPyPa-0.1.0-py2.5.egg | Bin 0 -> 1222 bytes .../python2/data/MyPyPa-0.1.0-py2.5.zip | Bin 0 -> 1222 bytes .../testdata/python2/data/SSL1/Connection1.py | 14 + .../testdata/python2/data/SSL1/__init__.py | 1 + 
.../tests/testdata/python2/data/__init__.py | 1 + .../testdata/python2/data/absimp/__init__.py | 5 + .../data/absimp/sidepackage/__init__.py | 3 + .../testdata/python2/data/absimp/string.py | 3 + .../tests/testdata/python2/data/absimport.py | 3 + .../tests/testdata/python2/data/all.py | 9 + .../testdata/python2/data/appl/__init__.py | 3 + .../python2/data/appl/myConnection.py | 12 + .../python2/data/clientmodule_test.py | 32 + .../testdata/python2/data/descriptor_crash.py | 11 + .../tests/testdata/python2/data/email.py | 1 + .../python2/data/find_test/__init__.py | 0 .../testdata/python2/data/find_test/module.py | 0 .../python2/data/find_test/module2.py | 0 .../python2/data/find_test/noendingnewline.py | 0 .../python2/data/find_test/nonregr.py | 0 .../tests/testdata/python2/data/format.py | 34 + .../testdata/python2/data/joined_strings.py | 1051 ++++++ .../testdata/python2/data/lmfp/__init__.py | 2 + .../tests/testdata/python2/data/lmfp/foo.py | 6 + .../tests/testdata/python2/data/module.py | 89 + .../python2/data/module1abs/__init__.py | 4 + .../testdata/python2/data/module1abs/core.py | 1 + .../tests/testdata/python2/data/module2.py | 143 + .../testdata/python2/data/noendingnewline.py | 36 + .../tests/testdata/python2/data/nonregr.py | 57 + .../tests/testdata/python2/data/notall.py | 7 + .../testdata/python2/data/package/__init__.py | 4 + .../python2/data/package/absimport.py | 6 + .../testdata/python2/data/package/hello.py | 2 + .../import_package_subpackage_module.py | 49 + .../data/package/subpackage/__init__.py | 1 + .../python2/data/package/subpackage/module.py | 1 + .../tests/testdata/python2/data/recursion.py | 3 + .../python2/data/suppliermodule_test.py | 13 + .../python2/data/unicode_package/__init__.py | 1 + .../data/unicode_package/core/__init__.py | 0 .../python3/data/MyPyPa-0.1.0-py2.5.egg | Bin 0 -> 1222 bytes .../python3/data/MyPyPa-0.1.0-py2.5.zip | Bin 0 -> 1222 bytes .../testdata/python3/data/SSL1/Connection1.py | 14 + .../testdata/python3/data/SSL1/__init__.py | 1 + .../tests/testdata/python3/data/__init__.py | 1 + .../testdata/python3/data/absimp/__init__.py | 5 + .../data/absimp/sidepackage/__init__.py | 3 + .../testdata/python3/data/absimp/string.py | 3 + .../tests/testdata/python3/data/absimport.py | 3 + .../tests/testdata/python3/data/all.py | 9 + .../testdata/python3/data/appl/__init__.py | 3 + .../python3/data/appl/myConnection.py | 11 + .../python3/data/clientmodule_test.py | 32 + .../testdata/python3/data/descriptor_crash.py | 11 + .../tests/testdata/python3/data/email.py | 1 + .../python3/data/find_test/__init__.py | 0 .../testdata/python3/data/find_test/module.py | 0 .../python3/data/find_test/module2.py | 0 .../python3/data/find_test/noendingnewline.py | 0 .../python3/data/find_test/nonregr.py | 0 .../tests/testdata/python3/data/format.py | 34 + .../testdata/python3/data/joined_strings.py | 1051 ++++++ .../testdata/python3/data/lmfp/__init__.py | 2 + .../tests/testdata/python3/data/lmfp/foo.py | 6 + .../tests/testdata/python3/data/module.py | 88 + .../python3/data/module1abs/__init__.py | 4 + .../testdata/python3/data/module1abs/core.py | 1 + .../tests/testdata/python3/data/module2.py | 143 + .../testdata/python3/data/noendingnewline.py | 36 + .../tests/testdata/python3/data/nonregr.py | 57 + .../tests/testdata/python3/data/notall.py | 8 + .../testdata/python3/data/package/__init__.py | 4 + .../python3/data/package/absimport.py | 6 + .../testdata/python3/data/package/hello.py | 2 + .../import_package_subpackage_module.py | 49 + 
.../data/package/subpackage/__init__.py | 1 + .../python3/data/package/subpackage/module.py | 1 + .../tests/testdata/python3/data/recursion.py | 3 + .../python3/data/suppliermodule_test.py | 13 + .../python3/data/unicode_package/__init__.py | 1 + .../data/unicode_package/core/__init__.py | 0 pymode/libs/astroid/tests/unittest_brain.py | 506 +++ pymode/libs/astroid/tests/unittest_builder.py | 774 ++++ .../libs/astroid/tests/unittest_inference.py | 2130 +++++++++++ pymode/libs/astroid/tests/unittest_lookup.py | 352 ++ pymode/libs/astroid/tests/unittest_manager.py | 216 ++ .../libs/astroid/tests/unittest_modutils.py | 269 ++ pymode/libs/astroid/tests/unittest_nodes.py | 764 ++++ pymode/libs/astroid/tests/unittest_objects.py | 530 +++ .../libs/astroid/tests/unittest_peephole.py | 121 + .../libs/astroid/tests/unittest_protocols.py | 176 + pymode/libs/astroid/tests/unittest_python3.py | 254 ++ .../astroid/tests/unittest_raw_building.py | 85 + .../libs/astroid/tests/unittest_regrtest.py | 364 ++ .../astroid/tests/unittest_scoped_nodes.py | 1583 +++++++++ .../libs/astroid/tests/unittest_transforms.py | 245 ++ pymode/libs/astroid/tests/unittest_utils.py | 124 + pymode/libs/astroid/transforms.py | 96 + pymode/libs/astroid/util.py | 89 + pymode/libs/astroid/utils.py | 239 -- ...ts.functools_lru_cache-1.3-py3.5-nspkg.pth | 1 + .../DESCRIPTION.rst | 28 + .../INSTALLER | 1 + .../METADATA | 44 + .../RECORD | 11 + .../WHEEL | 6 + .../metadata.json | 1 + .../namespace_packages.txt | 1 + .../top_level.txt | 1 + .../libs/backports/configparser/__init__.py | 1390 ++++++++ pymode/libs/backports/configparser/helpers.py | 171 + pymode/libs/backports/functools_lru_cache.py | 184 + .../libs/configparser-3.5.0-py2.7-nspkg.pth | 1 + .../DESCRIPTION.rst | 305 ++ .../configparser-3.5.0.dist-info/INSTALLER | 1 + .../configparser-3.5.0.dist-info/METADATA | 330 ++ .../libs/configparser-3.5.0.dist-info/RECORD | 15 + .../libs/configparser-3.5.0.dist-info/WHEEL | 5 + .../metadata.json | 1 + .../namespace_packages.txt | 1 + .../top_level.txt | 2 + pymode/libs/configparser.py | 52 + pymode/libs/easy_install.py | 5 - .../isort-4.2.5.dist-info/DESCRIPTION.rst | 606 ++++ pymode/libs/isort-4.2.5.dist-info/INSTALLER | 1 + pymode/libs/isort-4.2.5.dist-info/METADATA | 636 ++++ pymode/libs/isort-4.2.5.dist-info/RECORD | 25 + pymode/libs/isort-4.2.5.dist-info/WHEEL | 6 + .../isort-4.2.5.dist-info/entry_points.txt | 9 + .../libs/isort-4.2.5.dist-info/metadata.json | 1 + .../libs/isort-4.2.5.dist-info/top_level.txt | 1 + pymode/libs/isort/__init__.py | 28 + pymode/libs/isort/hooks.py | 82 + pymode/libs/isort/isort.py | 878 +++++ pymode/libs/isort/main.py | 287 ++ pymode/libs/isort/natural.py | 47 + pymode/libs/isort/pie_slice.py | 528 +++ pymode/libs/isort/pylama_isort.py | 29 + pymode/libs/isort/settings.py | 221 ++ .../DESCRIPTION.rst | 70 + .../INSTALLER | 1 + .../METADATA | 96 + .../lazy_object_proxy-1.2.2.dist-info/RECORD | 19 + .../lazy_object_proxy-1.2.2.dist-info/WHEEL | 5 + .../metadata.json | 1 + .../top_level.txt | 1 + pymode/libs/lazy_object_proxy/__init__.py | 20 + pymode/libs/lazy_object_proxy/cext.c | 1421 ++++++++ pymode/libs/lazy_object_proxy/cext.so | Bin 0 -> 36084 bytes pymode/libs/lazy_object_proxy/compat.py | 9 + pymode/libs/lazy_object_proxy/simple.py | 246 ++ pymode/libs/lazy_object_proxy/slots.py | 414 +++ pymode/libs/lazy_object_proxy/utils.py | 13 + pymode/libs/logilab/common/__init__.py | 184 - pymode/libs/logilab/common/cache.py | 114 - pymode/libs/logilab/common/changelog.py | 238 -- 
pymode/libs/logilab/common/clcommands.py | 334 -- pymode/libs/logilab/common/compat.py | 78 - pymode/libs/logilab/common/configuration.py | 1105 ------ pymode/libs/logilab/common/daemon.py | 101 - pymode/libs/logilab/common/date.py | 335 -- pymode/libs/logilab/common/debugger.py | 214 -- pymode/libs/logilab/common/decorators.py | 281 -- pymode/libs/logilab/common/deprecation.py | 189 - pymode/libs/logilab/common/fileutils.py | 404 --- pymode/libs/logilab/common/graph.py | 282 -- pymode/libs/logilab/common/interface.py | 71 - pymode/libs/logilab/common/logging_ext.py | 195 -- pymode/libs/logilab/common/modutils.py | 713 ---- pymode/libs/logilab/common/optik_ext.py | 392 --- pymode/libs/logilab/common/optparser.py | 92 - pymode/libs/logilab/common/proc.py | 277 -- pymode/libs/logilab/common/pytest.py | 1202 ------- pymode/libs/logilab/common/registry.py | 1125 ------ pymode/libs/logilab/common/shellutils.py | 462 --- pymode/libs/logilab/common/sphinx_ext.py | 87 - pymode/libs/logilab/common/sphinxutils.py | 122 - pymode/libs/logilab/common/table.py | 929 ----- pymode/libs/logilab/common/tasksqueue.py | 101 - pymode/libs/logilab/common/testlib.py | 1338 ------- pymode/libs/logilab/common/textutils.py | 537 --- pymode/libs/logilab/common/tree.py | 369 -- pymode/libs/logilab/common/umessage.py | 194 - .../libs/logilab/common/ureports/__init__.py | 172 - .../logilab/common/ureports/docbook_writer.py | 140 - .../logilab/common/ureports/html_writer.py | 133 - .../logilab/common/ureports/text_writer.py | 145 - pymode/libs/logilab/common/urllib2ext.py | 89 - pymode/libs/logilab/common/visitor.py | 109 - pymode/libs/logilab/common/xmlutils.py | 61 - .../libs/logilab_common-1.0.2-py2.7-nspkg.pth | 1 - .../DESCRIPTION.rst | 153 - .../logilab_common-1.0.2.dist-info/METADATA | 169 - .../logilab_common-1.0.2.dist-info/RECORD | 87 - .../metadata.json | 1 - .../namespace_packages.txt | 1 - .../top_level.txt | 1 - .../mccabe-0.5.3.dist-info/DESCRIPTION.rst | 139 + pymode/libs/mccabe-0.5.3.dist-info/INSTALLER | 1 + pymode/libs/mccabe-0.5.3.dist-info/METADATA | 164 + pymode/libs/mccabe-0.5.3.dist-info/RECORD | 10 + pymode/libs/mccabe-0.5.3.dist-info/WHEEL | 6 + .../mccabe-0.5.3.dist-info/entry_points.txt | 3 + .../libs/mccabe-0.5.3.dist-info/metadata.json | 1 + .../libs/mccabe-0.5.3.dist-info/top_level.txt | 1 + pymode/libs/mccabe.py | 65 +- pymode/libs/pkg_resources/__init__.py | 3113 ----------------- .../_vendor/packaging/__about__.py | 31 - .../_vendor/packaging/__init__.py | 24 - .../_vendor/packaging/_compat.py | 40 - .../_vendor/packaging/_structures.py | 78 - .../_vendor/packaging/specifiers.py | 784 ----- .../_vendor/packaging/version.py | 403 --- .../DESCRIPTION.rst | 870 +++++ .../pycodestyle-2.2.0.dist-info/INSTALLER | 1 + .../libs/pycodestyle-2.2.0.dist-info/METADATA | 890 +++++ .../libs/pycodestyle-2.2.0.dist-info/RECORD | 12 + pymode/libs/pycodestyle-2.2.0.dist-info/WHEEL | 6 + .../entry_points.txt | 3 + .../pycodestyle-2.2.0.dist-info/metadata.json | 1 + .../namespace_packages.txt} | 0 .../pycodestyle-2.2.0.dist-info/top_level.txt | 1 + pymode/libs/{pep8.py => pycodestyle.py} | 289 +- .../DESCRIPTION.rst | 57 + .../libs/pydocstyle-1.1.1.dist-info/INSTALLER | 1 + .../libs/pydocstyle-1.1.1.dist-info/METADATA | 74 + pymode/libs/pydocstyle-1.1.1.dist-info/RECORD | 28 + pymode/libs/pydocstyle-1.1.1.dist-info/WHEEL | 6 + .../entry_points.txt | 4 + .../pydocstyle-1.1.1.dist-info/metadata.json | 1 + .../pydocstyle-1.1.1.dist-info/top_level.txt | 1 + pymode/libs/{pep257.py => pydocstyle.py} | 1060 
++++-- pymode/libs/pydocstyle/__init__.py | 7 + pymode/libs/pydocstyle/__main__.py | 19 + pymode/libs/pydocstyle/checker.py | 450 +++ pymode/libs/pydocstyle/cli.py | 105 + pymode/libs/pydocstyle/config.py | 538 +++ pymode/libs/pydocstyle/parser.py | 562 +++ pymode/libs/pydocstyle/utils.py | 11 + pymode/libs/pydocstyle/violations.py | 221 ++ .../pyflakes-1.3.0.dist-info/DESCRIPTION.rst | 86 + .../libs/pyflakes-1.3.0.dist-info/INSTALLER | 1 + pymode/libs/pyflakes-1.3.0.dist-info/METADATA | 105 + pymode/libs/pyflakes-1.3.0.dist-info/RECORD | 43 + pymode/libs/pyflakes-1.3.0.dist-info/WHEEL | 6 + .../pyflakes-1.3.0.dist-info/entry_points.txt | 3 + .../pyflakes-1.3.0.dist-info/metadata.json | 1 + .../pyflakes-1.3.0.dist-info/top_level.txt | 1 + pymode/libs/pyflakes/__init__.py | 2 +- pymode/libs/pyflakes/api.py | 16 +- pymode/libs/pyflakes/checker.py | 532 ++- pymode/libs/pyflakes/messages.py | 105 +- pymode/libs/pyflakes/reporter.py | 2 +- pymode/libs/pyflakes/scripts/__init__.py | 0 pymode/libs/pyflakes/scripts/pyflakes.py | 8 + pymode/libs/pyflakes/test/__init__.py | 0 pymode/libs/pyflakes/test/harness.py | 72 + pymode/libs/pyflakes/test/test_api.py | 744 ++++ pymode/libs/pyflakes/test/test_dict.py | 217 ++ pymode/libs/pyflakes/test/test_doctests.py | 442 +++ pymode/libs/pyflakes/test/test_imports.py | 1180 +++++++ pymode/libs/pyflakes/test/test_other.py | 1802 ++++++++++ ..._return_with_arguments_inside_generator.py | 34 + .../pyflakes/test/test_undefined_names.py | 806 +++++ pymode/libs/pylama/__init__.py | 5 +- pymode/libs/pylama/async.py | 13 +- pymode/libs/pylama/config.py | 41 +- pymode/libs/pylama/core.py | 31 +- pymode/libs/pylama/errors.py | 24 +- pymode/libs/pylama/hook.py | 15 +- pymode/libs/pylama/lint/extensions.py | 16 +- pymode/libs/pylama/lint/pylama_mccabe.py | 5 +- pymode/libs/pylama/lint/pylama_pep257.py | 21 - .../{pylama_pep8.py => pylama_pycodestyle.py} | 18 +- pymode/libs/pylama/lint/pylama_pydocstyle.py | 25 + pymode/libs/pylama/lint/pylama_pyflakes.py | 12 +- .../pylama/lint/pylama_pylint/__init__.py | 2 +- pymode/libs/pylama/lint/pylama_pylint/main.py | 25 +- pymode/libs/pylama/lint/pylama_radon.py | 31 + pymode/libs/pylama/main.py | 10 +- pymode/libs/pylama/pytest.py | 2 +- .../pylint-1.6.4.dist-info/DESCRIPTION.rst | 3 + pymode/libs/pylint-1.6.4.dist-info/INSTALLER | 1 + pymode/libs/pylint-1.6.4.dist-info/METADATA | 31 + pymode/libs/pylint-1.6.4.dist-info/RECORD | 1236 +++++++ pymode/libs/pylint-1.6.4.dist-info/WHEEL | 6 + .../pylint-1.6.4.dist-info/entry_points.txt | 7 + .../libs/pylint-1.6.4.dist-info/metadata.json | 1 + .../libs/pylint-1.6.4.dist-info/top_level.txt | 1 + pymode/libs/pylint/__init__.py | 18 +- pymode/libs/pylint/__main__.py | 5 + pymode/libs/pylint/__pkginfo__.py | 46 +- pymode/libs/pylint/checkers/__init__.py | 23 +- pymode/libs/pylint/checkers/async.py | 72 + pymode/libs/pylint/checkers/base.py | 1101 +++++- pymode/libs/pylint/checkers/classes.py | 588 ++-- .../libs/pylint/checkers/design_analysis.py | 103 +- pymode/libs/pylint/checkers/exceptions.py | 148 +- pymode/libs/pylint/checkers/format.py | 131 +- pymode/libs/pylint/checkers/imports.py | 498 ++- pymode/libs/pylint/checkers/logging.py | 56 +- pymode/libs/pylint/checkers/misc.py | 28 +- pymode/libs/pylint/checkers/newstyle.py | 111 +- pymode/libs/pylint/checkers/python3.py | 69 +- pymode/libs/pylint/checkers/raw_metrics.py | 25 +- pymode/libs/pylint/checkers/similar.py | 30 +- pymode/libs/pylint/checkers/spelling.py | 57 +- pymode/libs/pylint/checkers/stdlib.py | 195 +- 
pymode/libs/pylint/checkers/strings.py | 97 +- pymode/libs/pylint/checkers/typecheck.py | 651 +++- pymode/libs/pylint/checkers/utils.py | 522 ++- pymode/libs/pylint/checkers/variables.py | 709 ++-- pymode/libs/pylint/config.py | 844 ++++- pymode/libs/pylint/epylint.py | 38 +- pymode/libs/pylint/extensions/__init__.py | 0 .../pylint/extensions/_check_docs_utils.py | 418 +++ pymode/libs/pylint/extensions/bad_builtin.py | 67 + pymode/libs/pylint/extensions/check_docs.py | 19 + pymode/libs/pylint/extensions/check_elif.py | 67 + pymode/libs/pylint/extensions/docparams.py | 297 ++ pymode/libs/pylint/extensions/docstyle.py | 74 + pymode/libs/pylint/extensions/mccabe.py | 170 + pymode/libs/pylint/graph.py | 169 + pymode/libs/pylint/gui.py | 22 +- pymode/libs/pylint/interfaces.py | 49 +- pymode/libs/pylint/lint.py | 302 +- pymode/libs/pylint/pyreverse/__init__.py | 5 + pymode/libs/pylint/pyreverse/diadefslib.py | 49 +- pymode/libs/pylint/pyreverse/diagrams.py | 37 +- pymode/libs/pylint/pyreverse/inspector.py | 361 ++ pymode/libs/pylint/pyreverse/main.py | 55 +- pymode/libs/pylint/pyreverse/utils.py | 100 +- .../common => pylint/pyreverse}/vcgutils.py | 50 +- pymode/libs/pylint/pyreverse/writer.py | 26 +- pymode/libs/pylint/reporters/__init__.py | 76 +- pymode/libs/pylint/reporters/guireporter.py | 5 +- pymode/libs/pylint/reporters/html.py | 33 +- pymode/libs/pylint/reporters/json.py | 32 +- pymode/libs/pylint/reporters/text.py | 115 +- .../pylint/reporters/ureports/__init__.py | 94 + .../pylint/reporters/ureports/html_writer.py | 81 + .../reporters}/ureports/nodes.py | 138 +- .../pylint/reporters/ureports/text_writer.py | 87 + pymode/libs/pylint/testutils.py | 76 +- pymode/libs/pylint/utils.py | 349 +- .../libs/six-1.10.0.dist-info/DESCRIPTION.rst | 18 + pymode/libs/six-1.10.0.dist-info/INSTALLER | 1 + pymode/libs/six-1.10.0.dist-info/METADATA | 34 + pymode/libs/six-1.10.0.dist-info/RECORD | 9 + pymode/libs/six-1.10.0.dist-info/WHEEL | 6 + .../libs/six-1.10.0.dist-info/metadata.json | 1 + .../libs/six-1.10.0.dist-info/top_level.txt | 1 + pymode/libs/six.py | 74 +- .../wrapt-1.10.8.dist-info/DESCRIPTION.rst | 19 + pymode/libs/wrapt-1.10.8.dist-info/INSTALLER | 1 + pymode/libs/wrapt-1.10.8.dist-info/METADATA | 29 + pymode/libs/wrapt-1.10.8.dist-info/RECORD | 18 + pymode/libs/wrapt-1.10.8.dist-info/WHEEL | 5 + .../libs/wrapt-1.10.8.dist-info/metadata.json | 1 + .../libs/wrapt-1.10.8.dist-info/top_level.txt | 1 + pymode/libs/wrapt/__init__.py | 19 + pymode/libs/wrapt/_wrappers.so | Bin 0 -> 47640 bytes pymode/libs/wrapt/arguments.py | 95 + pymode/libs/wrapt/decorators.py | 512 +++ pymode/libs/wrapt/importer.py | 228 ++ pymode/libs/wrapt/wrappers.py | 899 +++++ 415 files changed, 46433 insertions(+), 23459 deletions(-) delete mode 100644 pymode/libs/_markerlib/__init__.py delete mode 100644 pymode/libs/_markerlib/markers.py create mode 100644 pymode/libs/astroid-1.4.9.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/astroid-1.4.9.dist-info/INSTALLER create mode 100644 pymode/libs/astroid-1.4.9.dist-info/METADATA create mode 100644 pymode/libs/astroid-1.4.9.dist-info/RECORD rename pymode/libs/{logilab_common-1.0.2.dist-info => astroid-1.4.9.dist-info}/WHEEL (54%) create mode 100644 pymode/libs/astroid-1.4.9.dist-info/metadata.json create mode 100644 pymode/libs/astroid-1.4.9.dist-info/top_level.txt create mode 100644 pymode/libs/astroid/arguments.py rename pymode/libs/astroid/brain/{builtin_inference.py => brain_builtin_inference.py} (63%) create mode 100644 
pymode/libs/astroid/brain/brain_dateutil.py rename pymode/libs/astroid/brain/{py2gi.py => brain_gi.py} (72%) rename pymode/libs/astroid/brain/{py2mechanize.py => brain_mechanize.py} (100%) rename pymode/libs/astroid/brain/{pynose.py => brain_nose.py} (92%) create mode 100644 pymode/libs/astroid/brain/brain_numpy.py create mode 100644 pymode/libs/astroid/brain/brain_pytest.py create mode 100644 pymode/libs/astroid/brain/brain_qt.py rename pymode/libs/astroid/brain/{pysix_moves.py => brain_six.py} (91%) create mode 100644 pymode/libs/astroid/brain/brain_ssl.py create mode 100644 pymode/libs/astroid/brain/brain_stdlib.py delete mode 100644 pymode/libs/astroid/brain/py2pytest.py delete mode 100644 pymode/libs/astroid/brain/py2qt4.py delete mode 100644 pymode/libs/astroid/brain/py2stdlib.py create mode 100644 pymode/libs/astroid/context.py create mode 100644 pymode/libs/astroid/decorators.py delete mode 100644 pymode/libs/astroid/inspector.py create mode 100644 pymode/libs/astroid/objects.py rename pymode/libs/{pkg_resources/_vendor => astroid/tests}/__init__.py (100%) create mode 100644 pymode/libs/astroid/tests/resources.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/SSL1/Connection1.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/SSL1/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/absimp/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/absimp/string.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/absimport.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/all.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/appl/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/appl/myConnection.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/clientmodule_test.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/descriptor_crash.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/email.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/find_test/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/find_test/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/find_test/module2.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/find_test/noendingnewline.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/find_test/nonregr.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/format.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/joined_strings.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/lmfp/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/lmfp/foo.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/module1abs/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/module1abs/core.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/module2.py create mode 100644 
pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/nonregr.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/notall.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/hello.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/recursion.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/SSL1/Connection1.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/SSL1/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/absimp/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/absimp/string.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/absimport.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/all.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/appl/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/appl/myConnection.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/clientmodule_test.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/descriptor_crash.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/email.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/find_test/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/find_test/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/find_test/module2.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/find_test/noendingnewline.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/find_test/nonregr.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/format.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/joined_strings.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/lmfp/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/lmfp/foo.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/module1abs/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/module1abs/core.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/module2.py create mode 100644 
pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/nonregr.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/notall.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/hello.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/recursion.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py create mode 100644 pymode/libs/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py create mode 100644 pymode/libs/astroid/tests/unittest_brain.py create mode 100644 pymode/libs/astroid/tests/unittest_builder.py create mode 100644 pymode/libs/astroid/tests/unittest_inference.py create mode 100644 pymode/libs/astroid/tests/unittest_lookup.py create mode 100644 pymode/libs/astroid/tests/unittest_manager.py create mode 100644 pymode/libs/astroid/tests/unittest_modutils.py create mode 100644 pymode/libs/astroid/tests/unittest_nodes.py create mode 100644 pymode/libs/astroid/tests/unittest_objects.py create mode 100644 pymode/libs/astroid/tests/unittest_peephole.py create mode 100644 pymode/libs/astroid/tests/unittest_protocols.py create mode 100644 pymode/libs/astroid/tests/unittest_python3.py create mode 100644 pymode/libs/astroid/tests/unittest_raw_building.py create mode 100644 pymode/libs/astroid/tests/unittest_regrtest.py create mode 100644 pymode/libs/astroid/tests/unittest_scoped_nodes.py create mode 100644 pymode/libs/astroid/tests/unittest_transforms.py create mode 100644 pymode/libs/astroid/tests/unittest_utils.py create mode 100644 pymode/libs/astroid/transforms.py create mode 100644 pymode/libs/astroid/util.py delete mode 100644 pymode/libs/astroid/utils.py create mode 100644 pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/INSTALLER create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/METADATA create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/RECORD create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/WHEEL create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/metadata.json create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/namespace_packages.txt create mode 100644 pymode/libs/backports.functools_lru_cache-1.3.dist-info/top_level.txt create mode 100644 pymode/libs/backports/configparser/__init__.py create mode 100644 pymode/libs/backports/configparser/helpers.py create mode 100644 pymode/libs/backports/functools_lru_cache.py create mode 100644 pymode/libs/configparser-3.5.0-py2.7-nspkg.pth create mode 100644 pymode/libs/configparser-3.5.0.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/configparser-3.5.0.dist-info/INSTALLER create mode 
100644 pymode/libs/configparser-3.5.0.dist-info/METADATA create mode 100644 pymode/libs/configparser-3.5.0.dist-info/RECORD create mode 100644 pymode/libs/configparser-3.5.0.dist-info/WHEEL create mode 100644 pymode/libs/configparser-3.5.0.dist-info/metadata.json create mode 100644 pymode/libs/configparser-3.5.0.dist-info/namespace_packages.txt create mode 100644 pymode/libs/configparser-3.5.0.dist-info/top_level.txt create mode 100644 pymode/libs/configparser.py delete mode 100644 pymode/libs/easy_install.py create mode 100644 pymode/libs/isort-4.2.5.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/isort-4.2.5.dist-info/INSTALLER create mode 100644 pymode/libs/isort-4.2.5.dist-info/METADATA create mode 100644 pymode/libs/isort-4.2.5.dist-info/RECORD create mode 100644 pymode/libs/isort-4.2.5.dist-info/WHEEL create mode 100644 pymode/libs/isort-4.2.5.dist-info/entry_points.txt create mode 100644 pymode/libs/isort-4.2.5.dist-info/metadata.json create mode 100644 pymode/libs/isort-4.2.5.dist-info/top_level.txt create mode 100644 pymode/libs/isort/__init__.py create mode 100644 pymode/libs/isort/hooks.py create mode 100644 pymode/libs/isort/isort.py create mode 100644 pymode/libs/isort/main.py create mode 100644 pymode/libs/isort/natural.py create mode 100644 pymode/libs/isort/pie_slice.py create mode 100644 pymode/libs/isort/pylama_isort.py create mode 100644 pymode/libs/isort/settings.py create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/INSTALLER create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/METADATA create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/RECORD create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/WHEEL create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/metadata.json create mode 100644 pymode/libs/lazy_object_proxy-1.2.2.dist-info/top_level.txt create mode 100644 pymode/libs/lazy_object_proxy/__init__.py create mode 100644 pymode/libs/lazy_object_proxy/cext.c create mode 100755 pymode/libs/lazy_object_proxy/cext.so create mode 100644 pymode/libs/lazy_object_proxy/compat.py create mode 100644 pymode/libs/lazy_object_proxy/simple.py create mode 100644 pymode/libs/lazy_object_proxy/slots.py create mode 100644 pymode/libs/lazy_object_proxy/utils.py delete mode 100644 pymode/libs/logilab/common/__init__.py delete mode 100644 pymode/libs/logilab/common/cache.py delete mode 100644 pymode/libs/logilab/common/changelog.py delete mode 100644 pymode/libs/logilab/common/clcommands.py delete mode 100644 pymode/libs/logilab/common/compat.py delete mode 100644 pymode/libs/logilab/common/configuration.py delete mode 100644 pymode/libs/logilab/common/daemon.py delete mode 100644 pymode/libs/logilab/common/date.py delete mode 100644 pymode/libs/logilab/common/debugger.py delete mode 100644 pymode/libs/logilab/common/decorators.py delete mode 100644 pymode/libs/logilab/common/deprecation.py delete mode 100644 pymode/libs/logilab/common/fileutils.py delete mode 100644 pymode/libs/logilab/common/graph.py delete mode 100644 pymode/libs/logilab/common/interface.py delete mode 100644 pymode/libs/logilab/common/logging_ext.py delete mode 100644 pymode/libs/logilab/common/modutils.py delete mode 100644 pymode/libs/logilab/common/optik_ext.py delete mode 100644 pymode/libs/logilab/common/optparser.py delete mode 100644 pymode/libs/logilab/common/proc.py delete mode 100644 pymode/libs/logilab/common/pytest.py delete mode 100644 
pymode/libs/logilab/common/registry.py delete mode 100644 pymode/libs/logilab/common/shellutils.py delete mode 100644 pymode/libs/logilab/common/sphinx_ext.py delete mode 100644 pymode/libs/logilab/common/sphinxutils.py delete mode 100644 pymode/libs/logilab/common/table.py delete mode 100644 pymode/libs/logilab/common/tasksqueue.py delete mode 100644 pymode/libs/logilab/common/testlib.py delete mode 100644 pymode/libs/logilab/common/textutils.py delete mode 100644 pymode/libs/logilab/common/tree.py delete mode 100644 pymode/libs/logilab/common/umessage.py delete mode 100644 pymode/libs/logilab/common/ureports/__init__.py delete mode 100644 pymode/libs/logilab/common/ureports/docbook_writer.py delete mode 100644 pymode/libs/logilab/common/ureports/html_writer.py delete mode 100644 pymode/libs/logilab/common/ureports/text_writer.py delete mode 100644 pymode/libs/logilab/common/urllib2ext.py delete mode 100644 pymode/libs/logilab/common/visitor.py delete mode 100644 pymode/libs/logilab/common/xmlutils.py delete mode 100644 pymode/libs/logilab_common-1.0.2-py2.7-nspkg.pth delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/DESCRIPTION.rst delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/METADATA delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/RECORD delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/metadata.json delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/namespace_packages.txt delete mode 100644 pymode/libs/logilab_common-1.0.2.dist-info/top_level.txt create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/INSTALLER create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/METADATA create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/RECORD create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/WHEEL create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/entry_points.txt create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/metadata.json create mode 100644 pymode/libs/mccabe-0.5.3.dist-info/top_level.txt delete mode 100644 pymode/libs/pkg_resources/__init__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/__about__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/__init__.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_compat.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/_structures.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/specifiers.py delete mode 100644 pymode/libs/pkg_resources/_vendor/packaging/version.py create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/INSTALLER create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/METADATA create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/RECORD create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/WHEEL create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/entry_points.txt create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/metadata.json rename pymode/libs/{logilab/__init__.py => pycodestyle-2.2.0.dist-info/namespace_packages.txt} (100%) create mode 100644 pymode/libs/pycodestyle-2.2.0.dist-info/top_level.txt rename pymode/libs/{pep8.py => pycodestyle.py} (90%) create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/INSTALLER create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/METADATA create mode 100644 
pymode/libs/pydocstyle-1.1.1.dist-info/RECORD create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/WHEEL create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/entry_points.txt create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/metadata.json create mode 100644 pymode/libs/pydocstyle-1.1.1.dist-info/top_level.txt rename pymode/libs/{pep257.py => pydocstyle.py} (50%) create mode 100644 pymode/libs/pydocstyle/__init__.py create mode 100644 pymode/libs/pydocstyle/__main__.py create mode 100644 pymode/libs/pydocstyle/checker.py create mode 100644 pymode/libs/pydocstyle/cli.py create mode 100644 pymode/libs/pydocstyle/config.py create mode 100644 pymode/libs/pydocstyle/parser.py create mode 100644 pymode/libs/pydocstyle/utils.py create mode 100644 pymode/libs/pydocstyle/violations.py create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/INSTALLER create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/METADATA create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/RECORD create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/WHEEL create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/entry_points.txt create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/metadata.json create mode 100644 pymode/libs/pyflakes-1.3.0.dist-info/top_level.txt create mode 100644 pymode/libs/pyflakes/scripts/__init__.py create mode 100644 pymode/libs/pyflakes/scripts/pyflakes.py create mode 100644 pymode/libs/pyflakes/test/__init__.py create mode 100644 pymode/libs/pyflakes/test/harness.py create mode 100644 pymode/libs/pyflakes/test/test_api.py create mode 100644 pymode/libs/pyflakes/test/test_dict.py create mode 100644 pymode/libs/pyflakes/test/test_doctests.py create mode 100644 pymode/libs/pyflakes/test/test_imports.py create mode 100644 pymode/libs/pyflakes/test/test_other.py create mode 100644 pymode/libs/pyflakes/test/test_return_with_arguments_inside_generator.py create mode 100644 pymode/libs/pyflakes/test/test_undefined_names.py delete mode 100644 pymode/libs/pylama/lint/pylama_pep257.py rename pymode/libs/pylama/lint/{pylama_pep8.py => pylama_pycodestyle.py} (76%) create mode 100644 pymode/libs/pylama/lint/pylama_pydocstyle.py create mode 100644 pymode/libs/pylama/lint/pylama_radon.py create mode 100644 pymode/libs/pylint-1.6.4.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/pylint-1.6.4.dist-info/INSTALLER create mode 100644 pymode/libs/pylint-1.6.4.dist-info/METADATA create mode 100644 pymode/libs/pylint-1.6.4.dist-info/RECORD create mode 100644 pymode/libs/pylint-1.6.4.dist-info/WHEEL create mode 100644 pymode/libs/pylint-1.6.4.dist-info/entry_points.txt create mode 100644 pymode/libs/pylint-1.6.4.dist-info/metadata.json create mode 100644 pymode/libs/pylint-1.6.4.dist-info/top_level.txt create mode 100644 pymode/libs/pylint/checkers/async.py create mode 100644 pymode/libs/pylint/extensions/__init__.py create mode 100644 pymode/libs/pylint/extensions/_check_docs_utils.py create mode 100644 pymode/libs/pylint/extensions/bad_builtin.py create mode 100644 pymode/libs/pylint/extensions/check_docs.py create mode 100644 pymode/libs/pylint/extensions/check_elif.py create mode 100644 pymode/libs/pylint/extensions/docparams.py create mode 100644 pymode/libs/pylint/extensions/docstyle.py create mode 100644 pymode/libs/pylint/extensions/mccabe.py create mode 100644 pymode/libs/pylint/graph.py create mode 100644 pymode/libs/pylint/pyreverse/inspector.py rename pymode/libs/{logilab/common => 
pylint/pyreverse}/vcgutils.py (82%) create mode 100644 pymode/libs/pylint/reporters/ureports/__init__.py create mode 100644 pymode/libs/pylint/reporters/ureports/html_writer.py rename pymode/libs/{logilab/common => pylint/reporters}/ureports/nodes.py (53%) create mode 100644 pymode/libs/pylint/reporters/ureports/text_writer.py create mode 100644 pymode/libs/six-1.10.0.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/six-1.10.0.dist-info/INSTALLER create mode 100644 pymode/libs/six-1.10.0.dist-info/METADATA create mode 100644 pymode/libs/six-1.10.0.dist-info/RECORD create mode 100644 pymode/libs/six-1.10.0.dist-info/WHEEL create mode 100644 pymode/libs/six-1.10.0.dist-info/metadata.json create mode 100644 pymode/libs/six-1.10.0.dist-info/top_level.txt create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/DESCRIPTION.rst create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/INSTALLER create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/METADATA create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/RECORD create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/WHEEL create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/metadata.json create mode 100644 pymode/libs/wrapt-1.10.8.dist-info/top_level.txt create mode 100644 pymode/libs/wrapt/__init__.py create mode 100755 pymode/libs/wrapt/_wrappers.so create mode 100644 pymode/libs/wrapt/arguments.py create mode 100644 pymode/libs/wrapt/decorators.py create mode 100644 pymode/libs/wrapt/importer.py create mode 100644 pymode/libs/wrapt/wrappers.py diff --git a/pylama.ini b/pylama.ini index 0394772f..9579796e 100644 --- a/pylama.ini +++ b/pylama.ini @@ -1,4 +1,5 @@ [pylama] +ignore=D213 linters=pep8,pyflakes,pylint [pylama:pymode/libs*] diff --git a/pymode/libs/_markerlib/__init__.py b/pymode/libs/_markerlib/__init__.py deleted file mode 100644 index e2b237b1..00000000 --- a/pymode/libs/_markerlib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -try: - import ast - from _markerlib.markers import default_environment, compile, interpret -except ImportError: - if 'ast' in globals(): - raise - def default_environment(): - return {} - def compile(marker): - def marker_fn(environment=None, override=None): - # 'empty markers are True' heuristic won't install extra deps. - return not marker.strip() - marker_fn.__doc__ = marker - return marker_fn - def interpret(marker, environment=None, override=None): - return compile(marker)() diff --git a/pymode/libs/_markerlib/markers.py b/pymode/libs/_markerlib/markers.py deleted file mode 100644 index fa837061..00000000 --- a/pymode/libs/_markerlib/markers.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -"""Interpret PEP 345 environment markers. - -EXPR [in|==|!=|not in] EXPR [or|and] ... - -where EXPR belongs to any of those: - - python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) - python_full_version = sys.version.split()[0] - os.name = os.name - sys.platform = sys.platform - platform.version = platform.version() - platform.machine = platform.machine() - platform.python_implementation = platform.python_implementation() - a free string, like '2.6', or 'win32' -""" - -__all__ = ['default_environment', 'compile', 'interpret'] - -import ast -import os -import platform -import sys -import weakref - -_builtin_compile = compile - -try: - from platform import python_implementation -except ImportError: - if os.name == "java": - # Jython 2.5 has ast module, but not platform.python_implementation() function. 
- def python_implementation(): - return "Jython" - else: - raise - - -# restricted set of variables -_VARS = {'sys.platform': sys.platform, - 'python_version': '%s.%s' % sys.version_info[:2], - # FIXME parsing sys.platform is not reliable, but there is no other - # way to get e.g. 2.7.2+, and the PEP is defined with sys.version - 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': python_implementation(), - 'extra': None # wheel extension - } - -for var in list(_VARS.keys()): - if '.' in var: - _VARS[var.replace('.', '_')] = _VARS[var] - -def default_environment(): - """Return copy of default PEP 385 globals dictionary.""" - return dict(_VARS) - -class ASTWhitelist(ast.NodeTransformer): - def __init__(self, statement): - self.statement = statement # for error messages - - ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) - # Bool operations - ALLOWED += (ast.And, ast.Or) - # Comparison operations - ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) - - def visit(self, node): - """Ensure statement only contains allowed nodes.""" - if not isinstance(node, self.ALLOWED): - raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % - (self.statement, - (' ' * node.col_offset) + '^')) - return ast.NodeTransformer.visit(self, node) - - def visit_Attribute(self, node): - """Flatten one level of attribute access.""" - new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) - return ast.copy_location(new_node, node) - -def parse_marker(marker): - tree = ast.parse(marker, mode='eval') - new_tree = ASTWhitelist(marker).generic_visit(tree) - return new_tree - -def compile_marker(parsed_marker): - return _builtin_compile(parsed_marker, '', 'eval', - dont_inherit=True) - -_cache = weakref.WeakValueDictionary() - -def compile(marker): - """Return compiled marker as a function accepting an environment dict.""" - try: - return _cache[marker] - except KeyError: - pass - if not marker.strip(): - def marker_fn(environment=None, override=None): - """""" - return True - else: - compiled_marker = compile_marker(parse_marker(marker)) - def marker_fn(environment=None, override=None): - """override updates environment""" - if override is None: - override = {} - if environment is None: - environment = default_environment() - environment.update(override) - return eval(compiled_marker, environment) - marker_fn.__doc__ = marker - _cache[marker] = marker_fn - return _cache[marker] - -def interpret(marker, environment=None): - return compile(marker)(environment) diff --git a/pymode/libs/astroid-1.4.9.dist-info/DESCRIPTION.rst b/pymode/libs/astroid-1.4.9.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..09162a6e --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/DESCRIPTION.rst @@ -0,0 +1,66 @@ +.. image:: https://drone.io/bitbucket.org/logilab/astroid/status.png + :alt: drone.io Build Status + :target: https://drone.io/bitbucket.org/logilab/astroid + +Astroid +======= + +What's this? +------------ + +The aim of this module is to provide a common base representation of +python source code for projects such as pychecker, pyreverse, +pylint... Well, actually the development of this library is essentially +governed by pylint's needs. It used to be called logilab-astng. + +It provides a compatible representation which comes from the `_ast` +module. 
It rebuilds the tree generated by the builtin _ast module by +recursively walking down the AST and building an extended ast. The new +node classes have additional methods and attributes for different +usages. They include some support for static inference and local name +scopes. Furthermore, astroid builds partial trees by inspecting living +objects. + +Main modules are: + +* `bases`, `node_classses` and `scoped_nodes` contain the classes for the + different type of nodes of the tree. + +* the `manager` contains a high level object to get astroid trees from + source files and living objects. It maintains a cache of previously + constructed tree for quick access. + + +Installation +------------ + +Extract the tarball, jump into the created directory and run:: + + python setup.py install + +For installation options, see:: + + python setup.py install --help + + +If you have any questions, please mail the code-quality@python.org +mailing list for support. See +http://mail.python.org/mailman/listinfo/code-quality for subscription +information and archives. You may find older archives at +http://lists.logilab.org/mailman/listinfo/python-projects . + +Python Versions +--------------- + +astroid is compatible with Python 2.7 as well as 3.3 and later. astroid uses +the same code base for both Python versions, using six. + +Test +---- + +Tests are in the 'test' subdirectory. To launch the whole tests suite +at once, you can use unittest discover:: + + python -m unittest discover -p "unittest*.py" + + diff --git a/pymode/libs/astroid-1.4.9.dist-info/INSTALLER b/pymode/libs/astroid-1.4.9.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pymode/libs/astroid-1.4.9.dist-info/METADATA b/pymode/libs/astroid-1.4.9.dist-info/METADATA new file mode 100644 index 00000000..9512196b --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.0 +Name: astroid +Version: 1.4.9 +Summary: A abstract syntax tree for Python with inference support. +Home-page: https://github.com/PyCQA/astroid +Author: Python Code Quality Authority +Author-email: code-quality@python.org +License: LGPL +Platform: UNKNOWN +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Quality Assurance +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Requires-Dist: lazy-object-proxy +Requires-Dist: six +Requires-Dist: wrapt + +.. image:: https://drone.io/bitbucket.org/logilab/astroid/status.png + :alt: drone.io Build Status + :target: https://drone.io/bitbucket.org/logilab/astroid + +Astroid +======= + +What's this? +------------ + +The aim of this module is to provide a common base representation of +python source code for projects such as pychecker, pyreverse, +pylint... Well, actually the development of this library is essentially +governed by pylint's needs. It used to be called logilab-astng. + +It provides a compatible representation which comes from the `_ast` +module. It rebuilds the tree generated by the builtin _ast module by +recursively walking down the AST and building an extended ast. The new +node classes have additional methods and attributes for different +usages. They include some support for static inference and local name +scopes. Furthermore, astroid builds partial trees by inspecting living +objects. 
+ +Main modules are: + +* `bases`, `node_classses` and `scoped_nodes` contain the classes for the + different type of nodes of the tree. + +* the `manager` contains a high level object to get astroid trees from + source files and living objects. It maintains a cache of previously + constructed tree for quick access. + + +Installation +------------ + +Extract the tarball, jump into the created directory and run:: + + python setup.py install + +For installation options, see:: + + python setup.py install --help + + +If you have any questions, please mail the code-quality@python.org +mailing list for support. See +http://mail.python.org/mailman/listinfo/code-quality for subscription +information and archives. You may find older archives at +http://lists.logilab.org/mailman/listinfo/python-projects . + +Python Versions +--------------- + +astroid is compatible with Python 2.7 as well as 3.3 and later. astroid uses +the same code base for both Python versions, using six. + +Test +---- + +Tests are in the 'test' subdirectory. To launch the whole tests suite +at once, you can use unittest discover:: + + python -m unittest discover -p "unittest*.py" + + diff --git a/pymode/libs/astroid-1.4.9.dist-info/RECORD b/pymode/libs/astroid-1.4.9.dist-info/RECORD new file mode 100644 index 00000000..c6842648 --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/RECORD @@ -0,0 +1,272 @@ +astroid/__init__.py,sha256=uX2A4l2glYHVDxjeQENRAOv0GucqEvFx5LdzlI5Xq3s,4985 +astroid/__pkginfo__.py,sha256=S4rpTRFZpkqeiyW_M-pn6sLLmTEJGA4Uyvn96RrPkAw,1607 +astroid/arguments.py,sha256=wLUt8FfHOHSsupWRasZdRjNn7h2LAu39yK1Xg0hN33Y,9508 +astroid/as_string.py,sha256=SaaordMAHt_6a7mHLzUqOfFs3QCICWnkyDwNCEQmI-o,20789 +astroid/astpeephole.py,sha256=LjUBUYoxXSz7yBrTfaSTL9SEHOAwcdTjHE0BEaMJUyI,2952 +astroid/bases.py,sha256=oFxrjU-ueu3fTZBKmq5PMbCHYEew_cCBf0wHJMr4gkU,22197 +astroid/builder.py,sha256=c6F5rAbmntA-IM9O7KFGlec7gwnj8s2mVcShDODQcYM,10469 +astroid/context.py,sha256=1IBKkxJdmFXnjxYNoi2yJNSL-rXK77R-GT6aXejTzO8,2475 +astroid/decorators.py,sha256=FHYnm0Xkoo2SwdrztHdoBPW4auP2ZKxV3qbkeu14LkI,2494 +astroid/exceptions.py,sha256=C93us5nqsz6_Stt9buMl7JjR1xHjGnePt_hSNSzYNS4,2350 +astroid/inference.py,sha256=_sqh_TyGlJd1-zHFnK-mr0wEfk7kH5pfFzNkxV5I-7s,12086 +astroid/manager.py,sha256=6iyEeKjlml6Nde3dmuiwyE5hweVhwGHq2GB2lAJwg1U,11120 +astroid/mixins.py,sha256=8RhbWYZfmQVVBJRCHJyTbcrLwKKUxkjKRipi1lm35wo,5508 +astroid/modutils.py,sha256=jgaUFy79e6iF4Re8xYcvYQhbVSp7kSaVKeRU2dcHPAI,25506 +astroid/node_classes.py,sha256=urnKe8NcYLmZ2I31A6O_ATbPXsu4xxtEM-udTemswIM,32957 +astroid/nodes.py,sha256=xXopyFJ8fRhyZg-ETmQgGwaIW2-7jowz0z0HEARG494,3248 +astroid/objects.py,sha256=cipb8qZAaAKAbL3BQGmFH38UDhzM1ZeXXhUNOhhmzNE,6403 +astroid/protocols.py,sha256=gnIsx24PkeKLPUxVQ4lc8GZt0mnfichSTwVsfB9AFt8,16984 +astroid/raw_building.py,sha256=t-3LdclM8PcQCIzh5OaS6c1cYynGgnbErd8vwoQ9w-4,14884 +astroid/rebuilder.py,sha256=n_SwvjWLxC7NM_gOx0BYywX3SujJSL2vM_3PVaaXxXs,41977 +astroid/scoped_nodes.py,sha256=YWzxkW_wlq1f1O5cJEKoFzYMBnRLvB-Rafl9zsrqwac,62402 +astroid/test_utils.py,sha256=lPKQV3_iiIswfKFOT6p9YDsEbULH29fShK2JzS8NJfc,6989 +astroid/transforms.py,sha256=7b9eOnHrC4RxJdoZOcyiWbwBR2nbFu7PeuUnR6IBVPE,3830 +astroid/util.py,sha256=T9YHxUo-xy8jn05MPS7hVm2HZC7WG8TPVKD1zqiNZ2g,3041 +astroid/brain/brain_builtin_inference.py,sha256=eQkCn1SgjKiws_6NzEPo1JgRYgOvAEiYw73D0na_t-M,11120 +astroid/brain/brain_dateutil.py,sha256=RACrraY_cnkgkgwCWnDD4VicXLiaobRn6OGecBdPFEs,448 +astroid/brain/brain_gi.py,sha256=kDXzvmLkF8-ze53bzaajHDs9nX-vx-VCH-MrqiKyrJI,6029 
+astroid/brain/brain_mechanize.py,sha256=H-PiUMsKUfqK-N8Kc6Ub_9MLb4J4TYNhh7IZQgSz7cE,486 +astroid/brain/brain_nose.py,sha256=psOQtV0UJFXzeVz7tqBZjTGIhsgE2KQS20-oSu9EufQ,2766 +astroid/brain/brain_numpy.py,sha256=jn99dLQwjCjwLEReXRv1DEx-4hANj2JnFNnErpnttDo,2210 +astroid/brain/brain_pytest.py,sha256=p5tyi9jzkxQqYJzR-63JWJy3zoeLzIbdlNNJkk29SEw,1943 +astroid/brain/brain_qt.py,sha256=a7ncWD4Iu4Nxh4YPiHqx6fXaxsrNcSnv8QZyM1g7POM,1275 +astroid/brain/brain_six.py,sha256=_LxHuomOfTNYjnOniwDBFbhpbD-AapTwO_af7md1Oxg,10175 +astroid/brain/brain_ssl.py,sha256=yeh-dwWUDqkU0IZ00Q-jVp6J7kCfEWm2m0IKmnON-Yg,3271 +astroid/brain/brain_stdlib.py,sha256=CYn_f-Sl5t7ZX61CdtS5iuNXXUZWXm1kJSTT71hzUaA,16349 +astroid/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/resources.py,sha256=Kpw_3IcQ_s5Rspo1VO_nIHN3-4PUbCjFlGrG-vIwodc,2372 +astroid/tests/unittest_brain.py,sha256=qwKuOsGv6Xd6r0WUr1YgLrJTk8gLf1h_dvdAPxhmfXM,18406 +astroid/tests/unittest_builder.py,sha256=IojJ8O81wdumy-ygiYSKYqnETAotGpTUmSf3ftR8FcU,28890 +astroid/tests/unittest_inference.py,sha256=qqYW7V5in6ULEbgdAYlmg2TJV1i9KVCFmUGNcgmau-E,74714 +astroid/tests/unittest_lookup.py,sha256=fPeBIHxEkHJ2wIrqljX5BElnUbYuAsYLl8Zd5d6qGXs,12870 +astroid/tests/unittest_manager.py,sha256=Ch7RCT-dHoQis8FuaPFS2bGUwLyxVCA56LCEgD-l6GQ,8780 +astroid/tests/unittest_modutils.py,sha256=UUw-v9XBQF9r46pO2Nxpi97xQkAInbyjJt5s5Foa0FA,10958 +astroid/tests/unittest_nodes.py,sha256=6yzHjb5W_DCgzv5KgJe-gQT65fNjtif2GLD0et9XMZc,26681 +astroid/tests/unittest_objects.py,sha256=I1_2FAbOz9bQm2DwYe820iU28Fhbft_zBEYZhmxyR7o,20666 +astroid/tests/unittest_peephole.py,sha256=XbzmRIaGgmLjpX-mM6ocdaEkk4hPB_zuGKFdzV4u8D0,3862 +astroid/tests/unittest_protocols.py,sha256=l6czE8jGJjSRIa93N2p5g4UvpkgbrFKcoPBfspuro-Y,6748 +astroid/tests/unittest_python3.py,sha256=irRwDy_crq8F_lIiTnAeqW9YXLwxPFZj7MzMNpJoFj4,9030 +astroid/tests/unittest_raw_building.py,sha256=g5CzqavC4x5y89uuZZTThfbd6GCuPVoyxyu4BLzqO3w,3075 +astroid/tests/unittest_regrtest.py,sha256=EOI6ekqzeXTCDlkB_4KWVP7kt9HEjAOYEVWP9ggnMzc,12203 +astroid/tests/unittest_scoped_nodes.py,sha256=n2cteqr-jB5GvwIXFnrkIhnPjTTBntTMBnRCWFBj-j4,58717 +astroid/tests/unittest_transforms.py,sha256=lc9JbwoO3Hul6jDukK48Fz_KO7bS9L4Q6LwOx85M-dI,8870 +astroid/tests/unittest_utils.py,sha256=Ni7cT0RCSMTbZa-3b-h-2FYlx3SAszyeUmitJ7iS69U,4441 +astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 +astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 +astroid/tests/testdata/python2/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68 +astroid/tests/testdata/python2/data/absimport.py,sha256=Dl1v3sCTUuy5NjWsvk6xfXDGqG8dJxYky66oH_16y1U,78 +astroid/tests/testdata/python2/data/all.py,sha256=9hzh93N-w2OoWmuWFFPe4NfLPtN0CcQUWyJU9G2kki8,106 +astroid/tests/testdata/python2/data/clientmodule_test.py,sha256=jiqYWxRNJPU8xPNNVDcb5OwHzq73cRevErmzX5q_pVY,800 +astroid/tests/testdata/python2/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217 +astroid/tests/testdata/python2/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70 +astroid/tests/testdata/python2/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421 +astroid/tests/testdata/python2/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168 +astroid/tests/testdata/python2/data/module.py,sha256=jaS47E_rOtpGIECwWYYl3ZBzBUZt0fvyCs7tG99SxgU,1804 
+astroid/tests/testdata/python2/data/module2.py,sha256=gNaybt93hMTRFCnOh3gjW0niEDP5nVO8TrpixkHWW5o,1960 +astroid/tests/testdata/python2/data/noendingnewline.py,sha256=cVu_K7C5NnjnEvmMUxVGeeguyFcHBuNFEO3ueF9X9LI,503 +astroid/tests/testdata/python2/data/nonregr.py,sha256=0M3kW2tiTQdfuIUU9CNZHDBd1qC6Sxms6b_QZLLGtro,1150 +astroid/tests/testdata/python2/data/notall.py,sha256=Jg0X_GfNZyAnDxHLeGUEa4f9m761kCAUySpqbCeUweM,74 +astroid/tests/testdata/python2/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52 +astroid/tests/testdata/python2/data/suppliermodule_test.py,sha256=t_C4IIivrAtXzdpRdxWxPB4Ii74r0gK-xFoyXhk_ikg,236 +astroid/tests/testdata/python2/data/SSL1/Connection1.py,sha256=rOKmOG_JTouiVawzB5kty493I64pBM9WJDinQn-_Y5c,343 +astroid/tests/testdata/python2/data/SSL1/__init__.py,sha256=ZlvNUty1pEZy7wHMAM83YwYcdE4ypNHh0W2ijB3mqO8,35 +astroid/tests/testdata/python2/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89 +astroid/tests/testdata/python2/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83 +astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42 +astroid/tests/testdata/python2/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13 +astroid/tests/testdata/python2/data/appl/myConnection.py,sha256=Zc3RQ_GjoZ91k3LkaIfV4_1SePpwKUU2cOFAzN5Iq6Y,303 +astroid/tests/testdata/python2/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python2/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python2/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python2/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python2/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python2/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51 +astroid/tests/testdata/python2/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170 +astroid/tests/testdata/python2/data/module1abs/__init__.py,sha256=RTMiBz8OgkD3dy2Sehwv6am35Xzlf6X8SQJcfo-m2sA,113 +astroid/tests/testdata/python2/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11 +astroid/tests/testdata/python2/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57 +astroid/tests/testdata/python2/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172 +astroid/tests/testdata/python2/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20 +astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242 +astroid/tests/testdata/python2/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25 +astroid/tests/testdata/python2/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32 +astroid/tests/testdata/python2/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17 +astroid/tests/testdata/python2/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 
+astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 +astroid/tests/testdata/python3/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68 +astroid/tests/testdata/python3/data/absimport.py,sha256=-CKa6uxNJwTox5JoeWFe_hnxPcp1BT_vgPrXjsk4c-w,40 +astroid/tests/testdata/python3/data/all.py,sha256=96OFTf0wN5cad6Zt4WvJ6OxHTUncQyPyghPMRxGV9B8,107 +astroid/tests/testdata/python3/data/clientmodule_test.py,sha256=jiqYWxRNJPU8xPNNVDcb5OwHzq73cRevErmzX5q_pVY,800 +astroid/tests/testdata/python3/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217 +astroid/tests/testdata/python3/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70 +astroid/tests/testdata/python3/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421 +astroid/tests/testdata/python3/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168 +astroid/tests/testdata/python3/data/module.py,sha256=gmtEr1dRdtYP5oyUwvl-Bmk498D3q9fpPSMcEGeoPPc,1799 +astroid/tests/testdata/python3/data/module2.py,sha256=A3c7169M8pPIBi8U6mnLjNQnTOPg_en9qVW18yEGNCs,1978 +astroid/tests/testdata/python3/data/noendingnewline.py,sha256=PaqOTMH1fn703GRn8_lZox2ByExWci0LiXfEKZjKgGU,506 +astroid/tests/testdata/python3/data/nonregr.py,sha256=oCCrE6UTcDUmFcLnde2N34Fxv1PQ8Ck3WqE0or1Jqqk,1101 +astroid/tests/testdata/python3/data/notall.py,sha256=DftFceOP1cQfe2imrwTWcsbuxugJx9mDFFM57cCPUnA,75 +astroid/tests/testdata/python3/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52 +astroid/tests/testdata/python3/data/suppliermodule_test.py,sha256=t_C4IIivrAtXzdpRdxWxPB4Ii74r0gK-xFoyXhk_ikg,236 +astroid/tests/testdata/python3/data/SSL1/Connection1.py,sha256=bvnJLQ3Ey3FzNDCR2mEeU8G44-c4iw9vOHBKOXHuGJM,306 +astroid/tests/testdata/python3/data/SSL1/__init__.py,sha256=3Flw6M01FPCVMhiVC_yk-NQbOaQW6K4H_H9wqx6c1do,36 +astroid/tests/testdata/python3/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89 +astroid/tests/testdata/python3/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83 +astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42 +astroid/tests/testdata/python3/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13 +astroid/tests/testdata/python3/data/appl/myConnection.py,sha256=mWi72c6yYuIXoyRXo-uKFwY7NSj-lok_NRlNc9N2hfM,261 +astroid/tests/testdata/python3/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid/tests/testdata/python3/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51 +astroid/tests/testdata/python3/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170 +astroid/tests/testdata/python3/data/module1abs/__init__.py,sha256=qeBmkE-gZ07oAuq_fgcaMP8217AdA-FGOR73iB5lltg,59 +astroid/tests/testdata/python3/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11 
+astroid/tests/testdata/python3/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57 +astroid/tests/testdata/python3/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172 +astroid/tests/testdata/python3/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20 +astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242 +astroid/tests/testdata/python3/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25 +astroid/tests/testdata/python3/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32 +astroid/tests/testdata/python3/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17 +astroid/tests/testdata/python3/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +astroid-1.4.9.dist-info/DESCRIPTION.rst,sha256=xc_zc3CC4NAakMv6bAqAlAk6NC7ScAC-G4wxGLuOq7k,2024 +astroid-1.4.9.dist-info/METADATA,sha256=52pYWwlyD3d1oGcKbpa8T0JsYWgdF-7uYkm-uV64bvQ,2643 +astroid-1.4.9.dist-info/RECORD,, +astroid-1.4.9.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +astroid-1.4.9.dist-info/metadata.json,sha256=dpnAZsv48TonGcqaiI1OL1LfpzJ1p-wG1IPQD-4T-QA,789 +astroid-1.4.9.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8 +astroid-1.4.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +astroid/brain/brain_qt.pyc,, +astroid/tests/testdata/python3/data/format.pyc,, +astroid/tests/unittest_manager.pyc,, +astroid/tests/testdata/python3/data/descriptor_crash.pyc,, +astroid/tests/testdata/python3/data/find_test/__init__.pyc,, +astroid/modutils.pyc,, +astroid/tests/unittest_builder.pyc,, +astroid/tests/testdata/python2/data/absimp/__init__.pyc,, +astroid/tests/testdata/python2/data/appl/__init__.pyc,, +astroid/brain/brain_six.pyc,, +astroid/tests/testdata/python2/data/find_test/nonregr.pyc,, +astroid/tests/testdata/python2/data/descriptor_crash.pyc,, +astroid/tests/unittest_inference.pyc,, +astroid/objects.pyc,, +astroid/tests/testdata/python2/data/format.pyc,, +astroid/tests/unittest_utils.pyc,, +astroid/tests/testdata/python3/data/package/__init__.pyc,, +astroid/tests/testdata/python3/data/suppliermodule_test.pyc,, +astroid/tests/testdata/python3/data/noendingnewline.pyc,, +astroid/tests/testdata/python3/data/lmfp/__init__.pyc,, +astroid/tests/testdata/python2/data/lmfp/__init__.pyc,, +astroid/__pkginfo__.pyc,, +astroid/tests/testdata/python3/data/module1abs/__init__.pyc,, +astroid/transforms.pyc,, +astroid/tests/testdata/python3/data/email.pyc,, +astroid/brain/brain_builtin_inference.pyc,, +astroid/tests/testdata/python2/data/lmfp/foo.pyc,, +astroid/tests/testdata/python3/data/package/hello.pyc,, +astroid/tests/testdata/python2/data/find_test/noendingnewline.pyc,, +astroid/tests/testdata/python3/data/absimport.pyc,, +astroid/tests/testdata/python2/data/module1abs/__init__.pyc,, +astroid/tests/unittest_scoped_nodes.pyc,, +astroid/tests/testdata/python2/data/unicode_package/core/__init__.pyc,, +astroid/mixins.pyc,, +astroid/tests/testdata/python3/data/SSL1/__init__.pyc,, +astroid/tests/unittest_transforms.pyc,, +astroid/tests/__init__.pyc,, +astroid/decorators.pyc,, +astroid/tests/testdata/python3/data/find_test/module.pyc,, +astroid/tests/testdata/python2/data/module2.pyc,, +astroid/tests/unittest_modutils.pyc,, +astroid/tests/testdata/python3/data/absimp/string.pyc,, 
+astroid/brain/brain_pytest.pyc,, +astroid/tests/testdata/python2/data/module1abs/core.pyc,, +astroid/tests/testdata/python3/data/module.pyc,, +astroid/tests/testdata/python2/data/recursion.pyc,, +astroid/tests/testdata/python2/data/noendingnewline.pyc,, +astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.pyc,, +astroid/tests/testdata/python2/data/package/hello.pyc,, +astroid/tests/testdata/python3/data/package/subpackage/__init__.pyc,, +astroid/brain/brain_dateutil.pyc,, +astroid/tests/testdata/python3/data/all.pyc,, +astroid/inference.pyc,, +astroid/tests/testdata/python2/data/find_test/module2.pyc,, +astroid/brain/brain_stdlib.pyc,, +astroid/tests/testdata/python2/data/appl/myConnection.pyc,, +astroid/tests/testdata/python3/data/recursion.pyc,, +astroid/util.pyc,, +astroid/tests/testdata/python2/data/module.pyc,, +astroid/tests/testdata/python2/data/SSL1/__init__.pyc,, +astroid/tests/unittest_brain.pyc,, +astroid/test_utils.pyc,, +astroid/node_classes.pyc,, +astroid/tests/testdata/python2/data/clientmodule_test.pyc,, +astroid/tests/testdata/python3/data/unicode_package/__init__.pyc,, +astroid/tests/unittest_peephole.pyc,, +astroid/tests/testdata/python2/data/package/import_package_subpackage_module.pyc,, +astroid/tests/testdata/python3/data/package/subpackage/module.pyc,, +astroid/context.pyc,, +astroid/brain/brain_mechanize.pyc,, +astroid/bases.pyc,, +astroid/__init__.pyc,, +astroid/tests/testdata/python3/data/nonregr.pyc,, +astroid/tests/testdata/python3/data/find_test/nonregr.pyc,, +astroid/tests/testdata/python3/data/lmfp/foo.pyc,, +astroid/tests/testdata/python2/data/unicode_package/__init__.pyc,, +astroid/rebuilder.pyc,, +astroid/tests/testdata/python2/data/all.pyc,, +astroid/brain/brain_numpy.pyc,, +astroid/tests/testdata/python2/data/absimport.pyc,, +astroid/tests/testdata/python2/data/SSL1/Connection1.pyc,, +astroid/brain/brain_nose.pyc,, +astroid/scoped_nodes.pyc,, +astroid/manager.pyc,, +astroid/tests/testdata/python3/data/find_test/module2.pyc,, +astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.pyc,, +astroid/tests/testdata/python2/data/package/absimport.pyc,, +astroid/tests/testdata/python3/data/unicode_package/core/__init__.pyc,, +astroid/tests/resources.pyc,, +astroid/tests/testdata/python3/data/absimp/__init__.pyc,, +astroid/tests/testdata/python3/data/module1abs/core.pyc,, +astroid/tests/testdata/python2/data/find_test/__init__.pyc,, +astroid/astpeephole.pyc,, +astroid/tests/testdata/python2/data/package/subpackage/__init__.pyc,, +astroid/raw_building.pyc,, +astroid/brain/brain_gi.pyc,, +astroid/brain/brain_ssl.pyc,, +astroid/tests/unittest_raw_building.pyc,, +astroid/tests/testdata/python3/data/notall.pyc,, +astroid/nodes.pyc,, +astroid/builder.pyc,, +astroid/tests/testdata/python3/data/joined_strings.pyc,, +astroid/tests/testdata/python3/data/appl/myConnection.pyc,, +astroid/tests/testdata/python3/data/package/import_package_subpackage_module.pyc,, +astroid/tests/testdata/python2/data/package/subpackage/module.pyc,, +astroid/tests/testdata/python2/data/absimp/string.pyc,, +astroid/tests/unittest_regrtest.pyc,, +astroid/tests/testdata/python2/data/nonregr.pyc,, +astroid/as_string.pyc,, +astroid/tests/unittest_objects.pyc,, +astroid/tests/unittest_protocols.pyc,, +astroid/tests/testdata/python2/data/suppliermodule_test.pyc,, +astroid/tests/testdata/python2/data/__init__.pyc,, +astroid/protocols.pyc,, +astroid/tests/unittest_nodes.pyc,, +astroid/tests/testdata/python2/data/joined_strings.pyc,, +astroid/exceptions.pyc,, 
+astroid/tests/unittest_python3.pyc,, +astroid/tests/testdata/python3/data/clientmodule_test.pyc,, +astroid/tests/testdata/python3/data/__init__.pyc,, +astroid/tests/testdata/python3/data/appl/__init__.pyc,, +astroid/arguments.pyc,, +astroid/tests/testdata/python3/data/find_test/noendingnewline.pyc,, +astroid/tests/testdata/python3/data/SSL1/Connection1.pyc,, +astroid/tests/testdata/python2/data/find_test/module.pyc,, +astroid/tests/testdata/python2/data/notall.pyc,, +astroid/tests/unittest_lookup.pyc,, +astroid/tests/testdata/python3/data/package/absimport.pyc,, +astroid/tests/testdata/python2/data/package/__init__.pyc,, +astroid/tests/testdata/python2/data/email.pyc,, diff --git a/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL b/pymode/libs/astroid-1.4.9.dist-info/WHEEL similarity index 54% rename from pymode/libs/logilab_common-1.0.2.dist-info/WHEEL rename to pymode/libs/astroid-1.4.9.dist-info/WHEEL index 45a0cd88..8b6dd1b5 100644 --- a/pymode/libs/logilab_common-1.0.2.dist-info/WHEEL +++ b/pymode/libs/astroid-1.4.9.dist-info/WHEEL @@ -1,5 +1,6 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.24.0) +Generator: bdist_wheel (0.29.0) Root-Is-Purelib: true Tag: py2-none-any +Tag: py3-none-any diff --git a/pymode/libs/astroid-1.4.9.dist-info/metadata.json b/pymode/libs/astroid-1.4.9.dist-info/metadata.json new file mode 100644 index 00000000..ae263d92 --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "code-quality@python.org", "name": "Python Code Quality Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/PyCQA/astroid"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "LGPL", "metadata_version": "2.0", "name": "astroid", "run_requires": [{"requires": ["lazy-object-proxy", "six", "wrapt"]}], "summary": "A abstract syntax tree for Python with inference support.", "version": "1.4.9"} \ No newline at end of file diff --git a/pymode/libs/astroid-1.4.9.dist-info/top_level.txt b/pymode/libs/astroid-1.4.9.dist-info/top_level.txt new file mode 100644 index 00000000..450d4fe9 --- /dev/null +++ b/pymode/libs/astroid-1.4.9.dist-info/top_level.txt @@ -0,0 +1 @@ +astroid diff --git a/pymode/libs/astroid/__init__.py b/pymode/libs/astroid/__init__.py index d4fd12c5..175dcb5e 100644 --- a/pymode/libs/astroid/__init__.py +++ b/pymode/libs/astroid/__init__.py @@ -58,13 +58,15 @@ # more stuff available from astroid import raw_building -from astroid.bases import YES, Instance, BoundMethod, UnboundMethod +from astroid.bases import Instance, BoundMethod, UnboundMethod from astroid.node_classes import are_exclusive, unpack_infer from astroid.scoped_nodes import builtin_lookup +from astroid.builder import parse +from astroid.util import YES # make a manager instance (borg) as well as Project and Package classes # accessible from astroid package -from astroid.manager import AstroidManager, Project +from astroid.manager import AstroidManager MANAGER = AstroidManager() del AstroidManager @@ -100,7 +102,7 @@ def inference_tip(infer_function): .. 
sourcecode:: python - MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple), + MANAGER.register_transform(Call, inference_tip(infer_named_tuple), predicate) """ def transform(node, infer_function=infer_function): @@ -112,8 +114,11 @@ def transform(node, infer_function=infer_function): def register_module_extender(manager, module_name, get_extension_mod): def transform(node): extension_module = get_extension_mod() - for name, obj in extension_module.locals.items(): - node.locals[name] = obj + for name, objs in extension_module._locals.items(): + node._locals[name] = objs + for obj in objs: + if obj.parent is extension_module: + obj.parent = node manager.register_transform(Module, transform, lambda n: n.name == module_name) diff --git a/pymode/libs/astroid/__pkginfo__.py b/pymode/libs/astroid/__pkginfo__.py index 3fb45aa4..7a5acfa5 100644 --- a/pymode/libs/astroid/__pkginfo__.py +++ b/pymode/libs/astroid/__pkginfo__.py @@ -20,17 +20,17 @@ modname = 'astroid' -numversion = (1, 3, 8) +numversion = (1, 4, 9) version = '.'.join([str(num) for num in numversion]) -install_requires = ['logilab-common>=0.63.0', 'six'] +install_requires = ['six', 'lazy_object_proxy', 'wrapt'] license = 'LGPL' -author = 'Logilab' -author_email = 'pylint-dev@lists.logilab.org' +author = 'Python Code Quality Authority' +author_email = 'code-quality@python.org' mailinglist = "mailto://%s" % author_email -web = 'http://bitbucket.org/logilab/astroid' +web = 'https://github.com/PyCQA/astroid' description = "A abstract syntax tree for Python with inference support." diff --git a/pymode/libs/astroid/arguments.py b/pymode/libs/astroid/arguments.py new file mode 100644 index 00000000..f05d48a3 --- /dev/null +++ b/pymode/libs/astroid/arguments.py @@ -0,0 +1,233 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import nodes +from astroid import util + +import six + + +class CallSite(object): + """Class for understanding arguments passed into a call site + + It needs a call context, which contains the arguments and the + keyword arguments that were passed into a given call site. + In order to infer what an argument represents, call + :meth:`infer_argument` with the corresponding function node + and the argument name. 
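# Illustrative sketch of using CallSite (a hedged example, not taken from the
# astroid sources; it relies only on names defined in this file and on the
# `parse` helper exported by this astroid version).
from astroid import parse
from astroid.arguments import CallSite

source = (
    "def greet(name, punctuation='!'):\n"
    "    return name + punctuation\n"
    "greet('world', punctuation='?')\n"
)
tree = parse(source)
call = tree.body[-1].value                  # the Call node for greet(...)
site = CallSite.from_call(call)
print(len(site.positional_arguments))       # expected: 1
print(sorted(site.keyword_arguments))       # expected: ['punctuation']
# Infer what the `name` parameter would be bound to at this call site.
print(next(site.infer_argument(tree.body[0], 'name', None)))  # expected: Const 'world'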
+ """ + + def __init__(self, callcontext): + args = callcontext.args + keywords = callcontext.keywords + self.duplicated_keywords = set() + self._unpacked_args = self._unpack_args(args) + self._unpacked_kwargs = self._unpack_keywords(keywords) + + self.positional_arguments = [ + arg for arg in self._unpacked_args + if arg is not util.YES + ] + self.keyword_arguments = { + key: value for key, value in self._unpacked_kwargs.items() + if value is not util.YES + } + + @classmethod + def from_call(cls, call_node): + """Get a CallSite object from the given Call node.""" + callcontext = contextmod.CallContext(call_node.args, + call_node.keywords) + return cls(callcontext) + + def has_invalid_arguments(self): + """Check if in the current CallSite were passed *invalid* arguments + + This can mean multiple things. For instance, if an unpacking + of an invalid object was passed, then this method will return True. + Other cases can be when the arguments can't be inferred by astroid, + for example, by passing objects which aren't known statically. + """ + return len(self.positional_arguments) != len(self._unpacked_args) + + def has_invalid_keywords(self): + """Check if in the current CallSite were passed *invalid* keyword arguments + + For instance, unpacking a dictionary with integer keys is invalid + (**{1:2}), because the keys must be strings, which will make this + method to return True. Other cases where this might return True if + objects which can't be inferred were passed. + """ + return len(self.keyword_arguments) != len(self._unpacked_kwargs) + + def _unpack_keywords(self, keywords): + values = {} + context = contextmod.InferenceContext() + for name, value in keywords: + if name is None: + # Then it's an unpacking operation (**) + try: + inferred = next(value.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.YES + continue + + if not isinstance(inferred, nodes.Dict): + # Not something we can work with. + values[name] = util.YES + continue + + for dict_key, dict_value in inferred.items: + try: + dict_key = next(dict_key.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.YES + continue + if not isinstance(dict_key, nodes.Const): + values[name] = util.YES + continue + if not isinstance(dict_key.value, six.string_types): + values[name] = util.YES + continue + if dict_key.value in values: + # The name is already in the dictionary + values[dict_key.value] = util.YES + self.duplicated_keywords.add(dict_key.value) + continue + values[dict_key.value] = dict_value + else: + values[name] = value + return values + + @staticmethod + def _unpack_args(args): + values = [] + context = contextmod.InferenceContext() + for arg in args: + if isinstance(arg, nodes.Starred): + try: + inferred = next(arg.value.infer(context=context)) + except exceptions.InferenceError: + values.append(util.YES) + continue + + if inferred is util.YES: + values.append(util.YES) + continue + if not hasattr(inferred, 'elts'): + values.append(util.YES) + continue + values.extend(inferred.elts) + else: + values.append(arg) + return values + + def infer_argument(self, funcnode, name, context): + """infer a function argument value according to the call context""" + if name in self.duplicated_keywords: + raise exceptions.InferenceError(name) + + # Look into the keywords first, maybe it's already there. + try: + return self.keyword_arguments[name].infer(context) + except KeyError: + pass + + # Too many arguments given and no variable arguments. 
+ if len(self.positional_arguments) > len(funcnode.args.args): + if not funcnode.args.vararg: + raise exceptions.InferenceError(name) + + positional = self.positional_arguments[:len(funcnode.args.args)] + vararg = self.positional_arguments[len(funcnode.args.args):] + argindex = funcnode.args.find_argname(name)[0] + kwonlyargs = set(arg.name for arg in funcnode.args.kwonlyargs) + kwargs = { + key: value for key, value in self.keyword_arguments.items() + if key not in kwonlyargs + } + # If there are too few positionals compared to + # what the function expects to receive, check to see + # if the missing positional arguments were passed + # as keyword arguments and if so, place them into the + # positional args list. + if len(positional) < len(funcnode.args.args): + for func_arg in funcnode.args.args: + if func_arg.name in kwargs: + arg = kwargs.pop(func_arg.name) + positional.append(arg) + + if argindex is not None: + # 2. first argument of instance/class method + if argindex == 0 and funcnode.type in ('method', 'classmethod'): + if context.boundnode is not None: + boundnode = context.boundnode + else: + # XXX can do better ? + boundnode = funcnode.parent.frame() + if funcnode.type == 'method': + if not isinstance(boundnode, bases.Instance): + boundnode = bases.Instance(boundnode) + return iter((boundnode,)) + if funcnode.type == 'classmethod': + return iter((boundnode,)) + # if we have a method, extract one position + # from the index, so we'll take in account + # the extra parameter represented by `self` or `cls` + if funcnode.type in ('method', 'classmethod'): + argindex -= 1 + # 2. search arg index + try: + return self.positional_arguments[argindex].infer(context) + except IndexError: + pass + + if funcnode.args.kwarg == name: + # It wants all the keywords that were passed into + # the call site. + if self.has_invalid_keywords(): + raise exceptions.InferenceError + kwarg = nodes.Dict() + kwarg.lineno = funcnode.args.lineno + kwarg.col_offset = funcnode.args.col_offset + kwarg.parent = funcnode.args + items = [(nodes.const_factory(key), value) + for key, value in kwargs.items()] + kwarg.items = items + return iter((kwarg, )) + elif funcnode.args.vararg == name: + # It wants all the args that were passed into + # the call site. + if self.has_invalid_arguments(): + raise exceptions.InferenceError + args = nodes.Tuple() + args.lineno = funcnode.args.lineno + args.col_offset = funcnode.args.col_offset + args.parent = funcnode.args + args.elts = vararg + return iter((args, )) + + # Check if it's a default parameter. 
+ try: + return funcnode.args.default_value(name).infer(context) + except exceptions.NoDefault: + pass + raise exceptions.InferenceError(name) diff --git a/pymode/libs/astroid/as_string.py b/pymode/libs/astroid/as_string.py index f627f9e8..2b07200c 100644 --- a/pymode/libs/astroid/as_string.py +++ b/pymode/libs/astroid/as_string.py @@ -22,9 +22,10 @@ * :func:`dump` function return an internal representation of nodes found in the tree, useful for debugging or understanding the tree structure """ - import sys +import six + INDENT = ' ' # 4 spaces ; keep indentation variable @@ -89,9 +90,9 @@ def visit_arguments(self, node): """return an astroid.Function node as string""" return node.format_args() - def visit_assattr(self, node): + def visit_assignattr(self, node): """return an astroid.AssAttr node as string""" - return self.visit_getattr(node) + return self.visit_attribute(node) def visit_assert(self, node): """return an astroid.Assert node as string""" @@ -100,7 +101,7 @@ def visit_assert(self, node): node.fail.accept(self)) return 'assert %s' % node.test.accept(self) - def visit_assname(self, node): + def visit_assignname(self, node): """return an astroid.AssName node as string""" return node.name @@ -113,8 +114,8 @@ def visit_augassign(self, node): """return an astroid.AugAssign node as string""" return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self)) - def visit_backquote(self, node): - """return an astroid.Backquote node as string""" + def visit_repr(self, node): + """return an astroid.Repr node as string""" return '`%s`' % node.value.accept(self) def visit_binop(self, node): @@ -130,18 +131,20 @@ def visit_break(self, node): """return an astroid.Break node as string""" return 'break' - def visit_callfunc(self, node): - """return an astroid.CallFunc node as string""" + def visit_call(self, node): + """return an astroid.Call node as string""" expr_str = node.func.accept(self) args = [arg.accept(self) for arg in node.args] - if node.starargs: - args.append('*' + node.starargs.accept(self)) - if node.kwargs: - args.append('**' + node.kwargs.accept(self)) + if node.keywords: + keywords = [kwarg.accept(self) for kwarg in node.keywords] + else: + keywords = [] + + args.extend(keywords) return '%s(%s)' % (expr_str, ', '.join(args)) - def visit_class(self, node): - """return an astroid.Class node as string""" + def visit_classdef(self, node): + """return an astroid.ClassDef node as string""" decorate = node.decorators and node.decorators.accept(self) or '' bases = ', '.join([n.accept(self) for n in node.bases]) if sys.version_info[0] == 2: @@ -186,7 +189,7 @@ def visit_delete(self, node): # XXX check if correct def visit_delattr(self, node): """return an astroid.DelAttr node as string""" - return self.visit_getattr(node) + return self.visit_attribute(node) def visit_delname(self, node): """return an astroid.DelName node as string""" @@ -198,16 +201,27 @@ def visit_decorators(self, node): def visit_dict(self, node): """return an astroid.Dict node as string""" - return '{%s}' % ', '.join(['%s: %s' % (key.accept(self), - value.accept(self)) - for key, value in node.items]) + return '{%s}' % ', '.join(self._visit_dict(node)) + + def _visit_dict(self, node): + for key, value in node.items: + key = key.accept(self) + value = value.accept(self) + if key == '**': + # It can only be a DictUnpack node. 
+ yield key + value + else: + yield '%s: %s' % (key, value) + + def visit_dictunpack(self, node): + return '**' def visit_dictcomp(self, node): """return an astroid.DictComp node as string""" return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self), ' '.join([n.accept(self) for n in node.generators])) - def visit_discard(self, node): + def visit_expr(self, node): """return an astroid.Discard node as string""" return node.value.accept(self) @@ -258,24 +272,33 @@ def visit_for(self, node): fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) return fors - def visit_from(self, node): - """return an astroid.From node as string""" + def visit_importfrom(self, node): + """return an astroid.ImportFrom node as string""" return 'from %s import %s' % ('.' * (node.level or 0) + node.modname, _import_string(node.names)) - def visit_function(self, node): + def visit_functiondef(self, node): """return an astroid.Function node as string""" decorate = node.decorators and node.decorators.accept(self) or '' docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' - return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self), - docs, self._stmt_list(node.body)) - - def visit_genexpr(self, node): - """return an astroid.GenExpr node as string""" + return_annotation = '' + if six.PY3 and node.returns: + return_annotation = '->' + node.returns.as_string() + trailer = return_annotation + ":" + else: + trailer = ":" + def_format = "\n%sdef %s(%s)%s%s\n%s" + return def_format % (decorate, node.name, + node.args.accept(self), + trailer, docs, + self._stmt_list(node.body)) + + def visit_generatorexp(self, node): + """return an astroid.GeneratorExp node as string""" return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self) for n in node.generators])) - def visit_getattr(self, node): + def visit_attribute(self, node): """return an astroid.Getattr node as string""" return '%s.%s' % (node.expr.accept(self), node.attrname) @@ -302,6 +325,8 @@ def visit_import(self, node): def visit_keyword(self, node): """return an astroid.Keyword node as string""" + if node.arg is None: + return '**%s' % node.value.accept(self) return '%s=%s' % (node.arg, node.value.accept(self)) def visit_lambda(self, node): @@ -438,6 +463,22 @@ def visit_yield(self, node): else: return "(%s)" % (expr,) + def visit_starred(self, node): + """return Starred node as string""" + return "*" + node.value.accept(self) + + + # These aren't for real AST nodes, but for inference objects. + + def visit_frozenset(self, node): + return node.parent.accept(self) + + def visit_super(self, node): + return node.parent.accept(self) + + def visit_yes(self, node): + return "Uninferable" + class AsStringVisitor3k(AsStringVisitor): """AsStringVisitor3k overwrites some AsStringVisitor methods""" @@ -466,10 +507,6 @@ def visit_raise(self, node): return 'raise %s' % node.exc.accept(self) return 'raise' - def visit_starred(self, node): - """return Starred node as string""" - return "*" + node.value.accept(self) - def visit_yieldfrom(self, node): """ Return an astroid.YieldFrom node as string. 
""" yi_val = node.value and (" " + node.value.accept(self)) or "" @@ -479,6 +516,19 @@ def visit_yieldfrom(self, node): else: return "(%s)" % (expr,) + def visit_asyncfunctiondef(self, node): + function = super(AsStringVisitor3k, self).visit_functiondef(node) + return 'async ' + function.strip() + + def visit_await(self, node): + return 'await %s' % node.value.accept(self) + + def visit_asyncwith(self, node): + return 'async %s' % self.visit_with(node) + + def visit_asyncfor(self, node): + return 'async %s' % self.visit_for(node) + def _import_string(names): """return a list of (name, asname) formatted as a string""" @@ -496,4 +546,3 @@ def _import_string(names): # this visitor is stateless, thus it can be reused to_code = AsStringVisitor() - diff --git a/pymode/libs/astroid/bases.py b/pymode/libs/astroid/bases.py index ee8ee1c3..8dfa8126 100644 --- a/pymode/libs/astroid/bases.py +++ b/pymode/libs/astroid/bases.py @@ -18,22 +18,44 @@ """This module contains base classes and functions for the nodes and some inference utils. """ - -__docformat__ = "restructuredtext en" - +import functools import sys -from contextlib import contextmanager +import warnings -from logilab.common.decorators import cachedproperty +import wrapt -from astroid.exceptions import (InferenceError, AstroidError, NotFoundError, - UnresolvableName, UseInferenceDefault) +from astroid import context as contextmod +from astroid import decorators as decoratorsmod +from astroid import exceptions +from astroid import util if sys.version_info >= (3, 0): BUILTINS = 'builtins' else: BUILTINS = '__builtin__' +PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'} +# List of possible property names. We use this list in order +# to see if a method is a property or not. This should be +# pretty reliable and fast, the alternative being to check each +# decorator to see if its a real property-like descriptor, which +# can be too complicated. +# Also, these aren't qualified, because each project can +# define them, we shouldn't expect to know every possible +# property-like decorator! +# TODO(cpopa): just implement descriptors already. +POSSIBLE_PROPERTIES = {"cached_property", "cachedproperty", + "lazyproperty", "lazy_property", "reify", + "lazyattribute", "lazy_attribute", + "LazyProperty", "lazy"} + + +def _is_property(meth): + if PROPERTIES.intersection(meth.decoratornames()): + return True + stripped = {name.split(".")[-1] for name in meth.decoratornames() + if name is not util.YES} + return any(name in stripped for name in POSSIBLE_PROPERTIES) class Proxy(object): @@ -56,101 +78,34 @@ def infer(self, context=None): yield self -# Inference ################################################################## - -class InferenceContext(object): - __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'infered') - - def __init__(self, path=None, infered=None): - self.path = path or set() - self.lookupname = None - self.callcontext = None - self.boundnode = None - self.infered = infered or {} - - def push(self, node): - name = self.lookupname - if (node, name) in self.path: - raise StopIteration() - self.path.add((node, name)) - - def clone(self): - # XXX copy lookupname/callcontext ? 
- clone = InferenceContext(self.path, infered=self.infered) - clone.callcontext = self.callcontext - clone.boundnode = self.boundnode - return clone - - def cache_generator(self, key, generator): - results = [] - for result in generator: - results.append(result) - yield result - - self.infered[key] = tuple(results) - return - - @contextmanager - def restore_path(self): - path = set(self.path) - yield - self.path = path - -def copy_context(context): - if context is not None: - return context.clone() - else: - return InferenceContext() - - def _infer_stmts(stmts, context, frame=None): - """return an iterator on statements inferred by each statement in - """ + """Return an iterator on statements inferred by each statement in *stmts*.""" stmt = None - infered = False + inferred = False if context is not None: name = context.lookupname context = context.clone() else: name = None - context = InferenceContext() + context = contextmod.InferenceContext() + for stmt in stmts: - if stmt is YES: + if stmt is util.YES: yield stmt - infered = True + inferred = True continue context.lookupname = stmt._infer_name(frame, name) try: - for infered in stmt.infer(context): - yield infered - infered = True - except UnresolvableName: + for inferred in stmt.infer(context=context): + yield inferred + inferred = True + except exceptions.UnresolvableName: continue - except InferenceError: - yield YES - infered = True - if not infered: - raise InferenceError(str(stmt)) - - -# special inference objects (e.g. may be returned as nodes by .infer()) ####### - -class _Yes(object): - """a yes object""" - def __repr__(self): - return 'YES' - def __getattribute__(self, name): - if name == 'next': - raise AttributeError('next method should not be called') - if name.startswith('__') and name.endswith('__'): - # to avoid inspection pb - return super(_Yes, self).__getattribute__(name) - return self - def __call__(self, *args, **kwargs): - return self - - -YES = _Yes() + except exceptions.InferenceError: + yield util.YES + inferred = True + if not inferred: + raise exceptions.InferenceError(str(stmt)) class Instance(Proxy): @@ -158,7 +113,7 @@ class Instance(Proxy): def getattr(self, name, context=None, lookupclass=True): try: values = self._proxied.instance_attr(name, context) - except NotFoundError: + except exceptions.NotFoundError: if name == '__class__': return [self._proxied] if lookupclass: @@ -167,23 +122,22 @@ def getattr(self, name, context=None, lookupclass=True): if name in ('__name__', '__bases__', '__mro__', '__subclasses__'): return self._proxied.local_attr(name) return self._proxied.getattr(name, context) - raise NotFoundError(name) + raise exceptions.NotFoundError(name) # since we've no context information, return matching class members as # well if lookupclass: try: return values + self._proxied.getattr(name, context) - except NotFoundError: + except exceptions.NotFoundError: pass return values def igetattr(self, name, context=None): """inferred getattr""" if not context: - context = InferenceContext() + context = contextmod.InferenceContext() try: # avoid recursively inferring the same attr on the same class - context.push((self._proxied, name)) # XXX frame should be self._proxied, or not ? 
get_attr = self.getattr(name, context, lookupclass=False) @@ -192,38 +146,49 @@ def igetattr(self, name, context=None): context, frame=self, ) - except NotFoundError: + except exceptions.NotFoundError: try: # fallback to class'igetattr since it has some logic to handle # descriptors return self._wrap_attr(self._proxied.igetattr(name, context), context) - except NotFoundError: - raise InferenceError(name) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) def _wrap_attr(self, attrs, context=None): """wrap bound methods of attrs in a InstanceMethod proxies""" for attr in attrs: if isinstance(attr, UnboundMethod): - if BUILTINS + '.property' in attr.decoratornames(): - for infered in attr.infer_call_result(self, context): - yield infered + if _is_property(attr): + for inferred in attr.infer_call_result(self, context): + yield inferred else: yield BoundMethod(attr, self) + elif hasattr(attr, 'name') and attr.name == '': + # This is a lambda function defined at class level, + # since its scope is the underlying _proxied class. + # Unfortunately, we can't do an isinstance check here, + # because of the circular dependency between astroid.bases + # and astroid.scoped_nodes. + if attr.statement().scope() == self._proxied: + if attr.args.args and attr.args.args[0].name == 'self': + yield BoundMethod(attr, self) + continue + yield attr else: yield attr def infer_call_result(self, caller, context=None): """infer what a class instance is returning when called""" - infered = False + inferred = False for node in self._proxied.igetattr('__call__', context): - if node is YES: + if node is util.YES or not node.callable(): continue for res in node.infer_call_result(caller, context): - infered = True + inferred = True yield res - if not infered: - raise InferenceError() + if not inferred: + raise exceptions.InferenceError() def __repr__(self): return '' % (self._proxied.root().name, @@ -237,7 +202,7 @@ def callable(self): try: self._proxied.getattr('__call__') return True - except NotFoundError: + except exceptions.NotFoundError: return False def pytype(self): @@ -247,6 +212,12 @@ def display_type(self): return 'Instance of' + # TODO(cpopa): this is set in inference.py + # The circular dependency hell goes deeper and deeper. 
+ # pylint: disable=unused-argument + def getitem(self, index, context=None): + pass + class UnboundMethod(Proxy): """a special node representing a method not bound to an instance""" def __repr__(self): @@ -261,12 +232,12 @@ def is_bound(self): def getattr(self, name, context=None): if name == 'im_func': return [self._proxied] - return super(UnboundMethod, self).getattr(name, context) + return self._proxied.getattr(name, context) def igetattr(self, name, context=None): if name == 'im_func': return iter((self._proxied,)) - return super(UnboundMethod, self).igetattr(name, context) + return self._proxied.igetattr(name, context) def infer_call_result(self, caller, context): # If we're unbound method __new__ of builtin object, the result is an @@ -274,7 +245,7 @@ def infer_call_result(self, caller, context): if (self._proxied.name == '__new__' and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): infer = caller.args[0].infer() if caller.args else [] - return ((x is YES and x or Instance(x)) for x in infer) + return ((x is util.YES and x or Instance(x)) for x in infer) return self._proxied.infer_call_result(caller, context) @@ -287,10 +258,13 @@ def __init__(self, proxy, bound): def is_bound(self): return True - def infer_call_result(self, caller, context): + def infer_call_result(self, caller, context=None): + + if context is None: + context = contextmod.InferenceContext() context = context.clone() context.boundnode = self.bound - return self._proxied.infer_call_result(caller, context) + return super(BoundMethod, self).infer_call_result(caller, context) class Generator(Instance): @@ -318,10 +292,11 @@ def __str__(self): def path_wrapper(func): """return the given infer function wrapped to handle the path""" + @functools.wraps(func) def wrapped(node, context=None, _func=func, **kwargs): """wrapper function handling context""" if context is None: - context = InferenceContext() + context = contextmod.InferenceContext() context.push(node) yielded = set() for res in _func(node, context, **kwargs): @@ -330,30 +305,28 @@ def wrapped(node, context=None, _func=func, **kwargs): ares = res._proxied else: ares = res - if not ares in yielded: + if ares not in yielded: yield res yielded.add(ares) return wrapped -def yes_if_nothing_infered(func): - def wrapper(*args, **kwargs): - infered = False - for node in func(*args, **kwargs): - infered = True - yield node - if not infered: - yield YES - return wrapper - -def raise_if_nothing_infered(func): - def wrapper(*args, **kwargs): - infered = False - for node in func(*args, **kwargs): - infered = True - yield node - if not infered: - raise InferenceError() - return wrapper +@wrapt.decorator +def yes_if_nothing_inferred(func, instance, args, kwargs): + inferred = False + for node in func(*args, **kwargs): + inferred = True + yield node + if not inferred: + yield util.YES + +@wrapt.decorator +def raise_if_nothing_inferred(func, instance, args, kwargs): + inferred = False + for node in func(*args, **kwargs): + inferred = True + yield node + if not inferred: + raise exceptions.InferenceError() # Node ###################################################################### @@ -364,8 +337,8 @@ class NodeNG(object): It represents a node of the new abstract syntax tree. 
""" is_statement = False - optional_assign = False # True for For (and for Comprehension if py <3.0) - is_function = False # True for Function nodes + optional_assign = False # True for For (and for Comprehension if py <3.0) + is_function = False # True for FunctionDef nodes # attributes below are set by the builder module or by raw factories lineno = None fromlineno = None @@ -389,7 +362,7 @@ def infer(self, context=None, **kwargs): # explicit_inference is not bound, give it self explicitly try: return self._explicit_inference(self, context, **kwargs) - except UseInferenceDefault: + except exceptions.UseInferenceDefault: pass if not context: @@ -397,8 +370,8 @@ def infer(self, context=None, **kwargs): key = (self, context.lookupname, context.callcontext, context.boundnode) - if key in context.infered: - return iter(context.infered[key]) + if key in context.inferred: + return iter(context.inferred[key]) return context.cache_generator(key, self._infer(context, **kwargs)) @@ -438,7 +411,7 @@ def last_child(self): attr = getattr(self, field) if not attr: # None or empty listy / tuple continue - if attr.__class__ in (list, tuple): + if isinstance(attr, (list, tuple)): return attr[-1] else: return attr @@ -460,13 +433,16 @@ def statement(self): return self.parent.statement() def frame(self): - """return the first parent frame node (i.e. Module, Function or Class) + """return the first parent frame node (i.e. Module, FunctionDef or + ClassDef) + """ return self.parent.frame() def scope(self): - """return the first node defining a new scope (i.e. Module, Function, - Class, Lambda but also GenExpr) + """return the first node defining a new scope (i.e. Module, + FunctionDef, ClassDef, Lambda but also GenExpr) + """ return self.parent.scope() @@ -483,11 +459,12 @@ def child_sequence(self, child): if node_or_sequence is child: return [node_or_sequence] # /!\ compiler.ast Nodes have an __iter__ walking over child nodes - if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: + if (isinstance(node_or_sequence, (tuple, list)) + and child in node_or_sequence): return node_or_sequence - else: - msg = 'Could not find %s in %s\'s children' - raise AstroidError(msg % (repr(child), repr(self))) + + msg = 'Could not find %s in %s\'s children' + raise exceptions.AstroidError(msg % (repr(child), repr(self))) def locate_child(self, child): """return a 2-uple (child attribute name, sequence or node)""" @@ -499,7 +476,7 @@ def locate_child(self, child): if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: return field, node_or_sequence msg = 'Could not find %s in %s\'s children' - raise AstroidError(msg % (repr(child), repr(self))) + raise exceptions.AstroidError(msg % (repr(child), repr(self))) # FIXME : should we merge child_sequence and locate_child ? locate_child # is only used in are_exclusive, child_sequence one time in pylint. 
@@ -532,14 +509,14 @@ def nearest(self, nodes): # these are lazy because they're relatively expensive to compute for every # single node, and they rarely get looked at - @cachedproperty + @decoratorsmod.cachedproperty def fromlineno(self): if self.lineno is None: return self._fixed_source_line() else: return self.lineno - @cachedproperty + @decoratorsmod.cachedproperty def tolineno(self): if not self._astroid_fields: # can't have children @@ -597,20 +574,27 @@ def nodes_of_class(self, klass, skip_klass=None): yield matching def _infer_name(self, frame, name): - # overridden for From, Import, Global, TryExcept and Arguments + # overridden for ImportFrom, Import, Global, TryExcept and Arguments return None def _infer(self, context=None): """we don't know how to resolve a statement by default""" # this method is overridden by most concrete classes - raise InferenceError(self.__class__.__name__) + raise exceptions.InferenceError(self.__class__.__name__) - def infered(self): - '''return list of infered values for a more simple inference usage''' + def inferred(self): + '''return list of inferred values for a more simple inference usage''' return list(self.infer()) + def infered(self): + warnings.warn('%s.infered() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.inferred() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.inferred() + def instanciate_class(self): - """instanciate a node if it is a Class node, else return self""" + """instanciate a node if it is a ClassDef node, else return self""" return self def has_base(self, node): diff --git a/pymode/libs/astroid/brain/builtin_inference.py b/pymode/libs/astroid/brain/brain_builtin_inference.py similarity index 63% rename from pymode/libs/astroid/brain/builtin_inference.py rename to pymode/libs/astroid/brain/brain_builtin_inference.py index f60e7913..ed78111f 100644 --- a/pymode/libs/astroid/brain/builtin_inference.py +++ b/pymode/libs/astroid/brain/brain_builtin_inference.py @@ -7,9 +7,11 @@ import six from astroid import (MANAGER, UseInferenceDefault, inference_tip, YES, InferenceError, UnresolvableName) +from astroid import arguments from astroid import nodes +from astroid import objects from astroid.builder import AstroidBuilder - +from astroid import util def _extend_str(class_node, rvalue): """function to extend builtin str/unicode class""" @@ -51,7 +53,7 @@ def lstrip(self, chars=None): def rstrip(self, chars=None): return {rvalue} def rjust(self, width, fillchar=None): - return {rvalue} + return {rvalue} def center(self, width, fillchar=None): return {rvalue} def ljust(self, width, fillchar=None): @@ -60,7 +62,7 @@ def ljust(self, width, fillchar=None): code = code.format(rvalue=rvalue) fake = AstroidBuilder(MANAGER).string_build(code)['whatever'] for method in fake.mymethods(): - class_node.locals[method.name] = [method] + class_node._locals[method.name] = [method] method.parent = class_node def extend_builtins(class_transforms): @@ -86,12 +88,17 @@ def register_builtin_transform(transform, builtin_name): def _transform_wrapper(node, context=None): result = transform(node, context=context) if result: - result.parent = node + if not result.parent: + # Let the transformation function determine + # the parent for its result. Otherwise, + # we set it to be the node we transformed from. 
+ result.parent = node + result.lineno = node.lineno result.col_offset = node.col_offset return iter([result]) - MANAGER.register_transform(nodes.CallFunc, + MANAGER.register_transform(nodes.Call, inference_tip(_transform_wrapper), lambda n: (isinstance(n.func, nodes.Name) and n.func.name == builtin_name)) @@ -108,13 +115,13 @@ def _generic_inference(node, context, node_type, transform): transformed = transform(arg) if not transformed: try: - infered = next(arg.infer(context=context)) + inferred = next(arg.infer(context=context)) except (InferenceError, StopIteration): raise UseInferenceDefault() - if infered is YES: + if inferred is util.YES: raise UseInferenceDefault() - transformed = transform(infered) - if not transformed or transformed is YES: + transformed = transform(inferred) + if not transformed or transformed is util.YES: raise UseInferenceDefault() return transformed @@ -172,19 +179,25 @@ def _infer_builtin(node, context, iterables=(nodes.List, nodes.Tuple), build_elts=set) +infer_frozenset = partial( + _infer_builtin, + klass=objects.FrozenSet, + iterables=(nodes.List, nodes.Tuple, nodes.Set), + build_elts=frozenset) + def _get_elts(arg, context): is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set)) try: - infered = next(arg.infer(context)) + inferred = next(arg.infer(context)) except (InferenceError, UnresolvableName): raise UseInferenceDefault() - if isinstance(infered, nodes.Dict): - items = infered.items - elif is_iterable(infered): + if isinstance(inferred, nodes.Dict): + items = inferred.items + elif is_iterable(inferred): items = [] - for elt in infered.elts: + for elt in inferred.elts: # If an item is not a pair of two items, # then fallback to the default inference. # Also, take in consideration only hashable items, @@ -213,24 +226,28 @@ def infer_dict(node, context=None): * dict(mapping, **kwargs) * dict(**kwargs) - If a case can't be infered, we'll fallback to default inference. + If a case can't be inferred, we'll fallback to default inference. 
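# Hedged sketch of what the dict() inference above enables (not part of the
# upstream module): once the transform is registered, astroid infers a plain
# dict(...) call into a Dict node.
import astroid

module = astroid.parse("mapping = dict(a=1, b=2)")
call = module.body[0].value                 # the dict(a=1, b=2) Call node
inferred = next(call.infer())
print(inferred)                             # expected: an astroid Dict node
print(sorted(key.value for key, _ in inferred.items))  # expected: ['a', 'b']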
""" - has_keywords = lambda args: all(isinstance(arg, nodes.Keyword) - for arg in args) - if not node.args and not node.kwargs: + call = arguments.CallSite.from_call(node) + if call.has_invalid_arguments() or call.has_invalid_keywords(): + raise UseInferenceDefault + + args = call.positional_arguments + kwargs = list(call.keyword_arguments.items()) + + if not args and not kwargs: # dict() return nodes.Dict() - elif has_keywords(node.args) and node.args: + elif kwargs and not args: # dict(a=1, b=2, c=4) - items = [(nodes.Const(arg.arg), arg.value) for arg in node.args] - elif (len(node.args) >= 2 and - has_keywords(node.args[1:])): + items = [(nodes.Const(key), value) for key, value in kwargs] + elif len(args) == 1 and kwargs: # dict(some_iterable, b=2, c=4) - elts = _get_elts(node.args[0], context) - keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]] + elts = _get_elts(args[0], context) + keys = [(nodes.Const(key), value) for key, value in kwargs] items = elts + keys - elif len(node.args) == 1: - items = _get_elts(node.args[0], context) + elif len(args) == 1: + items = _get_elts(args[0], context) else: raise UseInferenceDefault() @@ -238,8 +255,82 @@ def infer_dict(node, context=None): empty.items = items return empty + +def _node_class(node): + klass = node.frame() + while klass is not None and not isinstance(klass, nodes.ClassDef): + if klass.parent is None: + klass = None + else: + klass = klass.parent.frame() + return klass + + +def infer_super(node, context=None): + """Understand super calls. + + There are some restrictions for what can be understood: + + * unbounded super (one argument form) is not understood. + + * if the super call is not inside a function (classmethod or method), + then the default inference will be used. + + * if the super arguments can't be infered, the default inference + will be used. + """ + if len(node.args) == 1: + # Ignore unbounded super. + raise UseInferenceDefault + + scope = node.scope() + if not isinstance(scope, nodes.FunctionDef): + # Ignore non-method uses of super. + raise UseInferenceDefault + if scope.type not in ('classmethod', 'method'): + # Not interested in staticmethods. + raise UseInferenceDefault + + cls = _node_class(scope) + if not len(node.args): + mro_pointer = cls + # In we are in a classmethod, the interpreter will fill + # automatically the class as the second argument, not an instance. + if scope.type == 'classmethod': + mro_type = cls + else: + mro_type = cls.instantiate_class() + else: + # TODO(cpopa): support flow control (multiple inference values). + try: + mro_pointer = next(node.args[0].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + try: + mro_type = next(node.args[1].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + + if mro_pointer is YES or mro_type is YES: + # No way we could understand this. 
+ raise UseInferenceDefault + + super_obj = objects.Super(mro_pointer=mro_pointer, + mro_type=mro_type, + self_class=cls, + scope=scope) + super_obj.parent = node + return iter([super_obj]) + + # Builtins inference +MANAGER.register_transform(nodes.Call, + inference_tip(infer_super), + lambda n: (isinstance(n.func, nodes.Name) and + n.func.name == 'super')) + register_builtin_transform(infer_tuple, 'tuple') register_builtin_transform(infer_set, 'set') register_builtin_transform(infer_list, 'list') register_builtin_transform(infer_dict, 'dict') +register_builtin_transform(infer_frozenset, 'frozenset') diff --git a/pymode/libs/astroid/brain/brain_dateutil.py b/pymode/libs/astroid/brain/brain_dateutil.py new file mode 100644 index 00000000..d077327b --- /dev/null +++ b/pymode/libs/astroid/brain/brain_dateutil.py @@ -0,0 +1,15 @@ +"""Astroid hooks for dateutil""" + +import textwrap + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + +def dateutil_transform(): + return AstroidBuilder(MANAGER).string_build(textwrap.dedent(''' + import datetime + def parse(timestr, parserinfo=None, **kwargs): + return datetime.datetime() + ''')) + +register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform) diff --git a/pymode/libs/astroid/brain/py2gi.py b/pymode/libs/astroid/brain/brain_gi.py similarity index 72% rename from pymode/libs/astroid/brain/py2gi.py rename to pymode/libs/astroid/brain/brain_gi.py index 6747898d..d9fc1b45 100644 --- a/pymode/libs/astroid/brain/py2gi.py +++ b/pymode/libs/astroid/brain/brain_gi.py @@ -7,8 +7,9 @@ import itertools import sys import re +import warnings -from astroid import MANAGER, AstroidBuildingException +from astroid import MANAGER, AstroidBuildingException, nodes from astroid.builder import AstroidBuilder @@ -46,13 +47,13 @@ def _gi_build_stub(parent): elif (inspect.ismethod(obj) or inspect.ismethoddescriptor(obj)): methods[name] = obj - elif type(obj) in [int, str]: - constants[name] = obj elif (str(obj).startswith(", ) + # Only accept function calls with two constant arguments + if len(node.args) != 2: + return False -MANAGER.register_failed_import_hook(_import_gi_module) + if not all(isinstance(arg, nodes.Const) for arg in node.args): + return False + + func = node.func + if isinstance(func, nodes.Attribute): + if func.attrname != 'require_version': + return False + if isinstance(func.expr, nodes.Name) and func.expr.name == 'gi': + return True + + return False + if isinstance(func, nodes.Name): + return func.name == 'require_version' + + return False + +def _register_require_version(node): + # Load the gi.require_version locally + try: + import gi + gi.require_version(node.args[0].value, node.args[1].value) + except Exception: + pass + + return node + +MANAGER.register_failed_import_hook(_import_gi_module) +MANAGER.register_transform(nodes.Call, _register_require_version, _looks_like_require_version) diff --git a/pymode/libs/astroid/brain/py2mechanize.py b/pymode/libs/astroid/brain/brain_mechanize.py similarity index 100% rename from pymode/libs/astroid/brain/py2mechanize.py rename to pymode/libs/astroid/brain/brain_mechanize.py diff --git a/pymode/libs/astroid/brain/pynose.py b/pymode/libs/astroid/brain/brain_nose.py similarity index 92% rename from pymode/libs/astroid/brain/pynose.py rename to pymode/libs/astroid/brain/brain_nose.py index 67a6fb8f..4b077843 100644 --- a/pymode/libs/astroid/brain/pynose.py +++ b/pymode/libs/astroid/brain/brain_nose.py @@ -48,11 +48,14 @@ class Test(unittest.TestCase): 
if method.name.startswith('assert') and '_' not in method.name: pep8_name = _pep8(method.name) yield pep8_name, astroid.BoundMethod(method, case) + if method.name == 'assertEqual': + # nose also exports assert_equals. + yield 'assert_equals', astroid.BoundMethod(method, case) def _nose_tools_transform(node): for method_name, method in _nose_tools_functions(): - node.locals[method_name] = [method] + node._locals[method_name] = [method] def _nose_tools_trivial_transform(): diff --git a/pymode/libs/astroid/brain/brain_numpy.py b/pymode/libs/astroid/brain/brain_numpy.py new file mode 100644 index 00000000..75f4f18f --- /dev/null +++ b/pymode/libs/astroid/brain/brain_numpy.py @@ -0,0 +1,62 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +"""Astroid hooks for numpy.""" + +import astroid + + +# TODO(cpopa): drop when understanding augmented assignments + +def numpy_core_transform(): + return astroid.parse(''' + from numpy.core import numeric + from numpy.core import fromnumeric + from numpy.core import defchararray + from numpy.core import records + from numpy.core import function_base + from numpy.core import machar + from numpy.core import getlimits + from numpy.core import shape_base + __all__ = (['char', 'rec', 'memmap', 'chararray'] + numeric.__all__ + + fromnumeric.__all__ + + records.__all__ + + function_base.__all__ + + machar.__all__ + + getlimits.__all__ + + shape_base.__all__) + ''') + + +def numpy_transform(): + return astroid.parse(''' + from numpy import core + from numpy import matrixlib as _mat + from numpy import lib + __all__ = ['add_newdocs', + 'ModuleDeprecationWarning', + 'VisibleDeprecationWarning', 'linalg', 'fft', 'random', + 'ctypeslib', 'ma', + '__version__', 'pkgload', 'PackageLoader', + 'show_config'] + core.__all__ + _mat.__all__ + lib.__all__ + + ''') + + +astroid.register_module_extender(astroid.MANAGER, 'numpy.core', numpy_core_transform) +astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_transform) diff --git a/pymode/libs/astroid/brain/brain_pytest.py b/pymode/libs/astroid/brain/brain_pytest.py new file mode 100644 index 00000000..1859b985 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_pytest.py @@ -0,0 +1,76 @@ +"""Astroid hooks for pytest.""" +from __future__ import absolute_import +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + + +def pytest_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +try: + import _pytest.mark + import _pytest.recwarn + import _pytest.runner + import _pytest.python + import _pytest.skipping + import _pytest.assertion +except ImportError: + pass +else: + deprecated_call = _pytest.recwarn.deprecated_call + warns = _pytest.recwarn.warns + + exit = _pytest.runner.exit + fail = _pytest.runner.fail + skip = 
_pytest.runner.skip + importorskip = _pytest.runner.importorskip + + xfail = _pytest.skipping.xfail + mark = _pytest.mark.MarkGenerator() + raises = _pytest.python.raises + + # New in pytest 3.0 + try: + approx = _pytest.python.approx + register_assert_rewrite = _pytest.assertion.register_assert_rewrite + except AttributeError: + pass + + +# Moved in pytest 3.0 + +try: + import _pytest.freeze_support + freeze_includes = _pytest.freeze_support.freeze_includes +except ImportError: + try: + import _pytest.genscript + freeze_includes = _pytest.genscript.freeze_includes + except ImportError: + pass + +try: + import _pytest.debugging + set_trace = _pytest.debugging.pytestPDB().set_trace +except ImportError: + try: + import _pytest.pdb + set_trace = _pytest.pdb.pytestPDB().set_trace + except ImportError: + pass + +try: + import _pytest.fixtures + fixture = _pytest.fixtures.fixture + yield_fixture = _pytest.fixtures.yield_fixture +except ImportError: + try: + import _pytest.python + fixture = _pytest.python.fixture + yield_fixture = _pytest.python.yield_fixture + except ImportError: + pass +''') + +register_module_extender(MANAGER, 'pytest', pytest_transform) +register_module_extender(MANAGER, 'py.test', pytest_transform) diff --git a/pymode/libs/astroid/brain/brain_qt.py b/pymode/libs/astroid/brain/brain_qt.py new file mode 100644 index 00000000..1a03b2be --- /dev/null +++ b/pymode/libs/astroid/brain/brain_qt.py @@ -0,0 +1,44 @@ +"""Astroid hooks for the PyQT library.""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid import nodes +from astroid import parse + + +def _looks_like_signal(node, signal_name='pyqtSignal'): + if '__class__' in node._instance_attrs: + cls = node._instance_attrs['__class__'][0] + return cls.name == signal_name + return False + + +def transform_pyqt_signal(node): + module = parse(''' + class pyqtSignal(object): + def connect(self, slot, type=None, no_receiver_check=False): + pass + def disconnect(self, slot): + pass + def emit(self, *args): + pass + ''') + signal_cls = module['pyqtSignal'] + node._instance_attrs['emit'] = signal_cls['emit'] + node._instance_attrs['disconnect'] = signal_cls['disconnect'] + node._instance_attrs['connect'] = signal_cls['connect'] + + +def pyqt4_qtcore_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +def SIGNAL(signal_name): pass + +class QObject(object): + def emit(self, signal): pass +''') + + +register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform) +MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, + _looks_like_signal) \ No newline at end of file diff --git a/pymode/libs/astroid/brain/pysix_moves.py b/pymode/libs/astroid/brain/brain_six.py similarity index 91% rename from pymode/libs/astroid/brain/pysix_moves.py rename to pymode/libs/astroid/brain/brain_six.py index 548d9761..9596a6c8 100644 --- a/pymode/libs/astroid/brain/pysix_moves.py +++ b/pymode/libs/astroid/brain/brain_six.py @@ -23,7 +23,12 @@ from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder -from astroid.exceptions import AstroidBuildingException +from astroid.exceptions import AstroidBuildingException, InferenceError +from astroid import nodes + + +SIX_ADD_METACLASS = 'six.add_metaclass' + def _indent(text, prefix, predicate=None): """Adds 'prefix' to the beginning of selected lines in 'text'. 
@@ -254,8 +259,30 @@ def _six_fail_hook(modname): module.name = 'six.moves' return module +def transform_six_add_metaclass(node): + """Check if the given class node is decorated with *six.add_metaclass* + + If so, inject its argument as the metaclass of the underlying class. + """ + if not node.decorators: + return + + for decorator in node.decorators.nodes: + if not isinstance(decorator, nodes.Call): + continue + + try: + func = next(decorator.func.infer()) + except InferenceError: + continue + if func.qname() == SIX_ADD_METACLASS and decorator.args: + metaclass = decorator.args[0] + node._metaclass = metaclass + return node + register_module_extender(MANAGER, 'six', six_moves_transform) register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six', six_moves_transform) MANAGER.register_failed_import_hook(_six_fail_hook) +MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass) diff --git a/pymode/libs/astroid/brain/brain_ssl.py b/pymode/libs/astroid/brain/brain_ssl.py new file mode 100644 index 00000000..1cf8d1b8 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_ssl.py @@ -0,0 +1,65 @@ +"""Astroid hooks for the ssl library.""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid import nodes +from astroid import parse + + +def ssl_transform(): + return parse(''' + from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION + from _ssl import _SSLContext, MemoryBIO + from _ssl import ( + SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError, + SSLSyscallError, SSLEOFError, + ) + from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED + from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj + from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes + try: + from _ssl import RAND_egd + except ImportError: + # LibreSSL does not provide RAND_egd + pass + from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE, + OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3, + OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2, + OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE) + + from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE, + ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE, + ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE, + ALERT_DESCRIPTION_BAD_RECORD_MAC, + ALERT_DESCRIPTION_CERTIFICATE_EXPIRED, + ALERT_DESCRIPTION_CERTIFICATE_REVOKED, + ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN, + ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE, + ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR, + ALERT_DESCRIPTION_DECOMPRESSION_FAILURE, + ALERT_DESCRIPTION_DECRYPT_ERROR, + ALERT_DESCRIPTION_HANDSHAKE_FAILURE, + ALERT_DESCRIPTION_ILLEGAL_PARAMETER, + ALERT_DESCRIPTION_INSUFFICIENT_SECURITY, + ALERT_DESCRIPTION_INTERNAL_ERROR, + ALERT_DESCRIPTION_NO_RENEGOTIATION, + ALERT_DESCRIPTION_PROTOCOL_VERSION, + ALERT_DESCRIPTION_RECORD_OVERFLOW, + ALERT_DESCRIPTION_UNEXPECTED_MESSAGE, + ALERT_DESCRIPTION_UNKNOWN_CA, + ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY, + ALERT_DESCRIPTION_UNRECOGNIZED_NAME, + ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE, + ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION, + ALERT_DESCRIPTION_USER_CANCELLED) + from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL, + SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ, + SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN) + from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT + from _ssl import HAS_SNI, 
HAS_ECDH, HAS_NPN, HAS_ALPN + from _ssl import _OPENSSL_API_VERSION + from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2 + ''') + + +register_module_extender(MANAGER, 'ssl', ssl_transform) diff --git a/pymode/libs/astroid/brain/brain_stdlib.py b/pymode/libs/astroid/brain/brain_stdlib.py new file mode 100644 index 00000000..ad395a27 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_stdlib.py @@ -0,0 +1,473 @@ + +"""Astroid hooks for the Python 2 standard library. + +Currently help understanding of : + +* hashlib.md5 and hashlib.sha1 +""" + +import functools +import sys +from textwrap import dedent + +from astroid import ( + MANAGER, UseInferenceDefault, inference_tip, BoundMethod, + InferenceError, register_module_extender) +from astroid import exceptions +from astroid import nodes +from astroid.builder import AstroidBuilder +from astroid import util +from astroid import test_utils + +PY3K = sys.version_info > (3, 0) +PY33 = sys.version_info >= (3, 3) +PY34 = sys.version_info >= (3, 4) + +# general function + +def infer_func_form(node, base_type, context=None, enum=False): + """Specific inference function for namedtuple or Python 3 enum. """ + def infer_first(node): + if node is util.YES: + raise UseInferenceDefault + try: + value = next(node.infer(context=context)) + if value is util.YES: + raise UseInferenceDefault() + else: + return value + except StopIteration: + raise InferenceError() + + # node is a Call node, class name as first argument and generated class + # attributes as second argument + if len(node.args) != 2: + # something weird here, go back to class implementation + raise UseInferenceDefault() + # namedtuple or enums list of attributes can be a list of strings or a + # whitespace-separate string + try: + name = infer_first(node.args[0]).value + names = infer_first(node.args[1]) + try: + attributes = names.value.replace(',', ' ').split() + except AttributeError: + if not enum: + attributes = [infer_first(const).value for const in names.elts] + else: + # Enums supports either iterator of (name, value) pairs + # or mappings. + # TODO: support only list, tuples and mappings. + if hasattr(names, 'items') and isinstance(names.items, list): + attributes = [infer_first(const[0]).value + for const in names.items + if isinstance(const[0], nodes.Const)] + elif hasattr(names, 'elts'): + # Enums can support either ["a", "b", "c"] + # or [("a", 1), ("b", 2), ...], but they can't + # be mixed. + if all(isinstance(const, nodes.Tuple) + for const in names.elts): + attributes = [infer_first(const.elts[0]).value + for const in names.elts + if isinstance(const, nodes.Tuple)] + else: + attributes = [infer_first(const).value + for const in names.elts] + else: + raise AttributeError + if not attributes: + raise AttributeError + except (AttributeError, exceptions.InferenceError): + raise UseInferenceDefault() + + # If we can't iner the name of the class, don't crash, up to this point + # we know it is a namedtuple anyway. 
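To make the effect of this helper concrete, a small sketch (same importability assumption) of the synthetic class produced for a namedtuple call; `_instance_attrs` is the internal mapping this astroid snapshot uses for instance attributes.

    import astroid

    module = astroid.parse(
        "import collections\n"
        "Point = collections.namedtuple('Point', 'x y')\n")
    call = module.body[1].value                       # the namedtuple(...) Call
    klass = next(call.infer())                        # a synthetic ClassDef
    print(klass.name, sorted(klass._instance_attrs))  # expected: Point ['x', 'y']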
+ name = name or 'Uninferable' + # we want to return a Class node instance with proper attributes set + class_node = nodes.ClassDef(name, 'docstring') + class_node.parent = node.parent + # set base class=tuple + class_node.bases.append(base_type) + # XXX add __init__(*attributes) method + for attr in attributes: + fake_node = nodes.EmptyNode() + fake_node.parent = class_node + fake_node.attrname = attr + class_node._instance_attrs[attr] = [fake_node] + return class_node, name, attributes + + +# module specific transformation functions ##################################### + +def hashlib_transform(): + template = ''' + +class %(name)s(object): + def __init__(self, value=''): pass + def digest(self): + return %(digest)s + def copy(self): + return self + def update(self, value): pass + def hexdigest(self): + return '' + @property + def name(self): + return %(name)r + @property + def block_size(self): + return 1 + @property + def digest_size(self): + return 1 +''' + algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + classes = "".join( + template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} + for hashfunc in algorithms) + return AstroidBuilder(MANAGER).string_build(classes) + + +def collections_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +class defaultdict(dict): + default_factory = None + def __missing__(self, key): pass + +class deque(object): + maxlen = 0 + def __init__(self, iterable=None, maxlen=None): + self.iterable = iterable + def append(self, x): pass + def appendleft(self, x): pass + def clear(self): pass + def count(self, x): return 0 + def extend(self, iterable): pass + def extendleft(self, iterable): pass + def pop(self): pass + def popleft(self): pass + def remove(self, value): pass + def reverse(self): pass + def rotate(self, n): pass + def __iter__(self): return self + def __reversed__(self): return self.iterable[::-1] + def __getitem__(self, index): pass + def __setitem__(self, index, value): pass + def __delitem__(self, index): pass +''') + + +def pkg_resources_transform(): + return AstroidBuilder(MANAGER).string_build(''' +def require(*requirements): + return pkg_resources.working_set.require(*requirements) + +def run_script(requires, script_name): + return pkg_resources.working_set.run_script(requires, script_name) + +def iter_entry_points(group, name=None): + return pkg_resources.working_set.iter_entry_points(group, name) + +def resource_exists(package_or_requirement, resource_name): + return get_provider(package_or_requirement).has_resource(resource_name) + +def resource_isdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_isdir( + resource_name) + +def resource_filename(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name) + +def resource_stream(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name) + +def resource_string(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_string( + self, resource_name) + +def resource_listdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_listdir( + resource_name) + +def extraction_error(): + pass + +def get_cache_path(archive_name, names=()): + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + return target_path 
+ +def postprocess(tempname, filename): + pass + +def set_extraction_path(path): + pass + +def cleanup_resources(force=False): + pass + +''') + + +def subprocess_transform(): + if PY3K: + communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) + communicate_signature = 'def communicate(self, input=None, timeout=None)' + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0, restore_signals=True, + start_new_session=False, pass_fds=()): + pass + """ + else: + communicate = ('string', 'string') + communicate_signature = 'def communicate(self, input=None)' + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + pass + """ + if PY33: + wait_signature = 'def wait(self, timeout=None)' + else: + wait_signature = 'def wait(self)' + if PY3K: + ctx_manager = ''' + def __enter__(self): return self + def __exit__(self, *args): pass + ''' + else: + ctx_manager = '' + code = dedent(''' + + class Popen(object): + returncode = pid = 0 + stdin = stdout = stderr = file() + + %(init)s + + %(communicate_signature)s: + return %(communicate)r + %(wait_signature)s: + return self.returncode + def poll(self): + return self.returncode + def send_signal(self, signal): + pass + def terminate(self): + pass + def kill(self): + pass + %(ctx_manager)s + ''' % {'init': init, + 'communicate': communicate, + 'communicate_signature': communicate_signature, + 'wait_signature': wait_signature, + 'ctx_manager': ctx_manager}) + return AstroidBuilder(MANAGER).string_build(code) + + +# namedtuple support ########################################################### + +def _looks_like(node, name): + func = node.func + if isinstance(func, nodes.Attribute): + return func.attrname == name + if isinstance(func, nodes.Name): + return func.name == name + return False + +_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple') +_looks_like_enum = functools.partial(_looks_like, name='Enum') + + +def infer_named_tuple(node, context=None): + """Specific inference function for namedtuple Call node""" + class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, + context=context) + fake = AstroidBuilder(MANAGER).string_build(''' +class %(name)s(tuple): + _fields = %(fields)r + def _asdict(self): + return self.__dict__ + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + return new(cls, iterable) + def _replace(self, **kwds): + return self + ''' % {'name': name, 'fields': attributes}) + class_node._locals['_asdict'] = fake.body[0]._locals['_asdict'] + class_node._locals['_make'] = fake.body[0]._locals['_make'] + class_node._locals['_replace'] = fake.body[0]._locals['_replace'] + class_node._locals['_fields'] = fake.body[0]._locals['_fields'] + # we use UseInferenceDefault, we can't be a generator so return an iterator + return iter([class_node]) + + +def infer_enum(node, context=None): + """ Specific inference function for enum Call node. 
""" + enum_meta = test_utils.extract_node(''' + class EnumMeta(object): + 'docstring' + def __call__(self, node): + class EnumAttribute(object): + name = '' + value = 0 + return EnumAttribute() + ''') + class_node = infer_func_form(node, enum_meta, + context=context, enum=True)[0] + return iter([class_node.instantiate_class()]) + + +def infer_enum_class(node): + """ Specific inference for enums. """ + names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) + for basename in node.basenames: + # TODO: doesn't handle subclasses yet. This implementation + # is a hack to support enums. + if basename not in names: + continue + if node.root().name == 'enum': + # Skip if the class is directly from enum module. + break + for local, values in node._locals.items(): + if any(not isinstance(value, nodes.AssignName) + for value in values): + continue + + stmt = values[0].statement() + if isinstance(stmt.targets[0], nodes.Tuple): + targets = stmt.targets[0].itered() + else: + targets = stmt.targets + + new_targets = [] + for target in targets: + # Replace all the assignments with our mocked class. + classdef = dedent(''' + class %(name)s(%(types)s): + @property + def value(self): + # Not the best return. + return None + @property + def name(self): + return %(name)r + ''' % {'name': target.name, 'types': ', '.join(node.basenames)}) + fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] + fake.parent = target.parent + for method in node.mymethods(): + fake._locals[method.name] = [method] + new_targets.append(fake.instantiate_class()) + node._locals[local] = new_targets + break + return node + +def multiprocessing_transform(): + module = AstroidBuilder(MANAGER).string_build(dedent(''' + from multiprocessing.managers import SyncManager + def Manager(): + return SyncManager() + ''')) + if not PY34: + return module + + # On Python 3.4, multiprocessing uses a getattr lookup inside contexts, + # in order to get the attributes they need. Since it's extremely + # dynamic, we use this approach to fake it. + node = AstroidBuilder(MANAGER).string_build(dedent(''' + from multiprocessing.context import DefaultContext, BaseContext + default = DefaultContext() + base = BaseContext() + ''')) + try: + context = next(node['default'].infer()) + base = next(node['base'].infer()) + except InferenceError: + return module + + for node in (context, base): + for key, value in node._locals.items(): + if key.startswith("_"): + continue + + value = value[0] + if isinstance(value, nodes.FunctionDef): + # We need to rebound this, since otherwise + # it will have an extra argument (self). 
+ value = BoundMethod(value, node) + module[key] = value + return module + +def multiprocessing_managers_transform(): + return AstroidBuilder(MANAGER).string_build(dedent(''' + import array + import threading + import multiprocessing.pool as pool + + import six + + class Namespace(object): + pass + + class Value(object): + def __init__(self, typecode, value, lock=True): + self._typecode = typecode + self._value = value + def get(self): + return self._value + def set(self, value): + self._value = value + def __repr__(self): + return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value) + value = property(get, set) + + def Array(typecode, sequence, lock=True): + return array.array(typecode, sequence) + + class SyncManager(object): + Queue = JoinableQueue = six.moves.queue.Queue + Event = threading.Event + RLock = threading.RLock + BoundedSemaphore = threading.BoundedSemaphore + Condition = threading.Condition + Barrier = threading.Barrier + Pool = pool.Pool + list = list + dict = dict + Value = Value + Array = Array + Namespace = Namespace + __enter__ = lambda self: self + __exit__ = lambda *args: args + + def start(self, initializer=None, initargs=None): + pass + def shutdown(self): + pass + ''')) + + +MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple), + _looks_like_namedtuple) +MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), + _looks_like_enum) +MANAGER.register_transform(nodes.ClassDef, infer_enum_class) +register_module_extender(MANAGER, 'hashlib', hashlib_transform) +register_module_extender(MANAGER, 'collections', collections_transform) +register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) +register_module_extender(MANAGER, 'subprocess', subprocess_transform) +register_module_extender(MANAGER, 'multiprocessing.managers', + multiprocessing_managers_transform) +register_module_extender(MANAGER, 'multiprocessing', multiprocessing_transform) diff --git a/pymode/libs/astroid/brain/py2pytest.py b/pymode/libs/astroid/brain/py2pytest.py deleted file mode 100644 index e24d449c..00000000 --- a/pymode/libs/astroid/brain/py2pytest.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Astroid hooks for pytest.""" - -from astroid import MANAGER, register_module_extender -from astroid.builder import AstroidBuilder - - -def pytest_transform(): - return AstroidBuilder(MANAGER).string_build(''' - -try: - import _pytest.mark - import _pytest.recwarn - import _pytest.runner - import _pytest.python -except ImportError: - pass -else: - deprecated_call = _pytest.recwarn.deprecated_call - exit = _pytest.runner.exit - fail = _pytest.runner.fail - fixture = _pytest.python.fixture - importorskip = _pytest.runner.importorskip - mark = _pytest.mark.MarkGenerator() - raises = _pytest.python.raises - skip = _pytest.runner.skip - yield_fixture = _pytest.python.yield_fixture - -''') - -register_module_extender(MANAGER, 'pytest', pytest_transform) -register_module_extender(MANAGER, 'py.test', pytest_transform) diff --git a/pymode/libs/astroid/brain/py2qt4.py b/pymode/libs/astroid/brain/py2qt4.py deleted file mode 100644 index d5578097..00000000 --- a/pymode/libs/astroid/brain/py2qt4.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Astroid hooks for the Python 2 qt4 module. 
- -Currently help understanding of : - -* PyQT4.QtCore -""" - -from astroid import MANAGER, register_module_extender -from astroid.builder import AstroidBuilder - - -def pyqt4_qtcore_transform(): - return AstroidBuilder(MANAGER).string_build(''' - -def SIGNAL(signal_name): pass - -class QObject(object): - def emit(self, signal): pass -''') - - -register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform) diff --git a/pymode/libs/astroid/brain/py2stdlib.py b/pymode/libs/astroid/brain/py2stdlib.py deleted file mode 100644 index 2bfcbcd3..00000000 --- a/pymode/libs/astroid/brain/py2stdlib.py +++ /dev/null @@ -1,334 +0,0 @@ - -"""Astroid hooks for the Python 2 standard library. - -Currently help understanding of : - -* hashlib.md5 and hashlib.sha1 -""" - -import sys -from functools import partial -from textwrap import dedent - -from astroid import ( - MANAGER, AsStringRegexpPredicate, - UseInferenceDefault, inference_tip, - YES, InferenceError, register_module_extender) -from astroid import exceptions -from astroid import nodes -from astroid.builder import AstroidBuilder - -PY3K = sys.version_info > (3, 0) -PY33 = sys.version_info >= (3, 3) - -# general function - -def infer_func_form(node, base_type, context=None, enum=False): - """Specific inference function for namedtuple or Python 3 enum. """ - def infer_first(node): - try: - value = next(node.infer(context=context)) - if value is YES: - raise UseInferenceDefault() - else: - return value - except StopIteration: - raise InferenceError() - - # node is a CallFunc node, class name as first argument and generated class - # attributes as second argument - if len(node.args) != 2: - # something weird here, go back to class implementation - raise UseInferenceDefault() - # namedtuple or enums list of attributes can be a list of strings or a - # whitespace-separate string - try: - name = infer_first(node.args[0]).value - names = infer_first(node.args[1]) - try: - attributes = names.value.replace(',', ' ').split() - except AttributeError: - if not enum: - attributes = [infer_first(const).value for const in names.elts] - else: - # Enums supports either iterator of (name, value) pairs - # or mappings. - # TODO: support only list, tuples and mappings. - if hasattr(names, 'items') and isinstance(names.items, list): - attributes = [infer_first(const[0]).value - for const in names.items - if isinstance(const[0], nodes.Const)] - elif hasattr(names, 'elts'): - # Enums can support either ["a", "b", "c"] - # or [("a", 1), ("b", 2), ...], but they can't - # be mixed. 
- if all(isinstance(const, nodes.Tuple) - for const in names.elts): - attributes = [infer_first(const.elts[0]).value - for const in names.elts - if isinstance(const, nodes.Tuple)] - else: - attributes = [infer_first(const).value - for const in names.elts] - else: - raise AttributeError - if not attributes: - raise AttributeError - except (AttributeError, exceptions.InferenceError) as exc: - raise UseInferenceDefault() - # we want to return a Class node instance with proper attributes set - class_node = nodes.Class(name, 'docstring') - class_node.parent = node.parent - # set base class=tuple - class_node.bases.append(base_type) - # XXX add __init__(*attributes) method - for attr in attributes: - fake_node = nodes.EmptyNode() - fake_node.parent = class_node - class_node.instance_attrs[attr] = [fake_node] - return class_node, name, attributes - - -# module specific transformation functions ##################################### - -def hashlib_transform(): - template = ''' - -class %(name)s(object): - def __init__(self, value=''): pass - def digest(self): - return %(digest)s - def copy(self): - return self - def update(self, value): pass - def hexdigest(self): - return '' - @property - def name(self): - return %(name)r - @property - def block_size(self): - return 1 - @property - def digest_size(self): - return 1 -''' - algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') - classes = "".join( - template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} - for hashfunc in algorithms) - return AstroidBuilder(MANAGER).string_build(classes) - - -def collections_transform(): - return AstroidBuilder(MANAGER).string_build(''' - -class defaultdict(dict): - default_factory = None - def __missing__(self, key): pass - -class deque(object): - maxlen = 0 - def __init__(self, iterable=None, maxlen=None): pass - def append(self, x): pass - def appendleft(self, x): pass - def clear(self): pass - def count(self, x): return 0 - def extend(self, iterable): pass - def extendleft(self, iterable): pass - def pop(self): pass - def popleft(self): pass - def remove(self, value): pass - def reverse(self): pass - def rotate(self, n): pass - def __iter__(self): return self - -''') - - -def pkg_resources_transform(): - return AstroidBuilder(MANAGER).string_build(''' - -def resource_exists(package_or_requirement, resource_name): - pass - -def resource_isdir(package_or_requirement, resource_name): - pass - -def resource_filename(package_or_requirement, resource_name): - pass - -def resource_stream(package_or_requirement, resource_name): - pass - -def resource_string(package_or_requirement, resource_name): - pass - -def resource_listdir(package_or_requirement, resource_name): - pass - -def extraction_error(): - pass - -def get_cache_path(archive_name, names=()): - pass - -def postprocess(tempname, filename): - pass - -def set_extraction_path(path): - pass - -def cleanup_resources(force=False): - pass - -''') - - -def subprocess_transform(): - if PY3K: - communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) - init = """ - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0, restore_signals=True, - start_new_session=False, pass_fds=()): - pass - """ - else: - communicate = ('string', 'string') - init = """ - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, 
close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - pass - """ - if PY33: - wait_signature = 'def wait(self, timeout=None)' - else: - wait_signature = 'def wait(self)' - return AstroidBuilder(MANAGER).string_build(''' - -class Popen(object): - returncode = pid = 0 - stdin = stdout = stderr = file() - - %(init)s - - def communicate(self, input=None): - return %(communicate)r - %(wait_signature)s: - return self.returncode - def poll(self): - return self.returncode - def send_signal(self, signal): - pass - def terminate(self): - pass - def kill(self): - pass - ''' % {'init': init, - 'communicate': communicate, - 'wait_signature': wait_signature}) - - -# namedtuple support ########################################################### - -def looks_like_namedtuple(node): - func = node.func - if type(func) is nodes.Getattr: - return func.attrname == 'namedtuple' - if type(func) is nodes.Name: - return func.name == 'namedtuple' - return False - -def infer_named_tuple(node, context=None): - """Specific inference function for namedtuple CallFunc node""" - class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, - context=context) - fake = AstroidBuilder(MANAGER).string_build(''' -class %(name)s(tuple): - _fields = %(fields)r - def _asdict(self): - return self.__dict__ - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - return new(cls, iterable) - def _replace(_self, **kwds): - result = _self._make(map(kwds.pop, %(fields)r, _self)) - if kwds: - raise ValueError('Got unexpected field names: %%r' %% list(kwds)) - return result - ''' % {'name': name, 'fields': attributes}) - class_node.locals['_asdict'] = fake.body[0].locals['_asdict'] - class_node.locals['_make'] = fake.body[0].locals['_make'] - class_node.locals['_replace'] = fake.body[0].locals['_replace'] - class_node.locals['_fields'] = fake.body[0].locals['_fields'] - # we use UseInferenceDefault, we can't be a generator so return an iterator - return iter([class_node]) - -def infer_enum(node, context=None): - """ Specific inference function for enum CallFunc node. """ - enum_meta = nodes.Class("EnumMeta", 'docstring') - class_node = infer_func_form(node, enum_meta, - context=context, enum=True)[0] - return iter([class_node.instanciate_class()]) - -def infer_enum_class(node): - """ Specific inference for enums. """ - names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) - for basename in node.basenames: - # TODO: doesn't handle subclasses yet. This implementation - # is a hack to support enums. - if basename not in names: - continue - if node.root().name == 'enum': - # Skip if the class is directly from enum module. - break - for local, values in node.locals.items(): - if any(not isinstance(value, nodes.AssName) - for value in values): - continue - - stmt = values[0].statement() - if isinstance(stmt.targets[0], nodes.Tuple): - targets = stmt.targets[0].itered() - else: - targets = stmt.targets - - new_targets = [] - for target in targets: - # Replace all the assignments with our mocked class. - classdef = dedent(''' - class %(name)s(object): - @property - def value(self): - # Not the best return. 
- return None - @property - def name(self): - return %(name)r - ''' % {'name': target.name}) - fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] - fake.parent = target.parent - for method in node.mymethods(): - fake.locals[method.name] = [method] - new_targets.append(fake.instanciate_class()) - node.locals[local] = new_targets - break - return node - - -MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple), - looks_like_namedtuple) -MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum), - AsStringRegexpPredicate('Enum', 'func')) -MANAGER.register_transform(nodes.Class, infer_enum_class) -register_module_extender(MANAGER, 'hashlib', hashlib_transform) -register_module_extender(MANAGER, 'collections', collections_transform) -register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) -register_module_extender(MANAGER, 'subprocess', subprocess_transform) diff --git a/pymode/libs/astroid/builder.py b/pymode/libs/astroid/builder.py index 1fe7a36d..63c156a1 100644 --- a/pymode/libs/astroid/builder.py +++ b/pymode/libs/astroid/builder.py @@ -22,23 +22,26 @@ """ from __future__ import with_statement -__docformat__ = "restructuredtext en" - +import _ast +import os import sys -from os.path import splitext, basename, exists, abspath +import textwrap + +from astroid import bases +from astroid import exceptions +from astroid import manager +from astroid import modutils +from astroid import raw_building +from astroid import rebuilder +from astroid import util -from astroid.exceptions import AstroidBuildingException, InferenceError -from astroid.raw_building import InspectBuilder -from astroid.rebuilder import TreeRebuilder -from astroid.manager import AstroidManager -from astroid.bases import YES, Instance -from astroid.modutils import modpath_from_file -from _ast import PyCF_ONLY_AST -def parse(string): - return compile(string, "", 'exec', PyCF_ONLY_AST) +def _parse(string): + return compile(string, "", 'exec', _ast.PyCF_ONLY_AST) + if sys.version_info >= (3, 0): + # pylint: disable=no-name-in-module; We don't understand flows yet. 
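The remainder of this module wires these helpers into AstroidBuilder; a minimal usage sketch of the string-building path follows (assuming the vendored astroid is importable; the module name 'sample' is illustrative).

    from astroid import MANAGER
    from astroid.builder import AstroidBuilder

    builder = AstroidBuilder(MANAGER)
    module = builder.string_build("x = 1\n", modname="sample")
    print(module.name)            # expected: 'sample'
    print(list(module._locals))   # expected: ['x'] (internal locals mapping)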
from tokenize import detect_encoding def open_source_file(filename): @@ -47,10 +50,10 @@ def open_source_file(filename): stream = open(filename, 'r', newline=None, encoding=encoding) try: data = stream.read() - except UnicodeError: # wrong encodingg + except UnicodeError: # wrong encoding # detect_encoding returns utf-8 if no encoding specified msg = 'Wrong (%s) or no encoding specified' % encoding - raise AstroidBuildingException(msg) + raise exceptions.AstroidBuildingException(msg) return stream, encoding, data else: @@ -59,8 +62,7 @@ def open_source_file(filename): _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)") def _guess_encoding(string): - """get encoding from a python file as string or return None if not found - """ + """get encoding from a python file as string or return None if not found""" # check for UTF-8 byte-order mark if string.startswith('\xef\xbb\xbf'): return 'UTF-8' @@ -77,95 +79,101 @@ def open_source_file(filename): encoding = _guess_encoding(data) return stream, encoding, data -# ast NG builder ############################################################## -MANAGER = AstroidManager() +MANAGER = manager.AstroidManager() -class AstroidBuilder(InspectBuilder): - """provide astroid building methods""" - def __init__(self, manager=None): - InspectBuilder.__init__(self) +class AstroidBuilder(raw_building.InspectBuilder): + """Class for building an astroid tree from source code or from a live module. + + The param *manager* specifies the manager class which should be used. + If no manager is given, then the default one will be used. The + param *apply_transforms* determines if the transforms should be + applied after the tree was built from source or from a live object, + by default being True. + """ + + def __init__(self, manager=None, apply_transforms=True): + super(AstroidBuilder, self).__init__() self._manager = manager or MANAGER + self._apply_transforms = apply_transforms def module_build(self, module, modname=None): - """build an astroid from a living module instance - """ + """Build an astroid from a living module instance.""" node = None path = getattr(module, '__file__', None) if path is not None: - path_, ext = splitext(module.__file__) - if ext in ('.py', '.pyc', '.pyo') and exists(path_ + '.py'): + path_, ext = os.path.splitext(modutils._path_from_filename(path)) + if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'): node = self.file_build(path_ + '.py', modname) if node is None: # this is a built-in module # get a partial representation by introspection node = self.inspect_build(module, modname=modname, path=path) - # we have to handle transformation by ourselves since the rebuilder - # isn't called for builtin nodes - # - # XXX it's then only called for Module nodes, not for underlying - # nodes - node = self._manager.transform(node) + if self._apply_transforms: + # We have to handle transformation by ourselves since the + # rebuilder isn't called for builtin nodes + node = self._manager.visit_transforms(node) return node def file_build(self, path, modname=None): - """build astroid from a source code file (i.e. from an ast) + """Build astroid from a source code file (i.e. 
from an ast) - path is expected to be a python source file + *path* is expected to be a python source file """ try: stream, encoding, data = open_source_file(path) except IOError as exc: msg = 'Unable to load file %r (%s)' % (path, exc) - raise AstroidBuildingException(msg) - except SyntaxError as exc: # py3k encoding specification error - raise AstroidBuildingException(exc) - except LookupError as exc: # unknown encoding - raise AstroidBuildingException(exc) + raise exceptions.AstroidBuildingException(msg) + except SyntaxError as exc: # py3k encoding specification error + raise exceptions.AstroidBuildingException(exc) + except LookupError as exc: # unknown encoding + raise exceptions.AstroidBuildingException(exc) with stream: # get module name if necessary if modname is None: try: - modname = '.'.join(modpath_from_file(path)) + modname = '.'.join(modutils.modpath_from_file(path)) except ImportError: - modname = splitext(basename(path))[0] + modname = os.path.splitext(os.path.basename(path))[0] # build astroid representation module = self._data_build(data, modname, path) return self._post_build(module, encoding) def string_build(self, data, modname='', path=None): - """build astroid from source code string and return rebuilded astroid""" + """Build astroid from source code string.""" module = self._data_build(data, modname, path) - module.file_bytes = data.encode('utf-8') + module.source_code = data.encode('utf-8') return self._post_build(module, 'utf-8') def _post_build(self, module, encoding): - """handles encoding and delayed nodes - after a module has been built - """ + """Handles encoding and delayed nodes after a module has been built""" module.file_encoding = encoding self._manager.cache_module(module) # post tree building steps after we stored the module in the cache: - for from_node in module._from_nodes: + for from_node in module._import_from_nodes: if from_node.modname == '__future__': for symbol, _ in from_node.names: - module.future_imports.add(symbol) + module._future_imports.add(symbol) self.add_from_names_to_locals(from_node) # handle delayed assattr nodes for delayed in module._delayed_assattr: self.delayed_assattr(delayed) + + # Visit the transforms + if self._apply_transforms: + module = self._manager.visit_transforms(module) return module def _data_build(self, data, modname, path): - """build tree node from data and add some informations""" - # this method could be wrapped with a pickle/cache function + """Build tree node from data and add some informations""" try: - node = parse(data + '\n') - except TypeError as exc: - raise AstroidBuildingException(exc) + node = _parse(data + '\n') + except (TypeError, ValueError, SyntaxError) as exc: + raise exceptions.AstroidBuildingException(exc) if path is not None: - node_file = abspath(path) + node_file = os.path.abspath(path) else: node_file = '' if modname.endswith('.__init__'): @@ -173,68 +181,83 @@ def _data_build(self, data, modname, path): package = True else: package = path and path.find('__init__.py') > -1 or False - rebuilder = TreeRebuilder(self._manager) - module = rebuilder.visit_module(node, modname, node_file, package) - module._from_nodes = rebuilder._from_nodes - module._delayed_assattr = rebuilder._delayed_assattr + builder = rebuilder.TreeRebuilder(self._manager) + module = builder.visit_module(node, modname, node_file, package) + module._import_from_nodes = builder._import_from_nodes + module._delayed_assattr = builder._delayed_assattr return module def add_from_names_to_locals(self, node): - """store 
imported names to the locals; - resort the locals if coming from a delayed node - """ + """Store imported names to the locals + Resort the locals if coming from a delayed node + """ _key_func = lambda node: node.fromlineno def sort_locals(my_list): my_list.sort(key=_key_func) + for (name, asname) in node.names: if name == '*': try: imported = node.do_import_module() - except InferenceError: + except exceptions.InferenceError: continue - for name in imported.wildcard_import_names(): + for name in imported._public_names(): node.parent.set_local(name, node) - sort_locals(node.parent.scope().locals[name]) + sort_locals(node.parent.scope()._locals[name]) else: node.parent.set_local(asname or name, node) - sort_locals(node.parent.scope().locals[asname or name]) + sort_locals(node.parent.scope()._locals[asname or name]) def delayed_assattr(self, node): - """visit a AssAttr node -> add name to locals, handle members - definition + """Visit a AssAttr node + + This adds name to locals and handle members definition. """ try: frame = node.frame() - for infered in node.expr.infer(): - if infered is YES: + for inferred in node.expr.infer(): + if inferred is util.YES: continue try: - if infered.__class__ is Instance: - infered = infered._proxied - iattrs = infered.instance_attrs - elif isinstance(infered, Instance): + if inferred.__class__ is bases.Instance: + inferred = inferred._proxied + iattrs = inferred._instance_attrs + elif isinstance(inferred, bases.Instance): # Const, Tuple, ... we may be wrong, may be not, but # anyway we don't want to pollute builtin's namespace continue - elif infered.is_function: - iattrs = infered.instance_attrs + elif inferred.is_function: + iattrs = inferred._instance_attrs else: - iattrs = infered.locals + iattrs = inferred._locals except AttributeError: # XXX log error - #import traceback - #traceback.print_exc() continue values = iattrs.setdefault(node.attrname, []) if node in values: continue # get assign in __init__ first XXX useful ? - if frame.name == '__init__' and values and not \ - values[0].frame().name == '__init__': + if (frame.name == '__init__' and values and + not values[0].frame().name == '__init__'): values.insert(0, node) else: values.append(node) - except InferenceError: + except exceptions.InferenceError: pass + +def parse(code, module_name='', path=None, apply_transforms=True): + """Parses a source string in order to obtain an astroid AST from it + + :param str code: The code for the module. + :param str module_name: The name for the module, if any + :param str path: The path for the module + :param bool apply_transforms: + Apply the transforms for the give code. Use it if you + don't want the default transforms to be applied. + """ + code = textwrap.dedent(code) + builder = AstroidBuilder(manager=MANAGER, + apply_transforms=apply_transforms) + return builder.string_build(code, modname=module_name, path=path) diff --git a/pymode/libs/astroid/context.py b/pymode/libs/astroid/context.py new file mode 100644 index 00000000..284dfa18 --- /dev/null +++ b/pymode/libs/astroid/context.py @@ -0,0 +1,81 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. 
+# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +"""Various context related utilities, including inference and call contexts.""" + +import contextlib + + +class InferenceContext(object): + __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred') + + def __init__(self, path=None, inferred=None): + self.path = path or set() + self.lookupname = None + self.callcontext = None + self.boundnode = None + self.inferred = inferred or {} + + def push(self, node): + name = self.lookupname + if (node, name) in self.path: + raise StopIteration() + self.path.add((node, name)) + + def clone(self): + # XXX copy lookupname/callcontext ? + clone = InferenceContext(self.path, inferred=self.inferred) + clone.callcontext = self.callcontext + clone.boundnode = self.boundnode + return clone + + def cache_generator(self, key, generator): + results = [] + for result in generator: + results.append(result) + yield result + + self.inferred[key] = tuple(results) + return + + @contextlib.contextmanager + def restore_path(self): + path = set(self.path) + yield + self.path = path + + +class CallContext(object): + """Holds information for a call site.""" + + __slots__ = ('args', 'keywords') + + def __init__(self, args, keywords=None): + self.args = args + if keywords: + keywords = [(arg.arg, arg.value) for arg in keywords] + else: + keywords = [] + self.keywords = keywords + + +def copy_context(context): + if context is not None: + return context.clone() + else: + return InferenceContext() diff --git a/pymode/libs/astroid/decorators.py b/pymode/libs/astroid/decorators.py new file mode 100644 index 00000000..a446536c --- /dev/null +++ b/pymode/libs/astroid/decorators.py @@ -0,0 +1,75 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +# +# The code in this file was originally part of logilab-common, licensed under +# the same license. + +""" A few useful function/method decorators.""" + +import wrapt + + +@wrapt.decorator +def cached(func, instance, args, kwargs): + """Simple decorator to cache result of method calls without args.""" + cache = getattr(instance, '__cache', None) + if cache is None: + instance.__cache = cache = {} + try: + return cache[func] + except KeyError: + cache[func] = result = func(*args, **kwargs) + return result + + +class cachedproperty(object): + """ Provides a cached property equivalent to the stacking of + @cached and @property, but more efficient. + + After first usage, the becomes part of the object's + __dict__. 
Doing: + + del obj. empties the cache. + + Idea taken from the pyramid_ framework and the mercurial_ project. + + .. _pyramid: http://pypi.python.org/pypi/pyramid + .. _mercurial: http://pypi.python.org/pypi/Mercurial + """ + __slots__ = ('wrapped',) + + def __init__(self, wrapped): + try: + wrapped.__name__ + except AttributeError: + raise TypeError('%s must have a __name__ attribute' % + wrapped) + self.wrapped = wrapped + + @property + def __doc__(self): + doc = getattr(self.wrapped, '__doc__', None) + return ('%s' + % ('\n%s' % doc if doc else '')) + + def __get__(self, inst, objtype=None): + if inst is None: + return self + val = self.wrapped(inst) + setattr(inst, self.wrapped.__name__, val) + return val diff --git a/pymode/libs/astroid/exceptions.py b/pymode/libs/astroid/exceptions.py index 3889e2e7..47f2fe50 100644 --- a/pymode/libs/astroid/exceptions.py +++ b/pymode/libs/astroid/exceptions.py @@ -30,6 +30,26 @@ class AstroidBuildingException(AstroidError): class ResolveError(AstroidError): """base class of astroid resolution/inference error""" +class MroError(ResolveError): + """Error raised when there is a problem with method resolution of a class.""" + + +class DuplicateBasesError(MroError): + """Error raised when there are duplicate bases in the same class bases.""" + + +class InconsistentMroError(MroError): + """Error raised when a class's MRO is inconsistent.""" + + +class SuperError(ResolveError): + """Error raised when there is a problem with a super call.""" + + +class SuperArgumentTypeError(SuperError): + """Error raised when the super arguments are invalid.""" + + class NotFoundError(ResolveError): """raised when we are unable to resolve a name""" diff --git a/pymode/libs/astroid/inference.py b/pymode/libs/astroid/inference.py index 22807049..ddd43561 100644 --- a/pymode/libs/astroid/inference.py +++ b/pymode/libs/astroid/inference.py @@ -18,125 +18,32 @@ """this module contains a set of functions to handle inference on astroid trees """ -__doctype__ = "restructuredtext en" - -from itertools import chain +from __future__ import print_function +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import manager from astroid import nodes +from astroid import protocols +from astroid import util -from astroid.manager import AstroidManager -from astroid.exceptions import (AstroidError, InferenceError, NoDefault, - NotFoundError, UnresolvableName) -from astroid.bases import (YES, Instance, InferenceContext, - _infer_stmts, copy_context, path_wrapper, - raise_if_nothing_infered) -from astroid.protocols import ( - _arguments_infer_argname, - BIN_OP_METHOD, UNARY_OP_METHOD) - -MANAGER = AstroidManager() - - -class CallContext(object): - """when inferring a function call, this class is used to remember values - given as argument - """ - def __init__(self, args, starargs, dstarargs): - self.args = [] - self.nargs = {} - for arg in args: - if isinstance(arg, nodes.Keyword): - self.nargs[arg.arg] = arg.value - else: - self.args.append(arg) - self.starargs = starargs - self.dstarargs = dstarargs - def infer_argument(self, funcnode, name, context): - """infer a function argument value according to the call context""" - # 1. search in named keywords - try: - return self.nargs[name].infer(context) - except KeyError: - # Function.args.args can be None in astroid (means that we don't have - # information on argnames) - argindex = funcnode.args.find_argname(name)[0] - if argindex is not None: - # 2. 
first argument of instance/class method - if argindex == 0 and funcnode.type in ('method', 'classmethod'): - if context.boundnode is not None: - boundnode = context.boundnode - else: - # XXX can do better ? - boundnode = funcnode.parent.frame() - if funcnode.type == 'method': - if not isinstance(boundnode, Instance): - boundnode = Instance(boundnode) - return iter((boundnode,)) - if funcnode.type == 'classmethod': - return iter((boundnode,)) - # if we have a method, extract one position - # from the index, so we'll take in account - # the extra parameter represented by `self` or `cls` - if funcnode.type in ('method', 'classmethod'): - argindex -= 1 - # 2. search arg index - try: - return self.args[argindex].infer(context) - except IndexError: - pass - # 3. search in *args (.starargs) - if self.starargs is not None: - its = [] - for infered in self.starargs.infer(context): - if infered is YES: - its.append((YES,)) - continue - try: - its.append(infered.getitem(argindex, context).infer(context)) - except (InferenceError, AttributeError): - its.append((YES,)) - except (IndexError, TypeError): - continue - if its: - return chain(*its) - # 4. XXX search in **kwargs (.dstarargs) - if self.dstarargs is not None: - its = [] - for infered in self.dstarargs.infer(context): - if infered is YES: - its.append((YES,)) - continue - try: - its.append(infered.getitem(name, context).infer(context)) - except (InferenceError, AttributeError): - its.append((YES,)) - except (IndexError, TypeError): - continue - if its: - return chain(*its) - # 5. */** argument, (Tuple or Dict) - if name == funcnode.args.vararg: - return iter((nodes.const_factory(()))) - if name == funcnode.args.kwarg: - return iter((nodes.const_factory({}))) - # 6. return default value if any - try: - return funcnode.args.default_value(name).infer(context) - except NoDefault: - raise InferenceError(name) +MANAGER = manager.AstroidManager() # .infer method ############################################################### def infer_end(self, context=None): - """inference's end for node such as Module, Class, Function, Const... + """inference's end for node such as Module, ClassDef, FunctionDef, + Const... + """ yield self nodes.Module._infer = infer_end -nodes.Class._infer = infer_end -nodes.Function._infer = infer_end +nodes.ClassDef._infer = infer_end +nodes.FunctionDef._infer = infer_end nodes.Lambda._infer = infer_end nodes.Const._infer = infer_end nodes.List._infer = infer_end @@ -157,7 +64,7 @@ def _higher_function_scope(node): which encloses the given node. 
""" current = node - while current.parent and not isinstance(current.parent, nodes.Function): + while current.parent and not isinstance(current.parent, nodes.FunctionDef): current = current.parent if current and current.parent: return current.parent @@ -174,72 +81,80 @@ def infer_name(self, context=None): _, stmts = parent_function.lookup(self.name) if not stmts: - raise UnresolvableName(self.name) + raise exceptions.UnresolvableName(self.name) context = context.clone() context.lookupname = self.name - return _infer_stmts(stmts, context, frame) -nodes.Name._infer = path_wrapper(infer_name) -nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper + return bases._infer_stmts(stmts, context, frame) +nodes.Name._infer = bases.path_wrapper(infer_name) +nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper -def infer_callfunc(self, context=None): - """infer a CallFunc node by trying to guess what the function returns""" +@bases.path_wrapper +@bases.raise_if_nothing_inferred +def infer_call(self, context=None): + """infer a Call node by trying to guess what the function returns""" callcontext = context.clone() - callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs) + callcontext.callcontext = contextmod.CallContext(args=self.args, + keywords=self.keywords) callcontext.boundnode = None for callee in self.func.infer(context): - if callee is YES: + if callee is util.YES: yield callee continue try: if hasattr(callee, 'infer_call_result'): - for infered in callee.infer_call_result(self, callcontext): - yield infered - except InferenceError: + for inferred in callee.infer_call_result(self, callcontext): + yield inferred + except exceptions.InferenceError: ## XXX log error ? continue -nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc)) +nodes.Call._infer = infer_call +@bases.path_wrapper def infer_import(self, context=None, asname=True): """infer an Import node: return the imported module/object""" name = context.lookupname if name is None: - raise InferenceError() + raise exceptions.InferenceError() if asname: yield self.do_import_module(self.real_name(name)) else: yield self.do_import_module(name) -nodes.Import._infer = path_wrapper(infer_import) +nodes.Import._infer = infer_import + def infer_name_module(self, name): - context = InferenceContext() + context = contextmod.InferenceContext() context.lookupname = name return self.infer(context, asname=False) nodes.Import.infer_name_module = infer_name_module -def infer_from(self, context=None, asname=True): - """infer a From nodes: return the imported module/object""" +@bases.path_wrapper +def infer_import_from(self, context=None, asname=True): + """infer a ImportFrom node: return the imported module/object""" name = context.lookupname if name is None: - raise InferenceError() + raise exceptions.InferenceError() if asname: name = self.real_name(name) module = self.do_import_module() try: - context = copy_context(context) + context = contextmod.copy_context(context) context.lookupname = name - return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context) - except NotFoundError: - raise InferenceError(name) -nodes.From._infer = path_wrapper(infer_from) + stmts = module.getattr(name, ignore_locals=module is self.root()) + return bases._infer_stmts(stmts, context) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) +nodes.ImportFrom._infer = infer_import_from -def infer_getattr(self, context=None): - """infer a Getattr node by 
using getattr on the associated object""" +@bases.raise_if_nothing_inferred +def infer_attribute(self, context=None): + """infer an Attribute node by using getattr on the associated object""" for owner in self.expr.infer(context): - if owner is YES: + if owner is util.YES: yield owner continue try: @@ -247,58 +162,69 @@ def infer_getattr(self, context=None): for obj in owner.igetattr(self.attrname, context): yield obj context.boundnode = None - except (NotFoundError, InferenceError): + except (exceptions.NotFoundError, exceptions.InferenceError): context.boundnode = None except AttributeError: # XXX method / function context.boundnode = None -nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr)) -nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper +nodes.Attribute._infer = bases.path_wrapper(infer_attribute) +nodes.AssignAttr.infer_lhs = infer_attribute # # won't work with a path wrapper +@bases.path_wrapper def infer_global(self, context=None): if context.lookupname is None: - raise InferenceError() + raise exceptions.InferenceError() try: - return _infer_stmts(self.root().getattr(context.lookupname), context) - except NotFoundError: - raise InferenceError() -nodes.Global._infer = path_wrapper(infer_global) + return bases._infer_stmts(self.root().getattr(context.lookupname), + context) + except exceptions.NotFoundError: + raise exceptions.InferenceError() +nodes.Global._infer = infer_global +@bases.raise_if_nothing_inferred def infer_subscript(self, context=None): - """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]""" + """Inference for subscripts + + We're understanding if the index is a Const + or a slice, passing the result of inference + to the value's `getitem` method, which should + handle each supported index type accordingly. + """ + value = next(self.value.infer(context)) - if value is YES: - yield YES + if value is util.YES: + yield util.YES return index = next(self.slice.infer(context)) - if index is YES: - yield YES + if index is util.YES: + yield util.YES return if isinstance(index, nodes.Const): try: assigned = value.getitem(index.value, context) except AttributeError: - raise InferenceError() + raise exceptions.InferenceError() except (IndexError, TypeError): - yield YES + yield util.YES return # Prevent inferring if the infered subscript # is the same as the original subscripted object. 
- if self is assigned: - yield YES + if self is assigned or assigned is util.YES: + yield util.YES return for infered in assigned.infer(context): yield infered else: - raise InferenceError() -nodes.Subscript._infer = path_wrapper(infer_subscript) -nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript) + raise exceptions.InferenceError() +nodes.Subscript._infer = bases.path_wrapper(infer_subscript) +nodes.Subscript.infer_lhs = infer_subscript +@bases.raise_if_nothing_inferred def infer_unaryop(self, context=None): for operand in self.operand.infer(context): try: @@ -306,9 +232,9 @@ def infer_unaryop(self, context=None): except TypeError: continue except AttributeError: - meth = UNARY_OP_METHOD[self.op] + meth = protocols.UNARY_OP_METHOD[self.op] if meth is None: - yield YES + yield util.YES else: try: # XXX just suppose if the type implement meth, returned type @@ -318,88 +244,116 @@ def infer_unaryop(self, context=None): except GeneratorExit: raise except: - yield YES -nodes.UnaryOp._infer = path_wrapper(infer_unaryop) + yield util.YES +nodes.UnaryOp._infer = bases.path_wrapper(infer_unaryop) -def _infer_binop(operator, operand1, operand2, context, failures=None): - if operand1 is YES: +def _infer_binop(binop, operand1, operand2, context, failures=None): + if operand1 is util.YES: yield operand1 return try: - for valnode in operand1.infer_binary_op(operator, operand2, context): + for valnode in operand1.infer_binary_op(binop, operand2, context): yield valnode except AttributeError: try: # XXX just suppose if the type implement meth, returned type # will be the same - operand1.getattr(BIN_OP_METHOD[operator]) + operand1.getattr(protocols.BIN_OP_METHOD[operator]) yield operand1 except: if failures is None: - yield YES + yield util.YES else: failures.append(operand1) +@bases.yes_if_nothing_inferred def infer_binop(self, context=None): failures = [] for lhs in self.left.infer(context): - for val in _infer_binop(self.op, lhs, self.right, context, failures): + for val in _infer_binop(self, lhs, self.right, context, failures): yield val for lhs in failures: for rhs in self.right.infer(context): - for val in _infer_binop(self.op, rhs, lhs, context): + for val in _infer_binop(self, rhs, lhs, context): yield val -nodes.BinOp._infer = path_wrapper(infer_binop) +nodes.BinOp._infer = bases.path_wrapper(infer_binop) def infer_arguments(self, context=None): name = context.lookupname if name is None: - raise InferenceError() - return _arguments_infer_argname(self, name, context) + raise exceptions.InferenceError() + return protocols._arguments_infer_argname(self, name, context) nodes.Arguments._infer = infer_arguments -def infer_ass(self, context=None): - """infer a AssName/AssAttr: need to inspect the RHS part of the +@bases.path_wrapper +def infer_assign(self, context=None): + """infer a AssignName/AssignAttr: need to inspect the RHS part of the assign node """ stmt = self.statement() if isinstance(stmt, nodes.AugAssign): return stmt.infer(context) + stmts = list(self.assigned_stmts(context=context)) - return _infer_stmts(stmts, context) -nodes.AssName._infer = path_wrapper(infer_ass) -nodes.AssAttr._infer = path_wrapper(infer_ass) + return bases._infer_stmts(stmts, context) +nodes.AssignName._infer = infer_assign +nodes.AssignAttr._infer = infer_assign def infer_augassign(self, context=None): failures = [] for lhs in self.target.infer_lhs(context): - for val in _infer_binop(self.op, lhs, self.value, context, failures): + for val in _infer_binop(self, lhs, self.value, context, failures): 
yield val for lhs in failures: for rhs in self.value.infer(context): - for val in _infer_binop(self.op, rhs, lhs, context): + for val in _infer_binop(self, rhs, lhs, context): yield val -nodes.AugAssign._infer = path_wrapper(infer_augassign) +nodes.AugAssign._infer = bases.path_wrapper(infer_augassign) # no infer method on DelName and DelAttr (expected InferenceError) - +@bases.path_wrapper def infer_empty_node(self, context=None): if not self.has_underlying_object(): - yield YES + yield util.YES else: try: - for infered in MANAGER.infer_ast_from_something(self.object, - context=context): - yield infered - except AstroidError: - yield YES -nodes.EmptyNode._infer = path_wrapper(infer_empty_node) + for inferred in MANAGER.infer_ast_from_something(self.object, + context=context): + yield inferred + except exceptions.AstroidError: + yield util.YES +nodes.EmptyNode._infer = infer_empty_node def infer_index(self, context=None): return self.value.infer(context) nodes.Index._infer = infer_index + +# TODO: move directly into bases.Instance when the dependency hell +# will be solved. +def instance_getitem(self, index, context=None): + # Rewrap index to Const for this case + index = nodes.Const(index) + if context: + new_context = context.clone() + else: + context = new_context = contextmod.InferenceContext() + + # Create a new callcontext for providing index as an argument. + new_context.callcontext = contextmod.CallContext(args=[index]) + new_context.boundnode = self + + method = next(self.igetattr('__getitem__', context=context)) + if not isinstance(method, bases.BoundMethod): + raise exceptions.InferenceError + + try: + return next(method.infer_call_result(self, new_context)) + except StopIteration: + raise exceptions.InferenceError + +bases.Instance.getitem = instance_getitem diff --git a/pymode/libs/astroid/inspector.py b/pymode/libs/astroid/inspector.py deleted file mode 100644 index 1fc31926..00000000 --- a/pymode/libs/astroid/inspector.py +++ /dev/null @@ -1,273 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . -"""visitor doing some postprocessing on the astroid tree. -Try to resolve definitions (namespace) dictionary, relationship... 
- -This module has been imported from pyreverse -""" - -__docformat__ = "restructuredtext en" - -from os.path import dirname - -import astroid -from astroid.exceptions import InferenceError -from astroid.utils import LocalsVisitor -from astroid.modutils import get_module_part, is_relative, is_standard_module - -class IdGeneratorMixIn(object): - """ - Mixin adding the ability to generate integer uid - """ - def __init__(self, start_value=0): - self.id_count = start_value - - def init_counter(self, start_value=0): - """init the id counter - """ - self.id_count = start_value - - def generate_id(self): - """generate a new identifier - """ - self.id_count += 1 - return self.id_count - - -class Linker(IdGeneratorMixIn, LocalsVisitor): - """ - walk on the project tree and resolve relationships. - - According to options the following attributes may be added to visited nodes: - - * uid, - a unique identifier for the node (on astroid.Project, astroid.Module, - astroid.Class and astroid.locals_type). Only if the linker has been instantiated - with tag=True parameter (False by default). - - * Function - a mapping from locals names to their bounded value, which may be a - constant like a string or an integer, or an astroid node (on astroid.Module, - astroid.Class and astroid.Function). - - * instance_attrs_type - as locals_type but for klass member attributes (only on astroid.Class) - - * implements, - list of implemented interface _objects_ (only on astroid.Class nodes) - """ - - def __init__(self, project, inherited_interfaces=0, tag=False): - IdGeneratorMixIn.__init__(self) - LocalsVisitor.__init__(self) - # take inherited interface in consideration or not - self.inherited_interfaces = inherited_interfaces - # tag nodes or not - self.tag = tag - # visited project - self.project = project - - - def visit_project(self, node): - """visit an astroid.Project node - - * optionally tag the node with a unique id - """ - if self.tag: - node.uid = self.generate_id() - for module in node.modules: - self.visit(module) - - def visit_package(self, node): - """visit an astroid.Package node - - * optionally tag the node with a unique id - """ - if self.tag: - node.uid = self.generate_id() - for subelmt in node.values(): - self.visit(subelmt) - - def visit_module(self, node): - """visit an astroid.Module node - - * set the locals_type mapping - * set the depends mapping - * optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - node.depends = [] - if self.tag: - node.uid = self.generate_id() - - def visit_class(self, node): - """visit an astroid.Class node - - * set the locals_type and instance_attrs_type mappings - * set the implements list and build it - * optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - if self.tag: - node.uid = self.generate_id() - # resolve ancestors - for baseobj in node.ancestors(recurs=False): - specializations = getattr(baseobj, 'specializations', []) - specializations.append(node) - baseobj.specializations = specializations - # resolve instance attributes - node.instance_attrs_type = {} - for assattrs in node.instance_attrs.values(): - for assattr in assattrs: - self.handle_assattr_type(assattr, node) - # resolve implemented interface - try: - node.implements = list(node.interfaces(self.inherited_interfaces)) - except InferenceError: - node.implements = () - - def visit_function(self, node): - """visit an astroid.Function node - - * set the locals_type mapping - 
* optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - if self.tag: - node.uid = self.generate_id() - - link_project = visit_project - link_module = visit_module - link_class = visit_class - link_function = visit_function - - def visit_assname(self, node): - """visit an astroid.AssName node - - handle locals_type - """ - # avoid double parsing done by different Linkers.visit - # running over the same project: - if hasattr(node, '_handled'): - return - node._handled = True - if node.name in node.frame(): - frame = node.frame() - else: - # the name has been defined as 'global' in the frame and belongs - # there. Btw the frame is not yet visited as the name is in the - # root locals; the frame hence has no locals_type attribute - frame = node.root() - try: - values = node.infered() - try: - already_infered = frame.locals_type[node.name] - for valnode in values: - if not valnode in already_infered: - already_infered.append(valnode) - except KeyError: - frame.locals_type[node.name] = values - except astroid.InferenceError: - pass - - def handle_assattr_type(self, node, parent): - """handle an astroid.AssAttr node - - handle instance_attrs_type - """ - try: - values = list(node.infer()) - try: - already_infered = parent.instance_attrs_type[node.attrname] - for valnode in values: - if not valnode in already_infered: - already_infered.append(valnode) - except KeyError: - parent.instance_attrs_type[node.attrname] = values - except astroid.InferenceError: - pass - - def visit_import(self, node): - """visit an astroid.Import node - - resolve module dependencies - """ - context_file = node.root().file - for name in node.names: - relative = is_relative(name[0], context_file) - self._imported_module(node, name[0], relative) - - - def visit_from(self, node): - """visit an astroid.From node - - resolve module dependencies - """ - basename = node.modname - context_file = node.root().file - if context_file is not None: - relative = is_relative(basename, context_file) - else: - relative = False - for name in node.names: - if name[0] == '*': - continue - # analyze dependencies - fullname = '%s.%s' % (basename, name[0]) - if fullname.find('.') > -1: - try: - # XXX: don't use get_module_part, missing package precedence - fullname = get_module_part(fullname, context_file) - except ImportError: - continue - if fullname != basename: - self._imported_module(node, fullname, relative) - - - def compute_module(self, context_name, mod_path): - """return true if the module should be added to dependencies""" - package_dir = dirname(self.project.path) - if context_name == mod_path: - return 0 - elif is_standard_module(mod_path, (package_dir,)): - return 1 - return 0 - - # protected methods ######################################################## - - def _imported_module(self, node, mod_path, relative): - """notify an imported module, used to analyze dependencies - """ - module = node.root() - context_name = module.name - if relative: - mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]), - mod_path) - if self.compute_module(context_name, mod_path): - # handle dependencies - if not hasattr(module, 'depends'): - module.depends = [] - mod_paths = module.depends - if not mod_path in mod_paths: - mod_paths.append(mod_path) diff --git a/pymode/libs/astroid/manager.py b/pymode/libs/astroid/manager.py index b1fb3058..d08adc29 100644 --- a/pymode/libs/astroid/manager.py +++ b/pymode/libs/astroid/manager.py @@ -21,45 +21,23 @@ """ from __future__ import 
print_function -__docformat__ = "restructuredtext en" - -import collections import imp import os -from os.path import dirname, join, isdir, exists -from warnings import warn import zipimport -from logilab.common.configuration import OptionsProviderMixIn - -from astroid.exceptions import AstroidBuildingException +from astroid import exceptions from astroid import modutils +from astroid import transforms -def astroid_wrapper(func, modname): - """wrapper to give to AstroidManager.project_from_files""" - print('parsing %s...' % modname) - try: - return func(modname) - except AstroidBuildingException as exc: - print(exc) - except Exception as exc: - import traceback - traceback.print_exc() - -def _silent_no_wrap(func, modname): - """silent wrapper that doesn't do anything; can be used for tests""" - return func(modname) - def safe_repr(obj): try: return repr(obj) - except: + except Exception: # pylint: disable=broad-except return '???' - -class AstroidManager(OptionsProviderMixIn): +class AstroidManager(object): """the astroid manager, responsible to build astroid from files or modules. @@ -67,31 +45,27 @@ class AstroidManager(OptionsProviderMixIn): """ name = 'astroid loader' - options = (("ignore", - {'type' : "csv", 'metavar' : "", - 'dest' : "black_list", "default" : ('CVS',), - 'help' : "add (may be a directory) to the black list\ -. It should be a base name, not a path. You may set this option multiple times\ -."}), - ("project", - {'default': "No Name", 'type' : 'string', 'short': 'p', - 'metavar' : '', - 'help' : 'set the project name.'}), - ) brain = {} + def __init__(self): self.__dict__ = AstroidManager.brain if not self.__dict__: - OptionsProviderMixIn.__init__(self) - self.load_defaults() # NOTE: cache entries are added by the [re]builder self.astroid_cache = {} self._mod_file_cache = {} - self.transforms = collections.defaultdict(list) self._failed_import_hooks = [] self.always_load_extensions = False self.optimize_ast = False self.extension_package_whitelist = set() + self._transform = transforms.TransformVisitor() + + # Export these APIs for convenience + self.register_transform = self._transform.register_transform + self.unregister_transform = self._transform.unregister_transform + + def visit_transforms(self, node): + """Visit the transforms and apply them to the given *node*.""" + return self._transform.visit(node) def ast_from_file(self, filepath, modname=None, fallback=True, source=False): """given a module name, return the astroid object""" @@ -105,15 +79,15 @@ def ast_from_file(self, filepath, modname=None, fallback=True, source=False): modname = '.'.join(modutils.modpath_from_file(filepath)) except ImportError: modname = filepath - if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath: + if modname in self.astroid_cache and self.astroid_cache[modname].source_file == filepath: return self.astroid_cache[modname] if source: from astroid.builder import AstroidBuilder return AstroidBuilder(self).file_build(filepath, modname) elif fallback and modname: return self.ast_from_module_name(modname) - raise AstroidBuildingException('unable to get astroid for file %s' % - filepath) + raise exceptions.AstroidBuildingException( + 'unable to get astroid for file %s' % filepath) def _build_stub_module(self, modname): from astroid.builder import AstroidBuilder @@ -137,7 +111,7 @@ def ast_from_module_name(self, modname, context_file=None): return self._build_stub_module(modname) old_cwd = os.getcwd() if context_file: - os.chdir(dirname(context_file)) + 
os.chdir(os.path.dirname(context_file)) try: filepath, mp_type = self.file_from_module_name(modname, context_file) if mp_type == modutils.PY_ZIPMODULE: @@ -151,18 +125,20 @@ def ast_from_module_name(self, modname, context_file=None): module = modutils.load_module_from_name(modname) except Exception as ex: msg = 'Unable to load module %s (%s)' % (modname, ex) - raise AstroidBuildingException(msg) + raise exceptions.AstroidBuildingException(msg) return self.ast_from_module(module, modname) elif mp_type == imp.PY_COMPILED: - raise AstroidBuildingException("Unable to load compiled module %s" % (modname,)) + msg = "Unable to load compiled module %s" % (modname,) + raise exceptions.AstroidBuildingException(msg) if filepath is None: - raise AstroidBuildingException("Unable to load module %s" % (modname,)) + msg = "Unable to load module %s" % (modname,) + raise exceptions.AstroidBuildingException(msg) return self.ast_from_file(filepath, modname, fallback=False) - except AstroidBuildingException as e: + except exceptions.AstroidBuildingException as e: for hook in self._failed_import_hooks: try: return hook(modname) - except AstroidBuildingException: + except exceptions.AstroidBuildingException: pass raise e finally: @@ -186,11 +162,12 @@ def zip_import_data(self, filepath): module = builder.string_build(importer.get_source(resource), zmodname, filepath) return module - except: + except Exception: # pylint: disable=broad-except continue return None def file_from_module_name(self, modname, contextfile): + # pylint: disable=redefined-variable-type try: value = self._mod_file_cache[(modname, contextfile)] except KeyError: @@ -199,9 +176,9 @@ def file_from_module_name(self, modname, contextfile): modname.split('.'), context_file=contextfile) except ImportError as ex: msg = 'Unable to load module %s (%s)' % (modname, ex) - value = AstroidBuildingException(msg) + value = exceptions.AstroidBuildingException(msg) self._mod_file_cache[(modname, contextfile)] = value - if isinstance(value, AstroidBuildingException): + if isinstance(value, exceptions.AstroidBuildingException): raise value return value @@ -226,12 +203,11 @@ def ast_from_class(self, klass, modname=None): try: modname = klass.__module__ except AttributeError: - raise AstroidBuildingException( - 'Unable to get module for class %s' % safe_repr(klass)) + msg = 'Unable to get module for class %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) modastroid = self.ast_from_module_name(modname) return modastroid.getattr(klass.__name__)[0] # XXX - def infer_ast_from_something(self, obj, context=None): """infer astroid for the given class""" if hasattr(obj, '__class__') and not isinstance(obj, type): @@ -241,75 +217,29 @@ def infer_ast_from_something(self, obj, context=None): try: modname = klass.__module__ except AttributeError: - raise AstroidBuildingException( - 'Unable to get module for %s' % safe_repr(klass)) + msg = 'Unable to get module for %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) except Exception as ex: - raise AstroidBuildingException( - 'Unexpected error while retrieving module for %s: %s' - % (safe_repr(klass), ex)) + msg = ('Unexpected error while retrieving module for %s: %s' + % (safe_repr(klass), ex)) + raise exceptions.AstroidBuildingException(msg) try: name = klass.__name__ except AttributeError: - raise AstroidBuildingException( - 'Unable to get name for %s' % safe_repr(klass)) + msg = 'Unable to get name for %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) except 
Exception as ex: - raise AstroidBuildingException( - 'Unexpected error while retrieving name for %s: %s' - % (safe_repr(klass), ex)) + exc = ('Unexpected error while retrieving name for %s: %s' + % (safe_repr(klass), ex)) + raise exceptions.AstroidBuildingException(exc) # take care, on living object __module__ is regularly wrong :( modastroid = self.ast_from_module_name(modname) if klass is obj: - for infered in modastroid.igetattr(name, context): - yield infered + for inferred in modastroid.igetattr(name, context): + yield inferred else: - for infered in modastroid.igetattr(name, context): - yield infered.instanciate_class() - - def project_from_files(self, files, func_wrapper=astroid_wrapper, - project_name=None, black_list=None): - """return a Project from a list of files or modules""" - # build the project representation - project_name = project_name or self.config.project - black_list = black_list or self.config.black_list - project = Project(project_name) - for something in files: - if not exists(something): - fpath = modutils.file_from_modpath(something.split('.')) - elif isdir(something): - fpath = join(something, '__init__.py') - else: - fpath = something - astroid = func_wrapper(self.ast_from_file, fpath) - if astroid is None: - continue - # XXX why is first file defining the project.path ? - project.path = project.path or astroid.file - project.add_module(astroid) - base_name = astroid.name - # recurse in package except if __init__ was explicitly given - if astroid.package and something.find('__init__') == -1: - # recurse on others packages / modules if this is a package - for fpath in modutils.get_module_files(dirname(astroid.file), - black_list): - astroid = func_wrapper(self.ast_from_file, fpath) - if astroid is None or astroid.name == base_name: - continue - project.add_module(astroid) - return project - - def register_transform(self, node_class, transform, predicate=None): - """Register `transform(node)` function to be applied on the given - Astroid's `node_class` if `predicate` is None or returns true - when called with the node as argument. - - The transform function may return a value which is then used to - substitute the original node in the tree. - """ - self.transforms[node_class].append((transform, predicate)) - - def unregister_transform(self, node_class, transform, predicate=None): - """Unregister the given transform.""" - self.transforms[node_class].remove((transform, predicate)) + for inferred in modastroid.igetattr(name, context): + yield inferred.instantiate_class() def register_failed_import_hook(self, hook): """Registers a hook to resolve imports that cannot be found otherwise. @@ -321,30 +251,6 @@ def register_failed_import_hook(self, hook): """ self._failed_import_hooks.append(hook) - def transform(self, node): - """Call matching transforms for the given node if any and return the - transformed node. 
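[Editor's aside, not part of the patch] With the TransformVisitor refactor in this manager.py diff, register_transform/unregister_transform become thin facades on the manager. A minimal sketch of registering a transform through that facade, assuming the (node_class, transform, predicate) signature described in the removed docstring, might be:

    # Minimal sketch; add_marker and looks_like_model are hypothetical names.
    from astroid import MANAGER, nodes

    def add_marker(cls_node):
        # Tag matching ClassDef nodes; the attribute name is illustrative only.
        cls_node._editor_marked = True

    def looks_like_model(cls_node):
        return cls_node.name.endswith("Model")

    MANAGER.register_transform(nodes.ClassDef, add_marker, looks_like_model)

Registered transforms are then applied when modules are subsequently built (cf. visit_transforms and the apply_transforms flag on parse() earlier in this series).
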
- """ - cls = node.__class__ - if cls not in self.transforms: - # no transform registered for this class of node - return node - - transforms = self.transforms[cls] - orig_node = node # copy the reference - for transform_func, predicate in transforms: - if predicate is None or predicate(node): - ret = transform_func(node) - # if the transformation function returns something, it's - # expected to be a replacement for the node - if ret is not None: - if node is not orig_node: - # node has already be modified by some previous - # transformation, warn about it - warn('node %s substituted multiple times' % node) - node = ret - return node - def cache_module(self, module): """Cache a module if no module with the same name is known yet.""" self.astroid_cache.setdefault(module.name, module) @@ -359,33 +265,3 @@ def clear_cache(self, astroid_builtin=None): import astroid.raw_building astroid.raw_building._astroid_bootstrapping( astroid_builtin=astroid_builtin) - - -class Project(object): - """a project handle a set of modules / packages""" - def __init__(self, name=''): - self.name = name - self.path = None - self.modules = [] - self.locals = {} - self.__getitem__ = self.locals.__getitem__ - self.__iter__ = self.locals.__iter__ - self.values = self.locals.values - self.keys = self.locals.keys - self.items = self.locals.items - - def add_module(self, node): - self.locals[node.name] = node - self.modules.append(node) - - def get_module(self, name): - return self.locals[name] - - def get_children(self): - return self.modules - - def __repr__(self): - return '' % (self.name, id(self), - len(self.modules)) - - diff --git a/pymode/libs/astroid/mixins.py b/pymode/libs/astroid/mixins.py index dbf1673a..57082f0f 100644 --- a/pymode/libs/astroid/mixins.py +++ b/pymode/libs/astroid/mixins.py @@ -18,16 +18,16 @@ """This module contains some mixins for the different nodes. """ -from logilab.common.decorators import cachedproperty +import warnings -from astroid.exceptions import (AstroidBuildingException, InferenceError, - NotFoundError) +from astroid import decorators +from astroid import exceptions class BlockRangeMixIn(object): """override block range """ - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): return self.lineno @@ -55,15 +55,29 @@ def _get_filtered_stmts(self, _, node, _stmts, mystmt): return [node], True return _stmts, False - def ass_type(self): + def assign_type(self): return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + class AssignTypeMixin(object): - def ass_type(self): + def assign_type(self): return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt): """method used in filter_stmts""" if self is mystmt: @@ -77,11 +91,18 @@ def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt): class ParentAssignTypeMixin(AssignTypeMixin): + def assign_type(self): + return self.parent.assign_type() + def ass_type(self): - return self.parent.ass_type() + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' 
+ % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() -class FromImportMixIn(FilterStmtsMixin): +class ImportFromMixin(FilterStmtsMixin): """MixIn for From and Import Nodes""" def _infer_name(self, frame, name): @@ -104,11 +125,14 @@ def do_import_module(self, modname=None): # FIXME: we used to raise InferenceError here, but why ? return mymodule try: - return mymodule.import_module(modname, level=level) - except AstroidBuildingException: - raise InferenceError(modname) + return mymodule.import_module(modname, level=level, + relative_only=level and level >= 1) + except exceptions.AstroidBuildingException as ex: + if isinstance(ex.args[0], SyntaxError): + raise exceptions.InferenceError(str(ex)) + raise exceptions.InferenceError(modname) except SyntaxError as ex: - raise InferenceError(str(ex)) + raise exceptions.InferenceError(str(ex)) def real_name(self, asname): """get name from 'as' name""" @@ -120,5 +144,4 @@ def real_name(self, asname): _asname = name if asname == _asname: return name - raise NotFoundError(asname) - + raise exceptions.NotFoundError(asname) diff --git a/pymode/libs/astroid/modutils.py b/pymode/libs/astroid/modutils.py index c547f3e6..31104cb5 100644 --- a/pymode/libs/astroid/modutils.py +++ b/pymode/libs/astroid/modutils.py @@ -28,10 +28,9 @@ """ from __future__ import with_statement -__docformat__ = "restructuredtext en" - import imp import os +import platform import sys from distutils.sysconfig import get_python_lib from distutils.errors import DistutilsPlatformError @@ -42,8 +41,6 @@ except ImportError: pkg_resources = None -from logilab.common import _handle_blacklist - PY_ZIPMODULE = object() if sys.platform.startswith('win'): @@ -53,12 +50,7 @@ PY_SOURCE_EXTS = ('py',) PY_COMPILED_EXTS = ('so',) -# Notes about STD_LIB_DIRS -# Consider arch-specific installation for STD_LIB_DIRS definition -# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on -# -# :see: `Problems with /usr/lib64 builds `_ -# :see: `FHS `_ + try: # The explicit sys.prefix is to work around a patch in virtualenv that # replaces the 'real' sys.prefix (i.e. the location of the binary) @@ -70,22 +62,53 @@ # Take care of installations where exec_prefix != prefix. get_python_lib(standard_lib=True, prefix=sys.exec_prefix), get_python_lib(standard_lib=True)]) - if os.name == 'nt': - STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) - try: - # real_prefix is defined when running inside virtualenv. - STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) - except AttributeError: - pass # get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to # non-valid path, see https://bugs.pypy.org/issue1164 except DistutilsPlatformError: STD_LIB_DIRS = set() -EXT_LIB_DIR = get_python_lib() +if os.name == 'nt': + STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) + try: + # real_prefix is defined when running inside virtualenv. + STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) + except AttributeError: + pass +if platform.python_implementation() == 'PyPy': + _root = os.path.join(sys.prefix, 'lib_pypy') + STD_LIB_DIRS.add(_root) + try: + # real_prefix is defined when running inside virtualenv. + STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'lib_pypy')) + except AttributeError: + pass + del _root +if os.name == 'posix': + # Need the real prefix is we're under a virtualenv, otherwise + # the usual one will do. 
+ try: + prefix = sys.real_prefix + except AttributeError: + prefix = sys.prefix + + def _posix_path(path): + base_python = 'python%d.%d' % sys.version_info[:2] + return os.path.join(prefix, path, base_python) + + STD_LIB_DIRS.add(_posix_path('lib')) + if sys.maxsize > 2**32: + # This tries to fix a problem with /usr/lib64 builds, + # where systems are running both 32-bit and 64-bit code + # on the same machine, which reflects into the places where + # standard library could be found. More details can be found + # here http://bugs.python.org/issue1294959. + # An easy reproducing case would be + # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753 + STD_LIB_DIRS.add(_posix_path('lib64')) -BUILTIN_MODULES = dict(zip(sys.builtin_module_names, - [1]*len(sys.builtin_module_names))) +EXT_LIB_DIR = get_python_lib() +IS_JYTHON = platform.python_implementation() == 'Jython' +BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True) class NoSourceFile(Exception): @@ -97,6 +120,32 @@ def _normalize_path(path): return os.path.normcase(os.path.abspath(path)) +def _path_from_filename(filename, is_jython=IS_JYTHON): + if not is_jython: + if sys.version_info > (3, 0): + return filename + else: + if filename.endswith(".pyc"): + return filename[:-1] + return filename + head, has_pyclass, _ = filename.partition("$py.class") + if has_pyclass: + return head + ".py" + return filename + + +def _handle_blacklist(blacklist, dirnames, filenames): + """remove files/directories in the black list + + dirnames/filenames are usually from os.walk + """ + for norecurs in blacklist: + if norecurs in dirnames: + dirnames.remove(norecurs) + elif norecurs in filenames: + filenames.remove(norecurs) + + _NORM_PATH_CACHE = {} def _cache_normalize_path(path): @@ -112,7 +161,7 @@ def _cache_normalize_path(path): result = _NORM_PATH_CACHE[path] = _normalize_path(path) return result -def load_module_from_name(dotted_name, path=None, use_sys=1): +def load_module_from_name(dotted_name, path=None, use_sys=True): """Load a Python module from its name. :type dotted_name: str @@ -184,14 +233,16 @@ def load_module_from_modpath(parts, path=None, use_sys=1): if prevmodule: setattr(prevmodule, part, module) _file = getattr(module, '__file__', '') + prevmodule = module + if not _file and _is_namespace(curname): + continue if not _file and len(modpath) != len(parts): raise ImportError('no module in %s' % '.'.join(parts[len(modpath):])) path = [os.path.dirname(_file)] - prevmodule = module return module -def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): +def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None): """Load a Python module from it's path. 
:type filepath: str @@ -219,9 +270,11 @@ def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): def _check_init(path, mod_path): """check there are some __init__.py all along the way""" + modpath = [] for part in mod_path: + modpath.append(part) path = os.path.join(path, part) - if not _has_init(path): + if not _is_namespace('.'.join(modpath)) and not _has_init(path): return False return True @@ -246,7 +299,9 @@ def modpath_from_file(filename, extrapath=None): :rtype: list(str) :return: the corresponding splitted module's name """ - base = os.path.splitext(os.path.abspath(filename))[0] + filename = _path_from_filename(filename) + filename = os.path.abspath(filename) + base = os.path.splitext(filename)[0] if extrapath is not None: for path_ in extrapath: path = os.path.abspath(path_) @@ -317,8 +372,8 @@ def file_info_from_modpath(modpath, path=None, context_file=None): def get_module_part(dotted_name, context_file=None): """given a dotted name return the module part of the name : - >>> get_module_part('logilab.common.modutils.get_module_part') - 'logilab.common.modutils' + >>> get_module_part('astroid.as_string.dump') + 'astroid.as_string' :type dotted_name: str :param dotted_name: full name of the identifier we are interested in @@ -382,9 +437,8 @@ def get_module_files(src_directory, blacklist): path of the directory corresponding to the package :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` + :param blacklist: iterable + list of files or directories to ignore. :rtype: list :return: @@ -419,7 +473,8 @@ def get_source_file(filename, include_no_ext=False): :rtype: str :return: the absolute path of the source file if it exists """ - base, orig_ext = os.path.splitext(os.path.abspath(filename)) + filename = os.path.abspath(_path_from_filename(filename)) + base, orig_ext = os.path.splitext(filename) for ext in PY_SOURCE_EXTS: source_path = '%s.%s' % (base, ext) if os.path.exists(source_path): @@ -464,7 +519,8 @@ def is_standard_module(modname, std_path=None): # modules which are not living in a file are considered standard # (sys and __builtin__ for instance) if filename is None: - return True + # we assume there are no namespaces in stdlib + return not _is_namespace(modname) filename = _normalize_path(filename) if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)): return False @@ -538,15 +594,27 @@ def _file_from_modpath(modpath, path=None, context=None): return mp_filename, mtype def _search_zip(modpath, pic): - for filepath, importer in pic.items(): + for filepath, importer in list(pic.items()): if importer is not None: if importer.find_module(modpath[0]): if not importer.find_module(os.path.sep.join(modpath)): raise ImportError('No module named %s in %s/%s' % ( '.'.join(modpath[1:]), filepath, modpath)) - return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath + return (PY_ZIPMODULE, + os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), + filepath) raise ImportError('No module named %s' % '.'.join(modpath)) +try: + import pkg_resources +except ImportError: + pkg_resources = None + + +def _is_namespace(modname): + return (pkg_resources is not None + and modname in pkg_resources._namespace_packages) + def _module_file(modpath, path=None): """get a module type / file path @@ -579,14 +647,13 @@ def _module_file(modpath, path=None): except AttributeError: checkeggs = False # pkg_resources support (aka 
setuptools namespace packages) - if (pkg_resources is not None - and modpath[0] in pkg_resources._namespace_packages - and modpath[0] in sys.modules - and len(modpath) > 1): + if _is_namespace(modpath[0]) and modpath[0] in sys.modules: # setuptools has added into sys.modules a module object with proper # __path__, get back information from there module = sys.modules[modpath.pop(0)] - path = module.__path__ + path = list(module.__path__) + if not modpath: + return imp.C_BUILTIN, None imported = [] while modpath: modname = modpath[0] @@ -609,7 +676,7 @@ def _module_file(modpath, path=None): # Don't forget to close the stream to avoid # spurious ResourceWarnings. if stream: - stream.close() + stream.close() if checkeggs and mp_filename: fullabspath = [_cache_normalize_path(x) for x in _path] @@ -639,7 +706,11 @@ def _module_file(modpath, path=None): except IOError: path = [mp_filename] else: - if b'pkgutil' in data and b'extend_path' in data: + extend_path = b'pkgutil' in data and b'extend_path' in data + declare_namespace = ( + b"pkg_resources" in data + and b"declare_namespace(__name__)" in data) + if extend_path or declare_namespace: # extend_path is called, search sys.path for module/packages # of this name see pkgutil.extend_path documentation path = [os.path.join(p, *imported) for p in sys.path diff --git a/pymode/libs/astroid/node_classes.py b/pymode/libs/astroid/node_classes.py index 4b413ef8..ca773c3a 100644 --- a/pymode/libs/astroid/node_classes.py +++ b/pymode/libs/astroid/node_classes.py @@ -18,40 +18,47 @@ """Module for some node classes. More nodes in scoped_nodes.py """ -import sys +import abc +import warnings +import lazy_object_proxy import six -from logilab.common.decorators import cachedproperty -from astroid.exceptions import NoDefault -from astroid.bases import (NodeNG, Statement, Instance, InferenceContext, - _infer_stmts, YES, BUILTINS) -from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin, - ParentAssignTypeMixin, FromImportMixIn) +from astroid import bases +from astroid import context as contextmod +from astroid import decorators +from astroid import exceptions +from astroid import mixins +from astroid import util -PY3K = sys.version_info >= (3, 0) +BUILTINS = six.moves.builtins.__name__ + +@bases.raise_if_nothing_inferred def unpack_infer(stmt, context=None): """recursively generate nodes inferred by the given statement. 
If the inferred value is a list or a tuple, recurse on the elements """ if isinstance(stmt, (List, Tuple)): for elt in stmt.elts: - for infered_elt in unpack_infer(elt, context): - yield infered_elt + if elt is util.YES: + yield elt + continue + for inferred_elt in unpack_infer(elt, context): + yield inferred_elt return - # if infered is a final node, return it and stop - infered = next(stmt.infer(context)) - if infered is stmt: - yield infered + # if inferred is a final node, return it and stop + inferred = next(stmt.infer(context)) + if inferred is stmt: + yield inferred return # else, infer recursivly, except YES object that should be returned as is - for infered in stmt.infer(context): - if infered is YES: - yield infered + for inferred in stmt.infer(context): + if inferred is util.YES: + yield inferred else: - for inf_inf in unpack_infer(infered, context): + for inf_inf in unpack_infer(inferred, context): yield inf_inf @@ -93,7 +100,9 @@ def are_exclusive(stmt1, stmt2, exceptions=None): c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: - if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or + if ((c2attr == 'body' + and c1attr == 'handlers' + and children[node].catch(exceptions)) or (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or (c2attr == 'handlers' and c1attr == 'orelse') or (c2attr == 'orelse' and c1attr == 'handlers')): @@ -106,6 +115,31 @@ def are_exclusive(stmt1, stmt2, exceptions=None): return False +@six.add_metaclass(abc.ABCMeta) +class _BaseContainer(mixins.ParentAssignTypeMixin, + bases.NodeNG, + bases.Instance): + """Base class for Set, FrozenSet, Tuple and List.""" + + _astroid_fields = ('elts',) + + def __init__(self, elts=None): + if elts is None: + self.elts = [] + else: + self.elts = [const_factory(e) for e in elts] + + def itered(self): + return self.elts + + def bool_value(self): + return bool(self.elts) + + @abc.abstractmethod + def pytype(self): + pass + + class LookupMixIn(object): """Mixin looking up a name in the right scope """ @@ -124,14 +158,14 @@ def lookup(self, name): return self.scope().scope_lookup(self, name) def ilookup(self, name): - """infered lookup + """inferred lookup - return an iterator on infered values of the statements returned by + return an iterator on inferred values of the statements returned by the lookup method """ frame, stmts = self.lookup(name) - context = InferenceContext() - return _infer_stmts(stmts, context, frame) + context = contextmod.InferenceContext() + return bases._infer_stmts(stmts, context, frame) def _filter_stmts(self, stmts, frame, offset): """filter statements to remove ignorable statements. 
@@ -163,8 +197,7 @@ def _filter_stmts(self, stmts, frame, offset): if self.statement() is myframe and myframe.parent: myframe = myframe.parent.frame() - if not myframe is frame or self is frame: - return stmts + mystmt = self.statement() # line filtering if we are in the same frame # @@ -183,19 +216,18 @@ def _filter_stmts(self, stmts, frame, offset): # line filtering is on and we have reached our location, break if mylineno > 0 and stmt.fromlineno > mylineno: break - assert hasattr(node, 'ass_type'), (node, node.scope(), - node.scope().locals) - ass_type = node.ass_type() - + assert hasattr(node, 'assign_type'), (node, node.scope(), + node.scope().locals) + assign_type = node.assign_type() if node.has_base(self): break - _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt) + _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt) if done: break - optional_assign = ass_type.optional_assign - if optional_assign and ass_type.parent_of(self): + optional_assign = assign_type.optional_assign + if optional_assign and assign_type.parent_of(self): # we are inside a loop, loop var assigment is hidding previous # assigment _stmts = [node] @@ -210,7 +242,7 @@ def _filter_stmts(self, stmts, frame, offset): else: # we got a parent index, this means the currently visited node # is at the same block level as a previously visited node - if _stmts[pindex].ass_type().parent_of(ass_type): + if _stmts[pindex].assign_type().parent_of(assign_type): # both statements are not at the same block level continue # if currently visited node is following previously considered @@ -239,7 +271,7 @@ def _filter_stmts(self, stmts, frame, offset): if not (optional_assign or are_exclusive(_stmts[pindex], node)): del _stmt_parents[pindex] del _stmts[pindex] - if isinstance(node, AssName): + if isinstance(node, AssignName): if not optional_assign and stmt.parent is mystmt.parent: _stmts = [] _stmt_parents = [] @@ -252,27 +284,24 @@ def _filter_stmts(self, stmts, frame, offset): _stmt_parents.append(stmt.parent) return _stmts + # Name classes -class AssName(LookupMixIn, ParentAssignTypeMixin, NodeNG): +class AssignName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing an AssName node""" -class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG): +class DelName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a DelName node""" -class Name(LookupMixIn, NodeNG): +class Name(LookupMixIn, bases.NodeNG): """class representing a Name node""" - - -##################### node classes ######################################## - -class Arguments(NodeNG, AssignTypeMixin): +class Arguments(mixins.AssignTypeMixin, bases.NodeNG): """class representing an Arguments node""" - if PY3K: + if six.PY3: # Python 3.4+ uses a different approach regarding annotations, # each argument is a new class, _ast.arg, which exposes an # 'annotation' attribute. 
In astroid though, arguments are exposed @@ -306,7 +335,7 @@ def _infer_name(self, frame, name): return name return None - @cachedproperty + @decorators.cachedproperty def fromlineno(self): lineno = super(Arguments, self).fromlineno return max(lineno, self.parent.fromlineno or 0) @@ -315,15 +344,18 @@ def format_args(self): """return arguments formatted as string""" result = [] if self.args: - result.append(_format_args(self.args, self.defaults)) + result.append( + _format_args(self.args, self.defaults, + getattr(self, 'annotations', None)) + ) if self.vararg: result.append('*%s' % self.vararg) - if self.kwarg: - result.append('**%s' % self.kwarg) if self.kwonlyargs: if not self.vararg: result.append('*') result.append(_format_args(self.kwonlyargs, self.kw_defaults)) + if self.kwarg: + result.append('**%s' % self.kwarg) return ', '.join(result) def default_value(self, argname): @@ -339,7 +371,7 @@ def default_value(self, argname): i = _find_arg(argname, self.kwonlyargs)[0] if i is not None and self.kw_defaults[i] is not None: return self.kw_defaults[i] - raise NoDefault() + raise exceptions.NoDefault() def is_argument(self, name): """return True if the name is defined in arguments""" @@ -374,79 +406,91 @@ def _find_arg(argname, args, rec=False): return None, None -def _format_args(args, defaults=None): +def _format_args(args, defaults=None, annotations=None): values = [] if args is None: return '' + if annotations is None: + annotations = [] if defaults is not None: default_offset = len(args) - len(defaults) - for i, arg in enumerate(args): + packed = six.moves.zip_longest(args, annotations) + for i, (arg, annotation) in enumerate(packed): if isinstance(arg, Tuple): values.append('(%s)' % _format_args(arg.elts)) else: - values.append(arg.name) + argname = arg.name + if annotation is not None: + argname += ':' + annotation.as_string() + values.append(argname) + if defaults is not None and i >= default_offset: if defaults[i-default_offset] is not None: values[-1] += '=' + defaults[i-default_offset].as_string() return ', '.join(values) -class AssAttr(NodeNG, ParentAssignTypeMixin): - """class representing an AssAttr node""" +class AssignAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): + """class representing an AssignAttr node""" _astroid_fields = ('expr',) expr = None -class Assert(Statement): +class Assert(bases.Statement): """class representing an Assert node""" _astroid_fields = ('test', 'fail',) test = None fail = None -class Assign(Statement, AssignTypeMixin): +class Assign(bases.Statement, mixins.AssignTypeMixin): """class representing an Assign node""" _astroid_fields = ('targets', 'value',) targets = None value = None -class AugAssign(Statement, AssignTypeMixin): +class AugAssign(bases.Statement, mixins.AssignTypeMixin): """class representing an AugAssign node""" _astroid_fields = ('target', 'value',) target = None value = None -class Backquote(NodeNG): +class Repr(bases.NodeNG): """class representing a Backquote node""" _astroid_fields = ('value',) value = None -class BinOp(NodeNG): +class BinOp(bases.NodeNG): """class representing a BinOp node""" _astroid_fields = ('left', 'right',) left = None right = None -class BoolOp(NodeNG): +class BoolOp(bases.NodeNG): """class representing a BoolOp node""" _astroid_fields = ('values',) values = None -class Break(Statement): +class Break(bases.Statement): """class representing a Break node""" -class CallFunc(NodeNG): - """class representing a CallFunc node""" - _astroid_fields = ('func', 'args', 'starargs', 'kwargs') +class 
Call(bases.NodeNG): + """class representing a Call node""" + _astroid_fields = ('func', 'args', 'keywords') func = None args = None - starargs = None - kwargs = None + keywords = None + + @property + def starargs(self): + args = self.args or [] + return [arg for arg in args if isinstance(arg, Starred)] - def __init__(self): - self.starargs = None - self.kwargs = None + @property + def kwargs(self): + keywords = self.keywords or [] + return [keyword for keyword in keywords if keyword.arg is None] -class Compare(NodeNG): +class Compare(bases.NodeNG): """class representing a Compare node""" _astroid_fields = ('left', 'ops',) left = None @@ -464,7 +508,8 @@ def last_child(self): return self.ops[-1][1] #return self.left -class Comprehension(NodeNG): + +class Comprehension(bases.NodeNG): """class representing a Comprehension node""" _astroid_fields = ('target', 'iter', 'ifs') target = None @@ -472,9 +517,16 @@ class Comprehension(NodeNG): ifs = None optional_assign = True - def ass_type(self): + def assign_type(self): return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt): """method used in filter_stmts""" if self is mystmt: @@ -490,7 +542,7 @@ def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt): return stmts, False -class Const(NodeNG, Instance): +class Const(bases.NodeNG, bases.Instance): """represent a constant node like num, str, bool, None, bytes""" def __init__(self, value=None): @@ -499,6 +551,11 @@ def __init__(self, value=None): def getitem(self, index, context=None): if isinstance(self.value, six.string_types): return Const(self.value[index]) + if isinstance(self.value, bytes) and six.PY3: + # Bytes aren't instances of six.string_types + # on Python 3. Also, indexing them should return + # integers. 
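The rewritten Call node above drops the dedicated starargs/kwargs slots and recovers them from args and keywords, matching how CPython parses calls from 3.5 onwards. A quick check against the standard ast module, assuming Python 3.5 or later:

import ast

call = ast.parse("f(x, *extra, **options)", mode="eval").body
# *extra arrives as an ast.Starred inside .args and **options as an
# ast.keyword whose .arg is None; the two properties above simply
# filter for those shapes.
starred = [a for a in call.args if isinstance(a, ast.Starred)]
double_starred = [kw for kw in call.keywords if kw.arg is None]
assert len(starred) == 1 and len(double_starred) == 1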
+ return Const(self.value[index]) raise TypeError('%r (value=%s)' % (self, self.value)) def has_dynamic_getattr(self): @@ -513,11 +570,11 @@ def pytype(self): return self._proxied.qname() -class Continue(Statement): +class Continue(bases.Statement): """class representing a Continue node""" -class Decorators(NodeNG): +class Decorators(bases.NodeNG): """class representing a Decorators node""" _astroid_fields = ('nodes',) nodes = None @@ -529,19 +586,21 @@ def scope(self): # skip the function node to go directly to the upper level scope return self.parent.parent.scope() -class DelAttr(NodeNG, ParentAssignTypeMixin): + +class DelAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a DelAttr node""" _astroid_fields = ('expr',) expr = None -class Delete(Statement, AssignTypeMixin): + +class Delete(mixins.AssignTypeMixin, bases.Statement): """class representing a Delete node""" _astroid_fields = ('targets',) targets = None -class Dict(NodeNG, Instance): +class Dict(bases.NodeNG, bases.Instance): """class representing a Dict node""" _astroid_fields = ('items',) @@ -550,7 +609,7 @@ def __init__(self, items=None): self.items = [] else: self.items = [(const_factory(k), const_factory(v)) - for k, v in items.items()] + for k, v in list(items.items())] def pytype(self): return '%s.dict' % BUILTINS @@ -573,39 +632,45 @@ def itered(self): def getitem(self, lookup_key, context=None): for key, value in self.items: - for inferedkey in key.infer(context): - if inferedkey is YES: + # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}. + if isinstance(key, DictUnpack): + try: + return value.getitem(lookup_key, context) + except IndexError: continue - if isinstance(inferedkey, Const) \ - and inferedkey.value == lookup_key: + for inferredkey in key.infer(context): + if inferredkey is util.YES: + continue + if isinstance(inferredkey, Const) \ + and inferredkey.value == lookup_key: return value # This should raise KeyError, but all call sites only catch # IndexError. Let's leave it like that for now. 
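The DictUnpack branch above handles PEP 448 literals such as {1: 2, **other}. At the language level the unpacked mapping simply merges in, and CPython's parser leaves the key slot of that entry empty, which the rebuilder later in this patch converts into a DictUnpack node. A small check, assuming Python 3.5 or later:

import ast

other = {3: 4}
assert {1: 2, **other} == {1: 2, 3: 4}   # PEP 448 merge, needs 3.5+

# The parser stores no key for the unpacked entry.
node = ast.parse("{1: 2, **other}", mode="eval").body
assert node.keys[-1] is None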
raise IndexError(lookup_key) -class Discard(Statement): - """class representing a Discard node""" +class Expr(bases.Statement): + """class representing a Expr node""" _astroid_fields = ('value',) value = None -class Ellipsis(NodeNG): +class Ellipsis(bases.NodeNG): # pylint: disable=redefined-builtin """class representing an Ellipsis node""" -class EmptyNode(NodeNG): +class EmptyNode(bases.NodeNG): """class representing an EmptyNode node""" -class ExceptHandler(Statement, AssignTypeMixin): +class ExceptHandler(mixins.AssignTypeMixin, bases.Statement): """class representing an ExceptHandler node""" _astroid_fields = ('type', 'name', 'body',) type = None name = None body = None - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): if self.name: return self.name.tolineno @@ -622,7 +687,7 @@ def catch(self, exceptions): return True -class Exec(Statement): +class Exec(bases.Statement): """class representing an Exec node""" _astroid_fields = ('expr', 'globals', 'locals',) expr = None @@ -630,12 +695,12 @@ class Exec(Statement): locals = None -class ExtSlice(NodeNG): +class ExtSlice(bases.NodeNG): """class representing an ExtSlice node""" _astroid_fields = ('dims',) dims = None -class For(BlockRangeMixIn, AssignTypeMixin, Statement): +class For(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): """class representing a For node""" _astroid_fields = ('target', 'iter', 'body', 'orelse',) target = None @@ -644,12 +709,26 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement): orelse = None optional_assign = True - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): return self.iter.tolineno -class From(FromImportMixIn, Statement): +class AsyncFor(For): + """Asynchronous For built with `async` keyword.""" + + +class Await(bases.NodeNG): + """Await node for the `await` keyword.""" + + _astroid_fields = ('value', ) + value = None + + def postinit(self, value=None): + self.value = value + + +class ImportFrom(mixins.ImportFromMixin, bases.Statement): """class representing a From node""" def __init__(self, fromname, names, level=0): @@ -657,13 +736,13 @@ def __init__(self, fromname, names, level=0): self.names = names self.level = level -class Getattr(NodeNG): - """class representing a Getattr node""" +class Attribute(bases.NodeNG): + """class representing a Attribute node""" _astroid_fields = ('expr',) expr = None -class Global(Statement): +class Global(bases.Statement): """class representing a Global node""" def __init__(self, names): @@ -673,14 +752,14 @@ def _infer_name(self, frame, name): return name -class If(BlockRangeMixIn, Statement): +class If(mixins.BlockRangeMixIn, bases.Statement): """class representing an If node""" _astroid_fields = ('test', 'body', 'orelse') test = None body = None orelse = None - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): return self.test.tolineno @@ -694,7 +773,7 @@ def block_range(self, lineno): self.body[0].fromlineno - 1) -class IfExp(NodeNG): +class IfExp(bases.NodeNG): """class representing an IfExp node""" _astroid_fields = ('test', 'body', 'orelse') test = None @@ -702,31 +781,24 @@ class IfExp(NodeNG): orelse = None -class Import(FromImportMixIn, Statement): +class Import(mixins.ImportFromMixin, bases.Statement): """class representing an Import node""" -class Index(NodeNG): +class Index(bases.NodeNG): """class representing an Index node""" _astroid_fields = ('value',) value = None -class Keyword(NodeNG): +class Keyword(bases.NodeNG): """class representing a Keyword 
node""" _astroid_fields = ('value',) value = None -class List(NodeNG, Instance, ParentAssignTypeMixin): +class List(_BaseContainer): """class representing a List node""" - _astroid_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): return '%s.list' % BUILTINS @@ -734,11 +806,8 @@ def pytype(self): def getitem(self, index, context=None): return self.elts[index] - def itered(self): - return self.elts - -class Nonlocal(Statement): +class Nonlocal(bases.Statement): """class representing a Nonlocal node""" def __init__(self, names): @@ -748,21 +817,21 @@ def _infer_name(self, frame, name): return name -class Pass(Statement): +class Pass(bases.Statement): """class representing a Pass node""" -class Print(Statement): +class Print(bases.Statement): """class representing a Print node""" _astroid_fields = ('dest', 'values',) dest = None values = None -class Raise(Statement): +class Raise(bases.Statement): """class representing a Raise node""" exc = None - if sys.version_info < (3, 0): + if six.PY2: _astroid_fields = ('exc', 'inst', 'tback') inst = None tback = None @@ -779,50 +848,40 @@ def raises_not_implemented(self): return True -class Return(Statement): +class Return(bases.Statement): """class representing a Return node""" _astroid_fields = ('value',) value = None -class Set(NodeNG, Instance, ParentAssignTypeMixin): +class Set(_BaseContainer): """class representing a Set node""" - _astroid_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): return '%s.set' % BUILTINS - def itered(self): - return self.elts - -class Slice(NodeNG): +class Slice(bases.NodeNG): """class representing a Slice node""" _astroid_fields = ('lower', 'upper', 'step') lower = None upper = None step = None -class Starred(NodeNG, ParentAssignTypeMixin): +class Starred(mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a Starred node""" _astroid_fields = ('value',) value = None -class Subscript(NodeNG): +class Subscript(bases.NodeNG): """class representing a Subscript node""" _astroid_fields = ('value', 'slice') value = None slice = None -class TryExcept(BlockRangeMixIn, Statement): +class TryExcept(mixins.BlockRangeMixIn, bases.Statement): """class representing a TryExcept node""" _astroid_fields = ('body', 'handlers', 'orelse',) body = None @@ -845,7 +904,7 @@ def block_range(self, lineno): return self._elsed_block_range(lineno, self.orelse, last) -class TryFinally(BlockRangeMixIn, Statement): +class TryFinally(mixins.BlockRangeMixIn, bases.Statement): """class representing a TryFinally node""" _astroid_fields = ('body', 'finalbody',) body = None @@ -861,15 +920,8 @@ def block_range(self, lineno): return self._elsed_block_range(lineno, self.finalbody) -class Tuple(NodeNG, Instance, ParentAssignTypeMixin): +class Tuple(_BaseContainer): """class representing a Tuple node""" - _astroid_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): return '%s.tuple' % BUILTINS @@ -877,24 +929,21 @@ def pytype(self): def getitem(self, index, context=None): return self.elts[index] - def itered(self): - return self.elts - -class UnaryOp(NodeNG): +class UnaryOp(bases.NodeNG): """class representing an UnaryOp node""" _astroid_fields = ('operand',) operand = None -class While(BlockRangeMixIn, Statement): 
+class While(mixins.BlockRangeMixIn, bases.Statement): """class representing a While node""" _astroid_fields = ('test', 'body', 'orelse',) test = None body = None orelse = None - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): return self.test.tolineno @@ -903,13 +952,13 @@ def block_range(self, lineno): return self. _elsed_block_range(lineno, self.orelse) -class With(BlockRangeMixIn, AssignTypeMixin, Statement): +class With(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): """class representing a With node""" _astroid_fields = ('items', 'body') items = None body = None - @cachedproperty + @decorators.cachedproperty def blockstart_tolineno(self): return self.items[-1][0].tolineno @@ -921,7 +970,12 @@ def get_children(self): for elt in self.body: yield elt -class Yield(NodeNG): + +class AsyncWith(With): + """Asynchronous `with` built with the `async` keyword.""" + + +class Yield(bases.NodeNG): """class representing a Yield node""" _astroid_fields = ('value',) value = None @@ -929,6 +983,11 @@ class Yield(NodeNG): class YieldFrom(Yield): """ Class representing a YieldFrom node. """ + +class DictUnpack(bases.NodeNG): + """Represents the unpacking of dicts into dicts using PEP 448.""" + + # constants ############################################################## CONST_CLS = { @@ -937,19 +996,20 @@ class YieldFrom(Yield): dict: Dict, set: Set, type(None): Const, + type(NotImplemented): Const, } def _update_const_classes(): """update constant classes, so the keys of CONST_CLS can be reused""" klasses = (bool, int, float, complex, str) - if sys.version_info < (3, 0): + if six.PY2: klasses += (unicode, long) - if sys.version_info >= (2, 6): - klasses += (bytes,) + klasses += (bytes,) for kls in klasses: CONST_CLS[kls] = Const _update_const_classes() + def const_factory(value): """return an astroid node for a python value""" # XXX we should probably be stricter here and only consider stuff in @@ -957,10 +1017,37 @@ def const_factory(value): # we should rather recall the builder on this value than returning an empty # node (another option being that const_factory shouldn't be called with something # not in CONST_CLS) - assert not isinstance(value, NodeNG) + assert not isinstance(value, bases.NodeNG) try: return CONST_CLS[value.__class__](value) except (KeyError, AttributeError): node = EmptyNode() node.object = value return node + + +# Backward-compatibility aliases +def instancecheck(cls, other): + wrapped = cls.__wrapped__ + other_cls = other.__class__ + is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped) + warnings.warn("%r is deprecated and slated for removal in astroid " + "2.0, use %r instead" % (cls.__class__.__name__, + wrapped.__name__), + PendingDeprecationWarning, stacklevel=2) + return is_instance_of + + +def proxy_alias(alias_name, node_type): + proxy = type(alias_name, (lazy_object_proxy.Proxy,), + {'__class__': object.__dict__['__class__'], + '__instancecheck__': instancecheck}) + return proxy(lambda: node_type) + +Backquote = proxy_alias('Backquote', Repr) +Discard = proxy_alias('Discard', Expr) +AssName = proxy_alias('AssName', AssignName) +AssAttr = proxy_alias('AssAttr', AssignAttr) +Getattr = proxy_alias('Getattr', Attribute) +CallFunc = proxy_alias('CallFunc', Call) +From = proxy_alias('From', ImportFrom) diff --git a/pymode/libs/astroid/nodes.py b/pymode/libs/astroid/nodes.py index 67c2f8e8..2fd6cb65 100644 --- a/pymode/libs/astroid/nodes.py +++ b/pymode/libs/astroid/nodes.py @@ -24,40 +24,54 @@ .next_sibling(), 
returning next sibling statement node .statement(), returning the first parent node marked as statement node .frame(), returning the first node defining a new local scope (i.e. - Module, Function or Class) + Module, FunctionDef or ClassDef) .set_local(name, node), define an identifier on the first parent frame, with the node defining it. This is used by the astroid builder and should not be used from out there. -on From and Import : +on ImportFrom and Import : .real_name(name), """ -# pylint: disable=unused-import +# pylint: disable=unused-import,redefined-builtin + +from astroid.node_classes import ( + Arguments, AssignAttr, Assert, Assign, + AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, Compare, + Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, + Dict, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, + ImportFrom, Attribute, Global, If, IfExp, Import, Index, Keyword, + List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, + TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, + const_factory, + AsyncFor, Await, AsyncWith, + # Backwards-compatibility aliases + Backquote, Discard, AssName, AssAttr, Getattr, CallFunc, From, + # Node not present in the builtin ast module. + DictUnpack, +) +from astroid.scoped_nodes import ( + Module, GeneratorExp, Lambda, DictComp, + ListComp, SetComp, FunctionDef, ClassDef, + AsyncFunctionDef, + # Backwards-compatibility aliases + Class, Function, GenExpr, +) -__docformat__ = "restructuredtext en" -from astroid.node_classes import Arguments, AssAttr, Assert, Assign, \ - AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \ - Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \ - Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \ - From, Getattr, Global, If, IfExp, Import, Index, Keyword, \ - List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \ - TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, \ - const_factory -from astroid.scoped_nodes import Module, GenExpr, Lambda, DictComp, \ - ListComp, SetComp, Function, Class ALL_NODE_CLASSES = ( - Arguments, AssAttr, Assert, Assign, AssName, AugAssign, - Backquote, BinOp, BoolOp, Break, - CallFunc, Class, Compare, Comprehension, Const, Continue, + AsyncFunctionDef, AsyncFor, AsyncWith, Await, + + Arguments, AssignAttr, Assert, Assign, AssignName, AugAssign, + Repr, BinOp, BoolOp, Break, + Call, ClassDef, Compare, Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, - Dict, DictComp, Discard, + Dict, DictComp, DictUnpack, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, - For, From, Function, - Getattr, GenExpr, Global, + For, ImportFrom, FunctionDef, + Attribute, GeneratorExp, Global, If, IfExp, Import, Index, Keyword, Lambda, List, ListComp, @@ -69,6 +83,5 @@ TryExcept, TryFinally, Tuple, UnaryOp, While, With, - Yield, YieldFrom + Yield, YieldFrom, ) - diff --git a/pymode/libs/astroid/objects.py b/pymode/libs/astroid/objects.py new file mode 100644 index 00000000..d2f4270b --- /dev/null +++ b/pymode/libs/astroid/objects.py @@ -0,0 +1,186 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +""" +Inference objects are a way to represent composite AST nodes, +which are used only as inference results, so they can't be found in the +code tree. For instance, inferring the following frozenset use, leads to an +inferred FrozenSet: + + CallFunc(func=Name('frozenset'), args=Tuple(...)) + +""" + +import six + +from astroid import MANAGER +from astroid.bases import ( + BUILTINS, NodeNG, Instance, _infer_stmts, + BoundMethod, _is_property +) +from astroid.decorators import cachedproperty +from astroid.exceptions import ( + SuperError, SuperArgumentTypeError, + NotFoundError, MroError +) +from astroid.node_classes import const_factory +from astroid.scoped_nodes import ClassDef, FunctionDef +from astroid.mixins import ParentAssignTypeMixin + + +class FrozenSet(NodeNG, Instance, ParentAssignTypeMixin): + """class representing a FrozenSet composite node""" + + def __init__(self, elts=None): + if elts is None: + self.elts = [] + else: + self.elts = [const_factory(e) for e in elts] + + def pytype(self): + return '%s.frozenset' % BUILTINS + + def itered(self): + return self.elts + + def _infer(self, context=None): + yield self + + @cachedproperty + def _proxied(self): + builtins = MANAGER.astroid_cache[BUILTINS] + return builtins.getattr('frozenset')[0] + + +class Super(NodeNG): + """Proxy class over a super call. + + This class offers almost the same behaviour as Python's super, + which is MRO lookups for retrieving attributes from the parents. + + The *mro_pointer* is the place in the MRO from where we should + start looking, not counting it. *mro_type* is the object which + provides the MRO, it can be both a type or an instance. + *self_class* is the class where the super call is, while + *scope* is the function where the super call is. + """ + + def __init__(self, mro_pointer, mro_type, self_class, scope): + self.type = mro_type + self.mro_pointer = mro_pointer + self._class_based = False + self._self_class = self_class + self._scope = scope + self._model = { + '__thisclass__': self.mro_pointer, + '__self_class__': self._self_class, + '__self__': self.type, + '__class__': self._proxied, + } + + def _infer(self, context=None): + yield self + + def super_mro(self): + """Get the MRO which will be used to lookup attributes in this super.""" + if not isinstance(self.mro_pointer, ClassDef): + raise SuperArgumentTypeError("The first super argument must be type.") + + if isinstance(self.type, ClassDef): + # `super(type, type)`, most likely in a class method. 
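Super.super_mro() models what the builtin does: attribute lookup starts in the MRO of mro_type at the position just after mro_pointer, which is the mro[index + 1:] slice it returns. The builtin behaviour being mirrored, for reference:

class A(object):
    def who(self):
        return 'A'

class B(A):
    def who(self):
        return 'B'

class C(B):
    pass

assert C.__mro__ == (C, B, A, object)
# Lookup skips everything up to and including the first argument.
assert super(B, C()).who() == 'A'
assert super(C, C()).who() == 'B'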
+ self._class_based = True + mro_type = self.type + else: + mro_type = getattr(self.type, '_proxied', None) + if not isinstance(mro_type, (Instance, ClassDef)): + raise SuperArgumentTypeError("super(type, obj): obj must be an " + "instance or subtype of type") + + if not mro_type.newstyle: + raise SuperError("Unable to call super on old-style classes.") + + mro = mro_type.mro() + if self.mro_pointer not in mro: + raise SuperArgumentTypeError("super(type, obj): obj must be an " + "instance or subtype of type") + + index = mro.index(self.mro_pointer) + return mro[index + 1:] + + @cachedproperty + def _proxied(self): + builtins = MANAGER.astroid_cache[BUILTINS] + return builtins.getattr('super')[0] + + def pytype(self): + return '%s.super' % BUILTINS + + def display_type(self): + return 'Super of' + + @property + def name(self): + """Get the name of the MRO pointer.""" + return self.mro_pointer.name + + def igetattr(self, name, context=None): + """Retrieve the inferred values of the given attribute name.""" + + local_name = self._model.get(name) + if local_name: + yield local_name + return + + try: + mro = self.super_mro() + except (MroError, SuperError) as exc: + # Don't let invalid MROs or invalid super calls + # to leak out as is from this function. + six.raise_from(NotFoundError, exc) + + found = False + for cls in mro: + if name not in cls._locals: + continue + + found = True + for infered in _infer_stmts([cls[name]], context, frame=self): + if not isinstance(infered, FunctionDef): + yield infered + continue + + # We can obtain different descriptors from a super depending + # on what we are accessing and where the super call is. + if infered.type == 'classmethod': + yield BoundMethod(infered, cls) + elif self._scope.type == 'classmethod' and infered.type == 'method': + yield infered + elif self._class_based or infered.type == 'staticmethod': + yield infered + elif _is_property(infered): + # TODO: support other descriptors as well. + for value in infered.infer_call_result(self, context): + yield value + else: + yield BoundMethod(infered, cls) + + if not found: + raise NotFoundError(name) + + def getattr(self, name, context=None): + return list(self.igetattr(name, context=context)) diff --git a/pymode/libs/astroid/protocols.py b/pymode/libs/astroid/protocols.py index 4c11f9cf..87a6d4d2 100644 --- a/pymode/libs/astroid/protocols.py +++ b/pymode/libs/astroid/protocols.py @@ -19,28 +19,31 @@ where it makes sense. 
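The igetattr() logic above yields different objects depending on the descriptor it finds: a BoundMethod bound to the class for classmethods, the plain function for staticmethods, and a bound method otherwise. That mirrors how the builtin super resolves the same descriptors:

class Base(object):
    @classmethod
    def cmeth(cls):
        return cls.__name__

    @staticmethod
    def smeth():
        return 'static'

    def meth(self):
        return 'instance'

class Child(Base):
    pass

proxy = super(Child, Child())
assert proxy.cmeth() == 'Child'     # classmethod: bound to the class
assert proxy.smeth() == 'static'    # staticmethod: plain function
assert proxy.meth() == 'instance'   # regular method: bound to the instance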
""" -__doctype__ = "restructuredtext en" import collections - -from astroid.exceptions import InferenceError, NoDefault, NotFoundError -from astroid.node_classes import unpack_infer -from astroid.bases import InferenceContext, copy_context, \ - raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES -from astroid.nodes import const_factory +import operator +import sys + +from astroid import arguments +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import node_classes from astroid import nodes +from astroid import util BIN_OP_METHOD = {'+': '__add__', '-': '__sub__', '/': '__div__', '//': '__floordiv__', '*': '__mul__', - '**': '__power__', + '**': '__pow__', '%': '__mod__', '&': '__and__', '|': '__or__', '^': '__xor__', '<<': '__lshift__', '>>': '__rshift__', + '@': '__matmul__' } UNARY_OP_METHOD = {'+': '__pos__', @@ -53,7 +56,7 @@ def tl_infer_unary_op(self, operator): if operator == 'not': - return const_factory(not bool(self.elts)) + return node_classes.const_factory(not bool(self.elts)) raise TypeError() # XXX log unsupported operation nodes.Tuple.infer_unary_op = tl_infer_unary_op nodes.List.infer_unary_op = tl_infer_unary_op @@ -61,19 +64,19 @@ def tl_infer_unary_op(self, operator): def dict_infer_unary_op(self, operator): if operator == 'not': - return const_factory(not bool(self.items)) + return node_classes.const_factory(not bool(self.items)) raise TypeError() # XXX log unsupported operation nodes.Dict.infer_unary_op = dict_infer_unary_op def const_infer_unary_op(self, operator): if operator == 'not': - return const_factory(not self.value) + return node_classes.const_factory(not self.value) # XXX log potentially raised TypeError elif operator == '+': - return const_factory(+self.value) + return node_classes.const_factory(+self.value) else: # operator == '-': - return const_factory(-self.value) + return node_classes.const_factory(-self.value) nodes.Const.infer_unary_op = const_infer_unary_op @@ -92,17 +95,23 @@ def const_infer_unary_op(self, operator): '<<': lambda a, b: a << b, '>>': lambda a, b: a >> b, } + +if sys.version_info >= (3, 5): + # MatMult is available since Python 3.5+. 
+ BIN_OP_IMPL['@'] = operator.matmul + for key, impl in list(BIN_OP_IMPL.items()): BIN_OP_IMPL[key+'='] = impl -def const_infer_binary_op(self, operator, other, context): +def const_infer_binary_op(self, binop, other, context): + operator = binop.op for other in other.infer(context): if isinstance(other, nodes.Const): try: impl = BIN_OP_IMPL[operator] try: - yield const_factory(impl(self.value, other.value)) + yield node_classes.const_factory(impl(self.value, other.value)) except Exception: # ArithmeticError is not enough: float >> float is a TypeError # TODO : let pylint know about the problem @@ -110,68 +119,88 @@ def const_infer_binary_op(self, operator, other, context): except TypeError: # XXX log TypeError continue - elif other is YES: + elif other is util.YES: yield other else: try: - for val in other.infer_binary_op(operator, self, context): + for val in other.infer_binary_op(binop, self, context): yield val except AttributeError: - yield YES -nodes.Const.infer_binary_op = yes_if_nothing_infered(const_infer_binary_op) + yield util.YES +nodes.Const.infer_binary_op = bases.yes_if_nothing_inferred(const_infer_binary_op) + + +def _multiply_seq_by_int(self, binop, other, context): + node = self.__class__() + node.parent = binop + elts = [] + for elt in self.elts: + infered = util.safe_infer(elt, context) + if infered is None: + infered = util.YES + elts.append(infered) + node.elts = elts * other.value + return node -def tl_infer_binary_op(self, operator, other, context): + +def _filter_uninferable_nodes(elts, context): + for elt in elts: + if elt is util.YES: + yield elt + else: + for inferred in elt.infer(context): + yield inferred + + +def tl_infer_binary_op(self, binop, other, context): + operator = binop.op for other in other.infer(context): if isinstance(other, self.__class__) and operator == '+': node = self.__class__() - elts = [n for elt in self.elts for n in elt.infer(context) - if not n is YES] - elts += [n for elt in other.elts for n in elt.infer(context) - if not n is YES] + node.parent = binop + elts = list(_filter_uninferable_nodes(self.elts, context)) + elts += list(_filter_uninferable_nodes(other.elts, context)) node.elts = elts yield node elif isinstance(other, nodes.Const) and operator == '*': if not isinstance(other.value, int): - yield YES + yield util.YES continue - node = self.__class__() - elts = [n for elt in self.elts for n in elt.infer(context) - if not n is YES] * other.value - node.elts = elts - yield node - elif isinstance(other, Instance) and not isinstance(other, nodes.Const): - yield YES + yield _multiply_seq_by_int(self, binop, other, context) + elif isinstance(other, bases.Instance) and not isinstance(other, nodes.Const): + yield util.YES # XXX else log TypeError -nodes.Tuple.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op) -nodes.List.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op) +nodes.Tuple.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) +nodes.List.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) -def dict_infer_binary_op(self, operator, other, context): +def dict_infer_binary_op(self, binop, other, context): for other in other.infer(context): - if isinstance(other, Instance) and isinstance(other._proxied, nodes.Class): - yield YES + if isinstance(other, bases.Instance) and isinstance(other._proxied, nodes.ClassDef): + yield util.YES # XXX else log TypeError -nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op) +nodes.Dict.infer_binary_op = 
bases.yes_if_nothing_inferred(dict_infer_binary_op) -def instance_infer_binary_op(self, operator, other, context): +def instance_infer_binary_op(self, binop, other, context): + operator = binop.op try: methods = self.getattr(BIN_OP_METHOD[operator]) - except (NotFoundError, KeyError): + except (exceptions.NotFoundError, KeyError): # Unknown operator - yield YES + yield util.YES else: for method in methods: - if not isinstance(method, nodes.Function): + if not isinstance(method, nodes.FunctionDef): continue for result in method.infer_call_result(self, context): - if result is not YES: + if result is not util.YES: yield result # We are interested only in the first infered method, # don't go looking in the rest of the methods of the ancestors. break -Instance.infer_binary_op = yes_if_nothing_infered(instance_infer_binary_op) +bases.Instance.infer_binary_op = bases.yes_if_nothing_inferred(instance_infer_binary_op) # assignment ################################################################## @@ -192,7 +221,7 @@ def _resolve_looppart(parts, asspath, context): asspath = asspath[:] index = asspath.pop(0) for part in parts: - if part is YES: + if part is util.YES: continue # XXX handle __iter__ and log potentially detected errors if not hasattr(part, 'itered'): @@ -212,104 +241,125 @@ def _resolve_looppart(parts, asspath, context): # we achieved to resolved the assignment path, # don't infer the last part yield assigned - elif assigned is YES: + elif assigned is util.YES: break else: # we are not yet on the last part of the path # search on each possibly inferred value try: - for infered in _resolve_looppart(assigned.infer(context), + for inferred in _resolve_looppart(assigned.infer(context), asspath, context): - yield infered - except InferenceError: + yield inferred + except exceptions.InferenceError: break -def for_assigned_stmts(self, node, context=None, asspath=None): +@bases.raise_if_nothing_inferred +def for_assigned_stmts(self, node=None, context=None, asspath=None): if asspath is None: for lst in self.iter.infer(context): if isinstance(lst, (nodes.Tuple, nodes.List)): for item in lst.elts: yield item else: - for infered in _resolve_looppart(self.iter.infer(context), + for inferred in _resolve_looppart(self.iter.infer(context), asspath, context): - yield infered + yield inferred -nodes.For.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts) -nodes.Comprehension.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts) +nodes.For.assigned_stmts = for_assigned_stmts +nodes.Comprehension.assigned_stmts = for_assigned_stmts -def mulass_assigned_stmts(self, node, context=None, asspath=None): +def sequence_assigned_stmts(self, node=None, context=None, asspath=None): if asspath is None: asspath = [] - asspath.insert(0, self.elts.index(node)) - return self.parent.assigned_stmts(self, context, asspath) -nodes.Tuple.assigned_stmts = mulass_assigned_stmts -nodes.List.assigned_stmts = mulass_assigned_stmts + try: + index = self.elts.index(node) + except ValueError: + util.reraise(exceptions.InferenceError( + 'Tried to retrieve a node {node!r} which does not exist', + node=self, assign_path=asspath, context=context)) + + asspath.insert(0, index) + return self.parent.assigned_stmts(node=self, context=context, asspath=asspath) + +nodes.Tuple.assigned_stmts = sequence_assigned_stmts +nodes.List.assigned_stmts = sequence_assigned_stmts -def assend_assigned_stmts(self, context=None): - return self.parent.assigned_stmts(self, context=context) -nodes.AssName.assigned_stmts = 
assend_assigned_stmts -nodes.AssAttr.assigned_stmts = assend_assigned_stmts +def assend_assigned_stmts(self, node=None, context=None, asspath=None): + return self.parent.assigned_stmts(node=self, context=context) +nodes.AssignName.assigned_stmts = assend_assigned_stmts +nodes.AssignAttr.assigned_stmts = assend_assigned_stmts def _arguments_infer_argname(self, name, context): # arguments information may be missing, in which case we can't do anything # more if not (self.args or self.vararg or self.kwarg): - yield YES + yield util.YES return # first argument of instance/class method if self.args and getattr(self.args[0], 'name', None) == name: functype = self.parent.type if functype == 'method': - yield Instance(self.parent.parent.frame()) + yield bases.Instance(self.parent.parent.frame()) return if functype == 'classmethod': yield self.parent.parent.frame() return + + if context and context.callcontext: + call_site = arguments.CallSite(context.callcontext) + for value in call_site.infer_argument(self.parent, name, context): + yield value + return + + # TODO: just provide the type here, no need to have an empty Dict. if name == self.vararg: - vararg = const_factory(()) + vararg = node_classes.const_factory(()) vararg.parent = self yield vararg return if name == self.kwarg: - kwarg = const_factory({}) + kwarg = node_classes.const_factory({}) kwarg.parent = self yield kwarg return # if there is a default value, yield it. And then yield YES to reflect # we can't guess given argument value try: - context = copy_context(context) - for infered in self.default_value(name).infer(context): - yield infered - yield YES - except NoDefault: - yield YES + context = contextmod.copy_context(context) + for inferred in self.default_value(name).infer(context): + yield inferred + yield util.YES + except exceptions.NoDefault: + yield util.YES -def arguments_assigned_stmts(self, node, context, asspath=None): +def arguments_assigned_stmts(self, node=None, context=None, asspath=None): if context.callcontext: # reset call context/name callcontext = context.callcontext - context = copy_context(context) + context = contextmod.copy_context(context) context.callcontext = None - return callcontext.infer_argument(self.parent, node.name, context) + args = arguments.CallSite(callcontext) + return args.infer_argument(self.parent, node.name, context) return _arguments_infer_argname(self, node.name, context) + nodes.Arguments.assigned_stmts = arguments_assigned_stmts -def assign_assigned_stmts(self, node, context=None, asspath=None): +@bases.raise_if_nothing_inferred +def assign_assigned_stmts(self, node=None, context=None, asspath=None): if not asspath: yield self.value return - for infered in _resolve_asspart(self.value.infer(context), asspath, context): - yield infered -nodes.Assign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts) -nodes.AugAssign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts) + for inferred in _resolve_asspart(self.value.infer(context), asspath, context): + yield inferred + +nodes.Assign.assigned_stmts = assign_assigned_stmts +nodes.AugAssign.assigned_stmts = assign_assigned_stmts def _resolve_asspart(parts, asspath, context): @@ -328,28 +378,30 @@ def _resolve_asspart(parts, asspath, context): # we achieved to resolved the assignment path, don't infer the # last part yield assigned - elif assigned is YES: + elif assigned is util.YES: return else: # we are not yet on the last part of the path search on each # possibly inferred value try: - for infered in 
_resolve_asspart(assigned.infer(context), + for inferred in _resolve_asspart(assigned.infer(context), asspath, context): - yield infered - except InferenceError: + yield inferred + except exceptions.InferenceError: return -def excepthandler_assigned_stmts(self, node, context=None, asspath=None): - for assigned in unpack_infer(self.type): - if isinstance(assigned, nodes.Class): - assigned = Instance(assigned) +@bases.raise_if_nothing_inferred +def excepthandler_assigned_stmts(self, node=None, context=None, asspath=None): + for assigned in node_classes.unpack_infer(self.type): + if isinstance(assigned, nodes.ClassDef): + assigned = bases.Instance(assigned) yield assigned -nodes.ExceptHandler.assigned_stmts = raise_if_nothing_infered(excepthandler_assigned_stmts) +nodes.ExceptHandler.assigned_stmts = bases.raise_if_nothing_inferred(excepthandler_assigned_stmts) -def with_assigned_stmts(self, node, context=None, asspath=None): +@bases.raise_if_nothing_inferred +def with_assigned_stmts(self, node=None, context=None, asspath=None): if asspath is None: for _, vars in self.items: if vars is None: @@ -358,13 +410,14 @@ def with_assigned_stmts(self, node, context=None, asspath=None): if isinstance(lst, (nodes.Tuple, nodes.List)): for item in lst.nodes: yield item -nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts) +nodes.With.assigned_stmts = with_assigned_stmts +@bases.yes_if_nothing_inferred def starred_assigned_stmts(self, node=None, context=None, asspath=None): stmt = self.statement() if not isinstance(stmt, (nodes.Assign, nodes.For)): - raise InferenceError() + raise exceptions.InferenceError() if isinstance(stmt, nodes.Assign): value = stmt.value @@ -372,24 +425,24 @@ def starred_assigned_stmts(self, node=None, context=None, asspath=None): if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1: # Too many starred arguments in the expression. - raise InferenceError() + raise exceptions.InferenceError() if context is None: - context = InferenceContext() + context = contextmod.InferenceContext() try: rhs = next(value.infer(context)) - except InferenceError: - yield YES + except exceptions.InferenceError: + yield util.YES return - if rhs is YES or not hasattr(rhs, 'elts'): + if rhs is util.YES or not hasattr(rhs, 'elts'): # Not interested in inferred values without elts. - yield YES + yield util.YES return elts = collections.deque(rhs.elts[:]) if len(lhs.elts) > len(rhs.elts): # a, *b, c = (1, 2) - raise InferenceError() + raise exceptions.InferenceError() # Unpack iteratively the values from the rhs of the assignment, # until the find the starred node. 
What will remain will @@ -408,8 +461,10 @@ def starred_assigned_stmts(self, node=None, context=None, asspath=None): elts.pop() continue # We're done - for elt in elts: - yield elt + packed = nodes.List() + packed.elts = elts + packed.parent = self + yield packed break nodes.Starred.assigned_stmts = starred_assigned_stmts diff --git a/pymode/libs/astroid/raw_building.py b/pymode/libs/astroid/raw_building.py index 99a026a7..aaaf52f2 100644 --- a/pymode/libs/astroid/raw_building.py +++ b/pymode/libs/astroid/raw_building.py @@ -19,9 +19,8 @@ (build_* functions) or from living object (object_build_* functions) """ -__docformat__ = "restructuredtext en" - import sys +import os from os.path import abspath from inspect import (getargspec, isdatadescriptor, isfunction, ismethod, ismethoddescriptor, isclass, isbuiltin, ismodule) @@ -35,6 +34,8 @@ MANAGER = AstroidManager() _CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types +_JYTHON = os.name == 'java' +_BUILTINS = vars(six.moves.builtins) def _io_discrepancy(member): # _io module names itself `io`: http://bugs.python.org/issue18602 @@ -48,6 +49,18 @@ def _attach_local_node(parent, node, name): node.name = name # needed by add_local_node parent.add_local_node(node) + +def _add_dunder_class(func, member): + """Add a __class__ member to the given func node, if we can determine it.""" + python_cls = member.__class__ + cls_name = getattr(python_cls, '__name__', None) + if not cls_name: + return + bases = [ancestor.__name__ for ancestor in python_cls.__bases__] + ast_klass = build_class(cls_name, bases, python_cls.__doc__) + func._instance_attrs['__class__'] = [ast_klass] + + _marker = object() def attach_dummy_node(node, name, object=_marker): @@ -170,6 +183,7 @@ def object_build_methoddescriptor(node, member, localname): # and empty argument list func.args.args = None node.add_local_node(func, localname) + _add_dunder_class(func, member) def _base_class_object_build(node, member, basenames, name=None, localname=None): """create astroid for a living class object, with a given set of base names @@ -196,7 +210,7 @@ def _base_class_object_build(node, member, basenames, name=None, localname=None) valnode.object = obj valnode.parent = klass valnode.lineno = 1 - klass.instance_attrs[name] = [valnode] + klass._instance_attrs[name] = [valnode] return klass @@ -228,7 +242,7 @@ def inspect_build(self, module, modname=None, path=None): except AttributeError: # in jython, java modules have no __doc__ (see #109562) node = build_module(modname) - node.file = node.path = path and abspath(path) or path + node.source_file = path and abspath(path) or path node.name = modname MANAGER.cache_module(node) node.package = hasattr(module, '__path__') @@ -273,7 +287,7 @@ def object_build(self, node, obj): continue if member in self._done: class_node = self._done[member] - if not class_node in node.locals.get(name, ()): + if not class_node in node._locals.get(name, ()): node.add_local_node(class_node, name) else: class_node = object_build_class(node, member, name) @@ -307,7 +321,8 @@ def imported_member(self, node, member, name): traceback.print_exc() modname = None if modname is None: - if name in ('__new__', '__subclasshook__'): + if (name in ('__new__', '__subclasshook__') + or (name in _BUILTINS and _JYTHON)): # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14) # >>> print object.__new__.__module__ # None @@ -315,7 +330,13 @@ def imported_member(self, node, member, name): else: attach_dummy_node(node, name, member) return True - if {'gtk': 
'gtk._gtk'}.get(modname, modname) != self._module.__name__: + + real_name = { + 'gtk': 'gtk_gtk', + '_io': 'io', + }.get(modname, modname) + + if real_name != self._module.__name__: # check if it sounds valid and then add an import node, else use a # dummy node try: @@ -337,13 +358,16 @@ def _astroid_bootstrapping(astroid_builtin=None): # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const if astroid_builtin is None: - from logilab.common.compat import builtins + from six.moves import builtins astroid_builtin = Astroid_BUILDER.inspect_build(builtins) for cls, node_cls in CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') proxy.parent = astroid_builtin + elif cls is type(NotImplemented): + proxy = build_class('NotImplementedType') + proxy.parent = astroid_builtin else: proxy = astroid_builtin.getattr(cls.__name__)[0] if cls in (dict, list, set, tuple): diff --git a/pymode/libs/astroid/rebuilder.py b/pymode/libs/astroid/rebuilder.py index 013479a8..859b8280 100644 --- a/pymode/libs/astroid/rebuilder.py +++ b/pymode/libs/astroid/rebuilder.py @@ -20,10 +20,10 @@ """ import sys +import _ast from _ast import ( - Expr as Discard, Str, # binary operators - Add, BinOp, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, + Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, LShift, RShift, # logical operators And, Or, @@ -50,6 +50,9 @@ LShift: '<<', RShift: '>>', } +if sys.version_info >= (3, 5): + from _ast import MatMult + _BIN_OP_CLASSES[MatMult] = '@' _BOOL_OP_CLASSES = {And: 'and', Or: 'or', @@ -79,19 +82,11 @@ } REDIRECT = {'arguments': 'Arguments', - 'Attribute': 'Getattr', 'comprehension': 'Comprehension', - 'Call': 'CallFunc', - 'ClassDef': 'Class', "ListCompFor": 'Comprehension', "GenExprFor": 'Comprehension', 'excepthandler': 'ExceptHandler', - 'Expr': 'Discard', - 'FunctionDef': 'Function', - 'GeneratorExp': 'GenExpr', - 'ImportFrom': 'From', 'keyword': 'Keyword', - 'Repr': 'Backquote', } PY3K = sys.version_info >= (3, 0) PY34 = sys.version_info >= (3, 4) @@ -99,7 +94,7 @@ def _init_set_doc(node, newnode): newnode.doc = None try: - if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str): + if isinstance(node.body[0], _ast.Expr) and isinstance(node.body[0].value, _ast.Str): newnode.doc = node.body[0].value.s node.body = node.body[1:] @@ -122,9 +117,21 @@ def _create_yield_node(node, parent, rebuilder, factory): newnode = factory() _lineno_parent(node, newnode, parent) if node.value is not None: - newnode.value = rebuilder.visit(node.value, newnode) + newnode.value = rebuilder.visit(node.value, newnode, None) return newnode +def _visit_or_none(node, attr, visitor, parent, assign_ctx, visit='visit', + **kws): + """If the given node has an attribute, visits the attribute, and + otherwise returns None. 
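_init_set_doc above keys off the same shape the compiler uses for docstrings: a body whose first statement is a bare expression wrapping a string literal, which is then stripped from the body. The equivalent observation with the standard ast module:

import ast

mod = ast.parse('"""module docs"""\nx = 1\n')
assert isinstance(mod.body[0], ast.Expr)
assert ast.get_docstring(mod) == 'module docs'
# The docstring statement is still part of the body, which is why the
# hunk above slices it off with node.body = node.body[1:].
assert len(mod.body) == 2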
+ + """ + value = getattr(node, attr, None) + if value: + return getattr(visitor, visit)(value, parent, assign_ctx, **kws) + else: + return None + class TreeRebuilder(object): """Rebuilds the _ast tree to become an Astroid tree""" @@ -133,10 +140,9 @@ def __init__(self, manager): self._manager = manager self.asscontext = None self._global_names = [] - self._from_nodes = [] + self._import_from_nodes = [] self._delayed_assattr = [] self._visit_meths = {} - self._transform = manager.transform self._peepholer = astpeephole.ASTPeepholeOptimizer() def visit_module(self, node, modname, modpath, package): @@ -146,10 +152,10 @@ def visit_module(self, node, modname, modpath, package): newnode.parent = None _init_set_doc(node, newnode) newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.file = newnode.path = modpath - return self._transform(newnode) + newnode.source_file = modpath + return newnode - def visit(self, node, parent): + def visit(self, node, parent, assign_ctx=None): cls = node.__class__ if cls in self._visit_meths: visit_method = self._visit_meths[cls] @@ -158,7 +164,7 @@ def visit(self, node, parent): visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower() visit_method = getattr(self, visit_name) self._visit_meths[cls] = visit_method - return self._transform(visit_method(node, parent)) + return visit_method(node, parent, assign_ctx) def _save_assignment(self, node, name=None): """save assignement situation since node.parent is not available yet""" @@ -167,15 +173,14 @@ def _save_assignment(self, node, name=None): else: node.parent.set_local(node.name, node) - - def visit_arguments(self, node, parent): + def visit_arguments(self, node, parent, assign_ctx=None): """visit a Arguments node by returning a fresh instance of it""" newnode = new.Arguments() newnode.parent = parent - self.asscontext = "Ass" - newnode.args = [self.visit(child, newnode) for child in node.args] - self.asscontext = None - newnode.defaults = [self.visit(child, newnode) for child in node.defaults] + newnode.args = [self.visit(child, newnode, "Assign") + for child in node.args] + newnode.defaults = [self.visit(child, newnode, assign_ctx) + for child in node.defaults] newnode.kwonlyargs = [] newnode.kw_defaults = [] vararg, kwarg = node.vararg, node.kwarg @@ -185,21 +190,21 @@ def visit_arguments(self, node, parent): if PY34: if vararg.annotation: newnode.varargannotation = self.visit(vararg.annotation, - newnode) + newnode, assign_ctx) vararg = vararg.arg elif PY3K and node.varargannotation: newnode.varargannotation = self.visit(node.varargannotation, - newnode) + newnode, assign_ctx) if kwarg: if PY34: if kwarg.annotation: newnode.kwargannotation = self.visit(kwarg.annotation, - newnode) + newnode, assign_ctx) kwarg = kwarg.arg elif PY3K: if node.kwargannotation: newnode.kwargannotation = self.visit(node.kwargannotation, - newnode) + newnode, assign_ctx) newnode.vararg = vararg newnode.kwarg = kwarg # save argument names in locals: @@ -209,81 +214,59 @@ def visit_arguments(self, node, parent): newnode.parent.set_local(kwarg, newnode) return newnode - def visit_assattr(self, node, parent): + def visit_assignattr(self, node, parent, assign_ctx=None): """visit a AssAttr node by returning a fresh instance of it""" - assc, self.asscontext = self.asscontext, None - newnode = new.AssAttr() + newnode = new.AssignAttr() _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.expr, newnode) - self.asscontext = assc + newnode.expr = self.visit(node.expr, newnode, assign_ctx) 
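visit_arguments above needs separate PY34/PY3K branches because, starting with Python 3.4, every parameter (including the *args/**kwargs ones) is an _ast.arg object that carries its own annotation. How that looks through the standard ast module:

import ast
import sys

if sys.version_info >= (3, 4):
    fn = ast.parse("def f(x: int, *rest, **extra): pass").body[0]
    first = fn.args.args[0]
    assert first.arg == 'x'
    assert isinstance(first.annotation, ast.Name)
    # vararg/kwarg are ast.arg objects too, hence the vararg.annotation
    # and vararg.arg unpacking in the hunk above.
    assert fn.args.vararg.arg == 'rest'
    assert fn.args.kwarg.arg == 'extra'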
self._delayed_assattr.append(newnode) return newnode - def visit_assert(self, node, parent): + def visit_assert(self, node, parent, assign_ctx=None): """visit a Assert node by returning a fresh instance of it""" newnode = new.Assert() _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) + newnode.test = self.visit(node.test, newnode, assign_ctx) if node.msg is not None: - newnode.fail = self.visit(node.msg, newnode) + newnode.fail = self.visit(node.msg, newnode, assign_ctx) return newnode - def visit_assign(self, node, parent): + def visit_assign(self, node, parent, assign_ctx=None): """visit a Assign node by returning a fresh instance of it""" newnode = new.Assign() _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.targets = [self.visit(child, newnode) for child in node.targets] - self.asscontext = None - newnode.value = self.visit(node.value, newnode) - # set some function or metaclass infos XXX explain ? - klass = newnode.parent.frame() - if (isinstance(klass, new.Class) - and isinstance(newnode.value, new.CallFunc) - and isinstance(newnode.value.func, new.Name)): - func_name = newnode.value.func.name - for ass_node in newnode.targets: - try: - meth = klass[ass_node.name] - if isinstance(meth, new.Function): - if func_name in ('classmethod', 'staticmethod'): - meth.type = func_name - elif func_name == 'classproperty': # see lgc.decorators - meth.type = 'classmethod' - meth.extra_decorators.append(newnode.value) - except (AttributeError, KeyError): - continue - return newnode - - def visit_assname(self, node, parent, node_name=None): + newnode.targets = [self.visit(child, newnode, "Assign") + for child in node.targets] + newnode.value = self.visit(node.value, newnode, None) + return newnode + + def visit_assignname(self, node, parent, assign_ctx=None, node_name=None): '''visit a node and return a AssName node''' - newnode = new.AssName() + newnode = new.AssignName() _set_infos(node, newnode, parent) newnode.name = node_name self._save_assignment(newnode) return newnode - def visit_augassign(self, node, parent): + def visit_augassign(self, node, parent, assign_ctx=None): """visit a AugAssign node by returning a fresh instance of it""" newnode = new.AugAssign() _lineno_parent(node, newnode, parent) newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "=" - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.value = self.visit(node.value, newnode) + newnode.target = self.visit(node.target, newnode, "Assign") + newnode.value = self.visit(node.value, newnode, None) return newnode - def visit_backquote(self, node, parent): + def visit_repr(self, node, parent, assign_ctx=None): """visit a Backquote node by returning a fresh instance of it""" - newnode = new.Backquote() + newnode = new.Repr() _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_binop(self, node, parent): + def visit_binop(self, node, parent, assign_ctx=None): """visit a BinOp node by returning a fresh instance of it""" - if isinstance(node.left, BinOp) and self._manager.optimize_ast: + if isinstance(node.left, _ast.BinOp) and self._manager.optimize_ast: # Optimize BinOp operations in order to remove # redundant recursion. For instance, if the # following code is parsed in order to obtain @@ -296,264 +279,299 @@ def visit_binop(self, node, parent): # problem for the correctness of the program). 
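The peephole special case above exists because a chain like "a" + "b" + ... + "c" parses as one nested BinOp per operator, so a literal concatenation with thousands of pieces would otherwise be rebuilt through equally deep recursion. A quick way to see the nesting with the standard ast module:

import ast

tree = ast.parse('"a" + "b" + "c" + "d"', mode="eval").body
depth = 0
while isinstance(tree, ast.BinOp):
    depth += 1
    tree = tree.left
assert depth == 3   # three + operators, three nested BinOp levels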
# # ("a" + "b" + # one thousand more + "c") - newnode = self._peepholer.optimize_binop(node) - if newnode: - _lineno_parent(node, newnode, parent) - return newnode + optimized = self._peepholer.optimize_binop(node) + if optimized: + _lineno_parent(node, optimized, parent) + return optimized newnode = new.BinOp() _lineno_parent(node, newnode, parent) - newnode.left = self.visit(node.left, newnode) - newnode.right = self.visit(node.right, newnode) + newnode.left = self.visit(node.left, newnode, assign_ctx) + newnode.right = self.visit(node.right, newnode, assign_ctx) newnode.op = _BIN_OP_CLASSES[node.op.__class__] return newnode - def visit_boolop(self, node, parent): + def visit_boolop(self, node, parent, assign_ctx=None): """visit a BoolOp node by returning a fresh instance of it""" newnode = new.BoolOp() _lineno_parent(node, newnode, parent) - newnode.values = [self.visit(child, newnode) for child in node.values] + newnode.values = [self.visit(child, newnode, assign_ctx) + for child in node.values] newnode.op = _BOOL_OP_CLASSES[node.op.__class__] return newnode - def visit_break(self, node, parent): + def visit_break(self, node, parent, assign_ctx=None): """visit a Break node by returning a fresh instance of it""" newnode = new.Break() _set_infos(node, newnode, parent) return newnode - def visit_callfunc(self, node, parent): + def visit_call(self, node, parent, assign_ctx=None): """visit a CallFunc node by returning a fresh instance of it""" - newnode = new.CallFunc() - _lineno_parent(node, newnode, parent) - newnode.func = self.visit(node.func, newnode) - newnode.args = [self.visit(child, newnode) for child in node.args] - if node.starargs is not None: - newnode.starargs = self.visit(node.starargs, newnode) - if node.kwargs is not None: - newnode.kwargs = self.visit(node.kwargs, newnode) - for child in node.keywords: - newnode.args.append(self.visit(child, newnode)) + newnode = new.Call() + _lineno_parent(node, newnode, parent) + newnode.func = self.visit(node.func, newnode, assign_ctx) + args = [self.visit(child, newnode, assign_ctx) + for child in node.args] + + starargs = _visit_or_none(node, 'starargs', self, newnode, + assign_ctx) + kwargs = _visit_or_none(node, 'kwargs', self, newnode, + assign_ctx) + keywords = None + if node.keywords: + keywords = [self.visit(child, newnode, assign_ctx) + for child in node.keywords] + + if starargs: + new_starargs = new.Starred() + new_starargs.col_offset = starargs.col_offset + new_starargs.lineno = starargs.lineno + new_starargs.parent = starargs.parent + new_starargs.value = starargs + args.append(new_starargs) + if kwargs: + new_kwargs = new.Keyword() + new_kwargs.arg = None + new_kwargs.col_offset = kwargs.col_offset + new_kwargs.lineno = kwargs.lineno + new_kwargs.parent = kwargs.parent + new_kwargs.value = kwargs + if keywords: + keywords.append(new_kwargs) + else: + keywords = [new_kwargs] + + newnode.args = args + newnode.keywords = keywords return newnode - def visit_class(self, node, parent): + def visit_classdef(self, node, parent, assign_ctx=None): """visit a Class node to become astroid""" - newnode = new.Class(node.name, None) + newnode = new.ClassDef(node.name, None) _lineno_parent(node, newnode, parent) _init_set_doc(node, newnode) - newnode.bases = [self.visit(child, newnode) for child in node.bases] - newnode.body = [self.visit(child, newnode) for child in node.body] - if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6 - newnode.decorators = self.visit_decorators(node, newnode) + newnode.bases = 
[self.visit(child, newnode, assign_ctx) + for child in node.bases] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + if node.decorator_list: + newnode.decorators = self.visit_decorators(node, newnode, assign_ctx) newnode.parent.frame().set_local(newnode.name, newnode) return newnode - def visit_const(self, node, parent): + def visit_const(self, node, parent, assign_ctx=None): """visit a Const node by returning a fresh instance of it""" newnode = new.Const(node.value) _set_infos(node, newnode, parent) return newnode - def visit_continue(self, node, parent): + def visit_continue(self, node, parent, assign_ctx=None): """visit a Continue node by returning a fresh instance of it""" newnode = new.Continue() _set_infos(node, newnode, parent) return newnode - def visit_compare(self, node, parent): + def visit_compare(self, node, parent, assign_ctx=None): """visit a Compare node by returning a fresh instance of it""" newnode = new.Compare() _lineno_parent(node, newnode, parent) - newnode.left = self.visit(node.left, newnode) - newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode)) + newnode.left = self.visit(node.left, newnode, assign_ctx) + newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode, assign_ctx)) for (op, expr) in zip(node.ops, node.comparators)] return newnode - def visit_comprehension(self, node, parent): + def visit_comprehension(self, node, parent, assign_ctx=None): """visit a Comprehension node by returning a fresh instance of it""" newnode = new.Comprehension() newnode.parent = parent - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.iter = self.visit(node.iter, newnode) - newnode.ifs = [self.visit(child, newnode) for child in node.ifs] + newnode.target = self.visit(node.target, newnode, 'Assign') + newnode.iter = self.visit(node.iter, newnode, None) + newnode.ifs = [self.visit(child, newnode, None) + for child in node.ifs] return newnode - def visit_decorators(self, node, parent): + def visit_decorators(self, node, parent, assign_ctx=None): """visit a Decorators node by returning a fresh instance of it""" # /!\ node is actually a _ast.Function node while # parent is a astroid.nodes.Function node newnode = new.Decorators() _lineno_parent(node, newnode, parent) - if 'decorators' in node._fields: # py < 2.6, i.e. 
2.5 - decorators = node.decorators - else: - decorators = node.decorator_list - newnode.nodes = [self.visit(child, newnode) for child in decorators] + decorators = node.decorator_list + newnode.nodes = [self.visit(child, newnode, assign_ctx) + for child in decorators] return newnode - def visit_delete(self, node, parent): + def visit_delete(self, node, parent, assign_ctx=None): """visit a Delete node by returning a fresh instance of it""" newnode = new.Delete() _lineno_parent(node, newnode, parent) - self.asscontext = "Del" - newnode.targets = [self.visit(child, newnode) for child in node.targets] - self.asscontext = None + newnode.targets = [self.visit(child, newnode, 'Del') + for child in node.targets] return newnode - def visit_dict(self, node, parent): + def _visit_dict_items(self, node, parent, newnode, assign_ctx): + for key, value in zip(node.keys, node.values): + rebuilt_value = self.visit(value, newnode, assign_ctx) + if not key: + # Python 3.5 and extended unpacking + rebuilt_key = new.DictUnpack() + rebuilt_key.lineno = rebuilt_value.lineno + rebuilt_key.col_offset = rebuilt_value.col_offset + rebuilt_key.parent = rebuilt_value.parent + else: + rebuilt_key = self.visit(key, newnode, assign_ctx) + yield rebuilt_key, rebuilt_value + + def visit_dict(self, node, parent, assign_ctx=None): """visit a Dict node by returning a fresh instance of it""" newnode = new.Dict() _lineno_parent(node, newnode, parent) - newnode.items = [(self.visit(key, newnode), self.visit(value, newnode)) - for key, value in zip(node.keys, node.values)] + newnode.items = list(self._visit_dict_items(node, parent, newnode, assign_ctx)) return newnode - def visit_dictcomp(self, node, parent): + def visit_dictcomp(self, node, parent, assign_ctx=None): """visit a DictComp node by returning a fresh instance of it""" newnode = new.DictComp() _lineno_parent(node, newnode, parent) - newnode.key = self.visit(node.key, newnode) - newnode.value = self.visit(node.value, newnode) - newnode.generators = [self.visit(child, newnode) + newnode.key = self.visit(node.key, newnode, assign_ctx) + newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) for child in node.generators] return newnode - def visit_discard(self, node, parent): + def visit_expr(self, node, parent, assign_ctx=None): """visit a Discard node by returning a fresh instance of it""" - newnode = new.Discard() + newnode = new.Expr() _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_ellipsis(self, node, parent): + def visit_ellipsis(self, node, parent, assign_ctx=None): """visit an Ellipsis node by returning a fresh instance of it""" newnode = new.Ellipsis() _set_infos(node, newnode, parent) return newnode - def visit_emptynode(self, node, parent): + def visit_emptynode(self, node, parent, assign_ctx=None): """visit an EmptyNode node by returning a fresh instance of it""" newnode = new.EmptyNode() _set_infos(node, newnode, parent) return newnode - def visit_excepthandler(self, node, parent): + def visit_excepthandler(self, node, parent, assign_ctx=None): """visit an ExceptHandler node by returning a fresh instance of it""" newnode = new.ExceptHandler() _lineno_parent(node, newnode, parent) if node.type is not None: - newnode.type = self.visit(node.type, newnode) + newnode.type = self.visit(node.type, newnode, assign_ctx) if node.name is not None: # /!\ node.name can be a 
tuple - self.asscontext = "Ass" - newnode.name = self.visit(node.name, newnode) - self.asscontext = None - newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.name = self.visit(node.name, newnode, 'Assign') + newnode.body = [self.visit(child, newnode, None) + for child in node.body] return newnode - def visit_exec(self, node, parent): + def visit_exec(self, node, parent, assign_ctx=None): """visit an Exec node by returning a fresh instance of it""" newnode = new.Exec() _lineno_parent(node, newnode, parent) newnode.expr = self.visit(node.body, newnode) if node.globals is not None: - newnode.globals = self.visit(node.globals, newnode) + newnode.globals = self.visit(node.globals, newnode, + assign_ctx) if node.locals is not None: - newnode.locals = self.visit(node.locals, newnode) + newnode.locals = self.visit(node.locals, newnode, + assign_ctx) return newnode - def visit_extslice(self, node, parent): + def visit_extslice(self, node, parent, assign_ctx=None): """visit an ExtSlice node by returning a fresh instance of it""" newnode = new.ExtSlice() newnode.parent = parent - newnode.dims = [self.visit(dim, newnode) for dim in node.dims] + newnode.dims = [self.visit(dim, newnode, assign_ctx) + for dim in node.dims] return newnode - def visit_for(self, node, parent): + def _visit_for(self, cls, node, parent, assign_ctx=None): """visit a For node by returning a fresh instance of it""" - newnode = new.For() + newnode = cls() _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.iter = self.visit(node.iter, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] + newnode.target = self.visit(node.target, newnode, "Assign") + newnode.iter = self.visit(node.iter, newnode, None) + newnode.body = [self.visit(child, newnode, None) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, None) + for child in node.orelse] return newnode - def visit_from(self, node, parent): + def visit_for(self, node, parent, assign_ctx=None): + return self._visit_for(new.For, node, parent, + assign_ctx=assign_ctx) + def visit_importfrom(self, node, parent, assign_ctx=None): """visit a From node by returning a fresh instance of it""" names = [(alias.name, alias.asname) for alias in node.names] - newnode = new.From(node.module or '', names, node.level or None) + newnode = new.ImportFrom(node.module or '', names, node.level or None) _set_infos(node, newnode, parent) # store From names to add them to locals after building - self._from_nodes.append(newnode) + self._import_from_nodes.append(newnode) return newnode - def visit_function(self, node, parent): - """visit an Function node to become astroid""" + def _visit_functiondef(self, cls, node, parent, assign_ctx=None): + """visit an FunctionDef node to become astroid""" self._global_names.append({}) - newnode = new.Function(node.name, None) + newnode = cls(node.name, None) _lineno_parent(node, newnode, parent) _init_set_doc(node, newnode) - newnode.args = self.visit(node.args, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - if 'decorators' in node._fields: # py < 2.6 - attr = 'decorators' - else: - attr = 'decorator_list' - decorators = getattr(node, attr) + newnode.args = self.visit(node.args, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + decorators = 
node.decorator_list if decorators: - newnode.decorators = self.visit_decorators(node, newnode) + newnode.decorators = self.visit_decorators( + node, newnode, assign_ctx) if PY3K and node.returns: - newnode.returns = self.visit(node.returns, newnode) + newnode.returns = self.visit(node.returns, newnode, + assign_ctx) self._global_names.pop() frame = newnode.parent.frame() - if isinstance(frame, new.Class): - if newnode.name == '__new__': - newnode._type = 'classmethod' - else: - newnode._type = 'method' - if newnode.decorators is not None: - for decorator_expr in newnode.decorators.nodes: - if isinstance(decorator_expr, new.Name): - if decorator_expr.name in ('classmethod', 'staticmethod'): - newnode._type = decorator_expr.name - elif decorator_expr.name == 'classproperty': - newnode._type = 'classmethod' frame.set_local(newnode.name, newnode) return newnode - def visit_genexpr(self, node, parent): + def visit_functiondef(self, node, parent, assign_ctx=None): + return self._visit_functiondef(new.FunctionDef, node, parent, + assign_ctx=assign_ctx) + + def visit_generatorexp(self, node, parent, assign_ctx=None): """visit a GenExpr node by returning a fresh instance of it""" - newnode = new.GenExpr() + newnode = new.GeneratorExp() _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) for child in node.generators] + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) + for child in node.generators] return newnode - def visit_getattr(self, node, parent): + def visit_attribute(self, node, parent, assign_ctx=None): """visit a Getattr node by returning a fresh instance of it""" - if self.asscontext == "Del": + # pylint: disable=redefined-variable-type + if assign_ctx == "Del": # FIXME : maybe we should reintroduce and visit_delattr ? # for instance, deactivating asscontext newnode = new.DelAttr() - elif self.asscontext == "Ass": + elif assign_ctx == "Assign": # FIXME : maybe we should call visit_assattr ? - newnode = new.AssAttr() - self._delayed_assattr.append(newnode) + # Prohibit a local save if we are in an ExceptHandler. 
+ newnode = new.AssignAttr() + if not isinstance(parent, new.ExceptHandler): + self._delayed_assattr.append(newnode) else: - newnode = new.Getattr() + newnode = new.Attribute() _lineno_parent(node, newnode, parent) - asscontext, self.asscontext = self.asscontext, None - newnode.expr = self.visit(node.value, newnode) - self.asscontext = asscontext + newnode.expr = self.visit(node.value, newnode, None) newnode.attrname = node.attr return newnode - def visit_global(self, node, parent): + def visit_global(self, node, parent, assign_ctx=None): """visit an Global node to become astroid""" newnode = new.Global(node.names) _set_infos(node, newnode, parent) @@ -562,25 +580,27 @@ def visit_global(self, node, parent): self._global_names[-1].setdefault(name, []).append(newnode) return newnode - def visit_if(self, node, parent): + def visit_if(self, node, parent, assign_ctx=None): """visit a If node by returning a fresh instance of it""" newnode = new.If() _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] return newnode - def visit_ifexp(self, node, parent): + def visit_ifexp(self, node, parent, assign_ctx=None): """visit a IfExp node by returning a fresh instance of it""" newnode = new.IfExp() _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - newnode.body = self.visit(node.body, newnode) - newnode.orelse = self.visit(node.orelse, newnode) + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = self.visit(node.body, newnode, assign_ctx) + newnode.orelse = self.visit(node.orelse, newnode, assign_ctx) return newnode - def visit_import(self, node, parent): + def visit_import(self, node, parent, assign_ctx=None): """visit a Import node by returning a fresh instance of it""" newnode = new.Import() _set_infos(node, newnode, parent) @@ -591,53 +611,54 @@ def visit_import(self, node, parent): newnode.parent.set_local(name.split('.')[0], newnode) return newnode - def visit_index(self, node, parent): + def visit_index(self, node, parent, assign_ctx=None): """visit a Index node by returning a fresh instance of it""" newnode = new.Index() newnode.parent = parent - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_keyword(self, node, parent): + def visit_keyword(self, node, parent, assign_ctx=None): """visit a Keyword node by returning a fresh instance of it""" newnode = new.Keyword() newnode.parent = parent newnode.arg = node.arg - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_lambda(self, node, parent): + def visit_lambda(self, node, parent, assign_ctx=None): """visit a Lambda node by returning a fresh instance of it""" newnode = new.Lambda() _lineno_parent(node, newnode, parent) - newnode.args = self.visit(node.args, newnode) - newnode.body = self.visit(node.body, newnode) + newnode.args = self.visit(node.args, newnode, assign_ctx) + newnode.body = self.visit(node.body, newnode, assign_ctx) return newnode - def visit_list(self, node, parent): + def visit_list(self, node, parent, 
assign_ctx=None): """visit a List node by returning a fresh instance of it""" newnode = new.List() _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] return newnode - def visit_listcomp(self, node, parent): + def visit_listcomp(self, node, parent, assign_ctx=None): """visit a ListComp node by returning a fresh instance of it""" newnode = new.ListComp() _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) for child in node.generators] return newnode - def visit_name(self, node, parent): + def visit_name(self, node, parent, assign_ctx=None): """visit a Name node by returning a fresh instance of it""" # True and False can be assigned to something in py2x, so we have to # check first the asscontext - if self.asscontext == "Del": + # pylint: disable=redefined-variable-type + if assign_ctx == "Del": newnode = new.DelName() - elif self.asscontext is not None: # Ass - assert self.asscontext == "Ass" + elif assign_ctx is not None: # Ass newnode = new.AssName() elif node.id in CONST_NAME_TRANSFORMS: newnode = new.Const(CONST_NAME_TRANSFORMS[node.id]) @@ -648,279 +669,321 @@ def visit_name(self, node, parent): _lineno_parent(node, newnode, parent) newnode.name = node.id # XXX REMOVE me : - if self.asscontext in ('Del', 'Ass'): # 'Aug' ?? + if assign_ctx in ('Del', 'Assign'): # 'Aug' ?? self._save_assignment(newnode) return newnode - def visit_bytes(self, node, parent): + def visit_bytes(self, node, parent, assign_ctx=None): """visit a Bytes node by returning a fresh instance of Const""" newnode = new.Const(node.s) _set_infos(node, newnode, parent) return newnode - def visit_num(self, node, parent): + def visit_num(self, node, parent, assign_ctx=None): """visit a Num node by returning a fresh instance of Const""" newnode = new.Const(node.n) _set_infos(node, newnode, parent) return newnode - def visit_pass(self, node, parent): + def visit_pass(self, node, parent, assign_ctx=None): """visit a Pass node by returning a fresh instance of it""" newnode = new.Pass() _set_infos(node, newnode, parent) return newnode - def visit_str(self, node, parent): + def visit_str(self, node, parent, assign_ctx=None): """visit a Str node by returning a fresh instance of Const""" newnode = new.Const(node.s) _set_infos(node, newnode, parent) return newnode - def visit_print(self, node, parent): + def visit_print(self, node, parent, assign_ctx=None): """visit a Print node by returning a fresh instance of it""" newnode = new.Print() _lineno_parent(node, newnode, parent) newnode.nl = node.nl if node.dest is not None: - newnode.dest = self.visit(node.dest, newnode) - newnode.values = [self.visit(child, newnode) for child in node.values] + newnode.dest = self.visit(node.dest, newnode, assign_ctx) + newnode.values = [self.visit(child, newnode, assign_ctx) + for child in node.values] return newnode - def visit_raise(self, node, parent): + def visit_raise(self, node, parent, assign_ctx=None): """visit a Raise node by returning a fresh instance of it""" newnode = new.Raise() _lineno_parent(node, newnode, parent) if node.type is not None: - newnode.exc = self.visit(node.type, newnode) + newnode.exc = self.visit(node.type, newnode, assign_ctx) if node.inst is not None: - newnode.inst = 
self.visit(node.inst, newnode) + newnode.inst = self.visit(node.inst, newnode, assign_ctx) if node.tback is not None: - newnode.tback = self.visit(node.tback, newnode) + newnode.tback = self.visit(node.tback, newnode, assign_ctx) return newnode - def visit_return(self, node, parent): + def visit_return(self, node, parent, assign_ctx=None): """visit a Return node by returning a fresh instance of it""" newnode = new.Return() _lineno_parent(node, newnode, parent) if node.value is not None: - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_set(self, node, parent): + def visit_set(self, node, parent, assign_ctx=None): """visit a Set node by returning a fresh instance of it""" newnode = new.Set() _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] return newnode - def visit_setcomp(self, node, parent): + def visit_setcomp(self, node, parent, assign_ctx=None): """visit a SetComp node by returning a fresh instance of it""" newnode = new.SetComp() _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) for child in node.generators] return newnode - def visit_slice(self, node, parent): + def visit_slice(self, node, parent, assign_ctx=None): """visit a Slice node by returning a fresh instance of it""" newnode = new.Slice() newnode.parent = parent if node.lower is not None: - newnode.lower = self.visit(node.lower, newnode) + newnode.lower = self.visit(node.lower, newnode, assign_ctx) if node.upper is not None: - newnode.upper = self.visit(node.upper, newnode) + newnode.upper = self.visit(node.upper, newnode, assign_ctx) if node.step is not None: - newnode.step = self.visit(node.step, newnode) + newnode.step = self.visit(node.step, newnode, assign_ctx) return newnode - def visit_subscript(self, node, parent): + def visit_subscript(self, node, parent, assign_ctx=None): """visit a Subscript node by returning a fresh instance of it""" newnode = new.Subscript() _lineno_parent(node, newnode, parent) - subcontext, self.asscontext = self.asscontext, None - newnode.value = self.visit(node.value, newnode) - newnode.slice = self.visit(node.slice, newnode) - self.asscontext = subcontext + newnode.value = self.visit(node.value, newnode, None) + newnode.slice = self.visit(node.slice, newnode, None) return newnode - def visit_tryexcept(self, node, parent): + def visit_tryexcept(self, node, parent, assign_ctx=None): """visit a TryExcept node by returning a fresh instance of it""" newnode = new.TryExcept() _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.handlers = [self.visit(child, newnode) for child in node.handlers] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.handlers = [self.visit(child, newnode, assign_ctx) + for child in node.handlers] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] return newnode - def visit_tryfinally(self, node, parent): + def visit_tryfinally(self, node, parent, assign_ctx=None): """visit a TryFinally node by returning a fresh instance of it""" 
newnode = new.TryFinally() _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.finalbody = [self.visit(n, newnode, assign_ctx) + for n in node.finalbody] return newnode - def visit_tuple(self, node, parent): + def visit_tuple(self, node, parent, assign_ctx=None): """visit a Tuple node by returning a fresh instance of it""" newnode = new.Tuple() _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] return newnode - def visit_unaryop(self, node, parent): + def visit_unaryop(self, node, parent, assign_ctx=None): """visit a UnaryOp node by returning a fresh instance of it""" newnode = new.UnaryOp() _lineno_parent(node, newnode, parent) - newnode.operand = self.visit(node.operand, newnode) + newnode.operand = self.visit(node.operand, newnode, assign_ctx) newnode.op = _UNARY_OP_CLASSES[node.op.__class__] return newnode - def visit_while(self, node, parent): + def visit_while(self, node, parent, assign_ctx=None): """visit a While node by returning a fresh instance of it""" newnode = new.While() _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] return newnode - def visit_with(self, node, parent): + def visit_with(self, node, parent, assign_ctx=None): newnode = new.With() _lineno_parent(node, newnode, parent) - expr = self.visit(node.context_expr, newnode) - self.asscontext = "Ass" + expr = self.visit(node.context_expr, newnode, assign_ctx) if node.optional_vars is not None: - vars = self.visit(node.optional_vars, newnode) + vars = self.visit(node.optional_vars, newnode, 'Assign') else: vars = None self.asscontext = None newnode.items = [(expr, vars)] - newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] return newnode - def visit_yield(self, node, parent): + def visit_yield(self, node, parent, assign_ctx=None): """visit a Yield node by returning a fresh instance of it""" return _create_yield_node(node, parent, self, new.Yield) class TreeRebuilder3k(TreeRebuilder): """extend and overwrite TreeRebuilder for python3k""" - def visit_arg(self, node, parent): + def visit_arg(self, node, parent, assign_ctx=None): """visit a arg node by returning a fresh AssName instance""" - # the node is coming from py>=3.0, but we use AssName in py2.x - # XXX or we should instead introduce a Arg node in astroid ? - return self.visit_assname(node, parent, node.arg) + # TODO(cpopa): introduce an Arg node instead of using AssignName. 
+ return self.visit_assignname(node, parent, assign_ctx, node.arg) - def visit_nameconstant(self, node, parent): + def visit_nameconstant(self, node, parent, assign_ctx=None): # in Python 3.4 we have NameConstant for True / False / None newnode = new.Const(node.value) _set_infos(node, newnode, parent) return newnode - def visit_arguments(self, node, parent): - newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent) - self.asscontext = "Ass" - newnode.kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs] - self.asscontext = None - newnode.kw_defaults = [self.visit(child, newnode) if child else None for child in node.kw_defaults] + def visit_arguments(self, node, parent, assign_ctx=None): + newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent, assign_ctx) + newnode.kwonlyargs = [self.visit(child, newnode, 'Assign') + for child in node.kwonlyargs] + newnode.kw_defaults = [self.visit(child, newnode, None) + if child else None for child in node.kw_defaults] newnode.annotations = [ - self.visit(arg.annotation, newnode) if arg.annotation else None + self.visit(arg.annotation, newnode, None) if arg.annotation else None for arg in node.args] return newnode - def visit_excepthandler(self, node, parent): + def visit_excepthandler(self, node, parent, assign_ctx=None): """visit an ExceptHandler node by returning a fresh instance of it""" newnode = new.ExceptHandler() _lineno_parent(node, newnode, parent) if node.type is not None: - newnode.type = self.visit(node.type, newnode) + newnode.type = self.visit(node.type, newnode, assign_ctx) if node.name is not None: - newnode.name = self.visit_assname(node, newnode, node.name) - newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.name = self.visit_assignname(node, newnode, 'Assign', node.name) + newnode.body = [self.visit(child, newnode, None) + for child in node.body] return newnode - def visit_nonlocal(self, node, parent): + def visit_nonlocal(self, node, parent, assign_ctx=None): """visit a Nonlocal node and return a new instance of it""" newnode = new.Nonlocal(node.names) _set_infos(node, newnode, parent) return newnode - def visit_raise(self, node, parent): + def visit_raise(self, node, parent, assign_ctx=None): """visit a Raise node by returning a fresh instance of it""" newnode = new.Raise() _lineno_parent(node, newnode, parent) # no traceback; anyway it is not used in Pylint if node.exc is not None: - newnode.exc = self.visit(node.exc, newnode) + newnode.exc = self.visit(node.exc, newnode, assign_ctx) if node.cause is not None: - newnode.cause = self.visit(node.cause, newnode) + newnode.cause = self.visit(node.cause, newnode, assign_ctx) return newnode - def visit_starred(self, node, parent): + def visit_starred(self, node, parent, assign_ctx=None): """visit a Starred node and return a new instance of it""" newnode = new.Starred() _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) + newnode.value = self.visit(node.value, newnode, assign_ctx) return newnode - def visit_try(self, node, parent): + def visit_try(self, node, parent, assign_ctx=None): # python 3.3 introduce a new Try node replacing TryFinally/TryExcept nodes + # pylint: disable=redefined-variable-type if node.finalbody: newnode = new.TryFinally() _lineno_parent(node, newnode, parent) - newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody] + newnode.finalbody = [self.visit(n, newnode, assign_ctx) + for n in node.finalbody] if node.handlers: excnode = new.TryExcept() 
_lineno_parent(node, excnode, newnode) - excnode.body = [self.visit(child, excnode) for child in node.body] - excnode.handlers = [self.visit(child, excnode) for child in node.handlers] - excnode.orelse = [self.visit(child, excnode) for child in node.orelse] + excnode.body = [self.visit(child, excnode, assign_ctx) + for child in node.body] + excnode.handlers = [self.visit(child, excnode, assign_ctx) + for child in node.handlers] + excnode.orelse = [self.visit(child, excnode, assign_ctx) + for child in node.orelse] newnode.body = [excnode] else: - newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] elif node.handlers: newnode = new.TryExcept() _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.handlers = [self.visit(child, newnode) for child in node.handlers] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.handlers = [self.visit(child, newnode, assign_ctx) + for child in node.handlers] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] return newnode - def visit_with(self, node, parent): + def _visit_with(self, cls, node, parent, assign_ctx=None): if 'items' not in node._fields: # python < 3.3 - return super(TreeRebuilder3k, self).visit_with(node, parent) + return super(TreeRebuilder3k, self).visit_with(node, parent, + assign_ctx) - newnode = new.With() + newnode = cls() _lineno_parent(node, newnode, parent) def visit_child(child): expr = self.visit(child.context_expr, newnode) - self.asscontext = 'Ass' if child.optional_vars: - var = self.visit(child.optional_vars, newnode) + var = self.visit(child.optional_vars, newnode, + 'Assign') else: var = None - self.asscontext = None return expr, var newnode.items = [visit_child(child) for child in node.items] - newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.body = [self.visit(child, newnode, None) + for child in node.body] return newnode - def visit_yieldfrom(self, node, parent): + def visit_with(self, node, parent, assign_ctx=None): + return self._visit_with(new.With, node, parent, assign_ctx=assign_ctx) + + def visit_yieldfrom(self, node, parent, assign_ctx=None): return _create_yield_node(node, parent, self, new.YieldFrom) - def visit_class(self, node, parent): - newnode = super(TreeRebuilder3k, self).visit_class(node, parent) + def visit_classdef(self, node, parent, assign_ctx=None): + newnode = super(TreeRebuilder3k, self).visit_classdef(node, parent, assign_ctx) newnode._newstyle = True for keyword in node.keywords: if keyword.arg == 'metaclass': - newnode._metaclass = self.visit(keyword, newnode).value + newnode._metaclass = self.visit(keyword, newnode, assign_ctx).value break return newnode -if sys.version_info >= (3, 0): - TreeRebuilder = TreeRebuilder3k + # Async structs added in Python 3.5 + def visit_asyncfunctiondef(self, node, parent, assign_ctx=None): + return self._visit_functiondef(new.AsyncFunctionDef, node, parent, + assign_ctx=assign_ctx) + def visit_asyncfor(self, node, parent, assign_ctx=None): + return self._visit_for(new.AsyncFor, node, parent, + assign_ctx=assign_ctx) + + def visit_await(self, node, parent, assign_ctx=None): + newnode = new.Await() + newnode.lineno = node.lineno + newnode.col_offset = node.col_offset + newnode.parent = parent + newnode.value = self.visit(node.value, 
newnode, None) + return newnode + + def visit_asyncwith(self, node, parent, assign_ctx=None): + return self._visit_with(new.AsyncWith, node, parent, + assign_ctx=assign_ctx) + + +if sys.version_info >= (3, 0): + TreeRebuilder = TreeRebuilder3k diff --git a/pymode/libs/astroid/scoped_nodes.py b/pymode/libs/astroid/scoped_nodes.py index ac90f878..d78d1510 100644 --- a/pymode/libs/astroid/scoped_nodes.py +++ b/pymode/libs/astroid/scoped_nodes.py @@ -15,40 +15,33 @@ # # You should have received a copy of the GNU Lesser General Public License along # with astroid. If not, see . -"""This module contains the classes for "scoped" node, i.e. which are opening a -new local scope in the language definition : Module, Class, Function (and -Lambda, GenExpr, DictComp and SetComp to some extent). -""" -from __future__ import with_statement -__doctype__ = "restructuredtext en" +""" +This module contains the classes for "scoped" node, i.e. which are opening a +new local scope in the language definition : Module, ClassDef, FunctionDef (and +Lambda, GeneratorExp, DictComp and SetComp to some extent). +""" -import sys +import io +import itertools import warnings -from itertools import chain -try: - from io import BytesIO -except ImportError: - from cStringIO import StringIO as BytesIO import six -from logilab.common.compat import builtins -from logilab.common.decorators import cached, cachedproperty - -from astroid.exceptions import NotFoundError, \ - AstroidBuildingException, InferenceError, ResolveError -from astroid.node_classes import Const, DelName, DelAttr, \ - Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \ - LookupMixIn, const_factory as cf, unpack_infer, CallFunc -from astroid.bases import NodeNG, InferenceContext, Instance, copy_context, \ - YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, \ - BUILTINS -from astroid.mixins import FilterStmtsMixin -from astroid.bases import Statement -from astroid.manager import AstroidManager +import wrapt + +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import manager +from astroid import mixins +from astroid import node_classes +from astroid import decorators as decorators_mod +from astroid import util + +BUILTINS = six.moves.builtins.__name__ ITER_METHODS = ('__iter__', '__getitem__') -PY3K = sys.version_info >= (3, 0) + def _c3_merge(sequences): """Merges MROs in *sequences* to a single MRO using the C3 algorithm. 
@@ -75,8 +68,10 @@ def _c3_merge(sequences): bases = ["({})".format(", ".join(base.name for base in subsequence)) for subsequence in sequences] - raise ResolveError("Cannot create a consistent method resolution " - "order for bases %s" % ", ".join(bases)) + raise exceptions.InconsistentMroError( + "Cannot create a consistent method resolution " + "order for bases %s" % ", ".join(bases)) + result.append(candidate) # remove the chosen candidate for seq in sequences: @@ -88,59 +83,62 @@ def _verify_duplicates_mro(sequences): for sequence in sequences: names = [node.qname() for node in sequence] if len(names) != len(set(names)): - raise ResolveError('Duplicates found in the mro.') + raise exceptions.DuplicateBasesError('Duplicates found in the mro.') -def remove_nodes(func, cls): - def wrapper(*args, **kwargs): +def remove_nodes(cls): + @wrapt.decorator + def decorator(func, instance, args, kwargs): nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)] if not nodes: - raise NotFoundError() + raise exceptions.NotFoundError() return nodes - return wrapper + return decorator def function_to_method(n, klass): - if isinstance(n, Function): + if isinstance(n, FunctionDef): if n.type == 'classmethod': - return BoundMethod(n, klass) + return bases.BoundMethod(n, klass) if n.type != 'staticmethod': - return UnboundMethod(n) + return bases.UnboundMethod(n) return n + def std_special_attributes(self, name, add_locals=True): if add_locals: - locals = self.locals + locals = self._locals else: locals = {} if name == '__name__': - return [cf(self.name)] + locals.get(name, []) + return [node_classes.const_factory(self.name)] + locals.get(name, []) if name == '__doc__': - return [cf(self.doc)] + locals.get(name, []) + return [node_classes.const_factory(self.doc)] + locals.get(name, []) if name == '__dict__': - return [Dict()] + locals.get(name, []) - raise NotFoundError(name) + return [node_classes.Dict()] + locals.get(name, []) + raise exceptions.NotFoundError(name) -MANAGER = AstroidManager() + +MANAGER = manager.AstroidManager() def builtin_lookup(name): """lookup a name into the builtin module return the list of matching statements and the astroid for the builtin module """ - builtin_astroid = MANAGER.ast_from_module(builtins) + builtin_astroid = MANAGER.ast_from_module(six.moves.builtins) if name == '__dict__': return builtin_astroid, () try: - stmts = builtin_astroid.locals[name] + stmts = builtin_astroid._locals[name] except KeyError: stmts = () return builtin_astroid, stmts -# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup -class LocalsDictNodeNG(LookupMixIn, NodeNG): - """ this class provides locals handling common to Module, Function - and Class nodes, including a dict like interface for direct access +# TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup +class LocalsDictNodeNG(node_classes.LookupMixIn, bases.NodeNG): + """ this class provides locals handling common to Module, FunctionDef + and ClassDef nodes, including a dict like interface for direct access to locals information """ @@ -148,6 +146,18 @@ class LocalsDictNodeNG(LookupMixIn, NodeNG): # dictionary of locals with name as key and node defining the local as # value + @property + def locals(self): + util.attribute_to_function_warning('locals', 2.0, 'get_locals') + return self._locals + @locals.setter + def locals(self, _locals): + util.attribute_to_function_warning('locals', 2.0, 'get_locals') + self._locals = _locals + @locals.deleter + def locals(self): + 
util.attribute_to_function_warning('locals', 2.0, 'get_locals') + del self._locals def qname(self): """return the 'qualified' name of the node, eg module.name, @@ -158,21 +168,20 @@ def qname(self): return '%s.%s' % (self.parent.frame().qname(), self.name) def frame(self): - """return the first parent frame node (i.e. Module, Function or Class) + """return the first parent frame node (i.e. Module, FunctionDef or ClassDef) """ return self def scope(self): """return the first node defining a new scope (i.e. Module, - Function, Class, Lambda but also GenExpr, DictComp and SetComp) + FunctionDef, ClassDef, Lambda but also GeneratorExp, DictComp and SetComp) """ return self - def _scope_lookup(self, node, name, offset=0): """XXX method for interfacing the scope lookup""" try: - stmts = node._filter_stmts(self.locals[name], self, offset) + stmts = node._filter_stmts(self._locals[name], self, offset) except KeyError: stmts = () if stmts: @@ -186,8 +195,6 @@ def _scope_lookup(self, node, name, offset=0): return pscope.scope_lookup(node, name) return builtin_lookup(name) # Module - - def set_local(self, name, stmt): """define in locals ( is the node defining the name) if the node is a Module node (i.e. has globals), add the name to @@ -195,8 +202,8 @@ def set_local(self, name, stmt): if the name is already defined, ignore it """ - #assert not stmt in self.locals.get(name, ()), (self, stmt) - self.locals.setdefault(name, []).append(stmt) + #assert not stmt in self._locals.get(name, ()), (self, stmt) + self._locals.setdefault(name, []).append(stmt) __setitem__ = set_local @@ -212,7 +219,6 @@ def add_local_node(self, child_node, name=None): self._append_node(child_node) self.set_local(name or child_node.name, child_node) - def __getitem__(self, item): """method from the `dict` interface returning the first node associated with the given name in the locals dictionary @@ -221,7 +227,7 @@ def __getitem__(self, item): :param item: the name of the locally defined object :raises KeyError: if the name is not defined """ - return self.locals[item][0] + return self._locals[item][0] def __iter__(self): """method from the `dict` interface returning an iterator on @@ -233,27 +239,24 @@ def keys(self): """method from the `dict` interface returning a tuple containing locally defined names """ - return list(self.locals.keys()) + return list(self._locals.keys()) def values(self): """method from the `dict` interface returning a tuple containing - locally defined nodes which are instance of `Function` or `Class` + locally defined nodes which are instance of `FunctionDef` or `ClassDef` """ return [self[key] for key in self.keys()] def items(self): """method from the `dict` interface returning a list of tuple containing each locally defined name with its associated node, - which is an instance of `Function` or `Class` + which is an instance of `FunctionDef` or `ClassDef` """ return list(zip(self.keys(), self.values())) - def __contains__(self, name): - return name in self.locals - has_key = __contains__ + return name in self._locals -# Module ##################################################################### class Module(LocalsDictNodeNG): _astroid_fields = ('body',) @@ -265,9 +268,9 @@ class Module(LocalsDictNodeNG): # the file from which as been extracted the astroid representation. 
It may # be None if the representation has been built from a built-in module - file = None + source_file = None # Alternatively, if built from a string/bytes, this can be set - file_bytes = None + source_code = None # encoding of python source file, so we can get unicode out of it (python2 # only) file_encoding = None @@ -279,10 +282,10 @@ class Module(LocalsDictNodeNG): package = None # dictionary of globals with name as key and node defining the global # as value - globals = None + _globals = None # Future imports - future_imports = None + _future_imports = None # names of python special attributes (handled by getattr impl.) special_attributes = set(('__name__', '__doc__', '__file__', '__path__', @@ -294,15 +297,81 @@ def __init__(self, name, doc, pure_python=True): self.name = name self.doc = doc self.pure_python = pure_python - self.locals = self.globals = {} + self._locals = self._globals = {} self.body = [] - self.future_imports = set() + self._future_imports = set() + + # Future deprecation warnings + @property + def file(self): + util.rename_warning('file', 2.0, 'source_file') + return self.source_file + @file.setter + def file(self, source_file): + util.rename_warning('file', 2.0, 'source_file') + self.source_file = source_file + @file.deleter + def file(self): + util.rename_warning('file', 2.0, 'source_file') + del self.source_file + + @property + def path(self): + util.rename_warning('path', 2.0, 'source_file') + return self.source_file + @path.setter + def path(self, source_file): + util.rename_warning('path', 2.0, 'source_file') + self.source_file = source_file + @path.deleter + def path(self): + util.rename_warning('path', 2.0, 'source_file') + del self.source_file + + @property + def file_bytes(self): + util.rename_warning('file_bytes', 2.0, 'source_code') + return self.source_code + @file_bytes.setter + def file_bytes(self, source_code): + util.rename_warning('file_bytes', 2.0, 'source_code') + self.source_code = source_code + @file_bytes.deleter + def file_bytes(self): + util.rename_warning('file_bytes', 2.0, 'source_code') + del self.source_code + + @property + def globals(self): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + return self._locals + @globals.setter + def globals(self, _globals): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + self._locals = _globals + @globals.deleter + def globals(self): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + del self._locals + + @property + def future_imports(self): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + return self._future_imports + @future_imports.setter + def future_imports(self, _future_imports): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + self._future_imports = _future_imports + @future_imports.deleter + def future_imports(self): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + del self._future_imports def _get_stream(self): - if self.file_bytes is not None: - return BytesIO(self.file_bytes) - if self.file is not None: - stream = open(self.file, 'rb') + if self.source_code is not None: + return io.BytesIO(self.source_code) + if self.source_file is not None: + stream = open(self.source_file, 'rb') return stream return None @@ -337,10 +406,10 @@ def block_range(self, lineno): return self.fromlineno, self.tolineno def scope_lookup(self, node, name, offset=0): - if name in self.scope_attrs and not name in self.locals: + if name in self.scope_attrs and name not 
in self._locals: try: return self, self.getattr(name) - except NotFoundError: + except exceptions.NotFoundError: return self, () return self._scope_lookup(node, name, offset) @@ -350,44 +419,42 @@ def pytype(self): def display_type(self): return 'Module' + @remove_nodes(node_classes.DelName) def getattr(self, name, context=None, ignore_locals=False): if name in self.special_attributes: if name == '__file__': - return [cf(self.file)] + self.locals.get(name, []) + return [node_classes.const_factory(self.source_file)] + self._locals.get(name, []) if name == '__path__' and self.package: - return [List()] + self.locals.get(name, []) + return [node_classes.List()] + self._locals.get(name, []) return std_special_attributes(self, name) - if not ignore_locals and name in self.locals: - return self.locals[name] + if not ignore_locals and name in self._locals: + return self._locals[name] if self.package: try: return [self.import_module(name, relative_only=True)] - except AstroidBuildingException: - raise NotFoundError(name) + except exceptions.AstroidBuildingException: + raise exceptions.NotFoundError(name) except SyntaxError: - raise NotFoundError(name) - except Exception:# XXX pylint tests never pass here; do we need it? - import traceback - traceback.print_exc() - raise NotFoundError(name) - getattr = remove_nodes(getattr, DelName) + raise exceptions.NotFoundError(name) + raise exceptions.NotFoundError(name) def igetattr(self, name, context=None): """inferred getattr""" # set lookup name since this is necessary to infer on import nodes for # instance - context = copy_context(context) + context = contextmod.copy_context(context) context.lookupname = name try: - return _infer_stmts(self.getattr(name, context), context, frame=self) - except NotFoundError: - raise InferenceError(name) + return bases._infer_stmts(self.getattr(name, context), + context, frame=self) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) def fully_defined(self): """return True if this module has been built from a .py file and so contains a complete representation including the code """ - return self.file is not None and self.file.endswith('.py') + return self.source_file is not None and self.source_file.endswith('.py') def statement(self): """return the first parent node marked as statement node @@ -403,11 +470,11 @@ def next_sibling(self): """module has no sibling""" return - if sys.version_info < (2, 8): - @cachedproperty + if six.PY2: + @decorators_mod.cachedproperty def _absolute_import_activated(self): - for stmt in self.locals.get('absolute_import', ()): - if isinstance(stmt, From) and stmt.modname == '__future__': + for stmt in self._locals.get('absolute_import', ()): + if isinstance(stmt, node_classes.ImportFrom) and stmt.modname == '__future__': return True return False else: @@ -423,7 +490,7 @@ def import_module(self, modname, relative_only=False, level=None): absmodname = self.relative_to_absolute_name(modname, level) try: return MANAGER.ast_from_module_name(absmodname) - except AstroidBuildingException: + except exceptions.AstroidBuildingException: # we only want to import a sub module or package of this module, # skip here if relative_only: @@ -454,7 +521,6 @@ def relative_to_absolute_name(self, modname, level): return '%s.%s' % (package_name, modname) return modname - def wildcard_import_names(self): """return the list of imported names when this module is 'wildcard imported' @@ -462,19 +528,6 @@ def wildcard_import_names(self): It doesn't include the '__builtins__' name which is added by 
the current CPython implementation of wildcard imports. """ - # take advantage of a living module if it exists - try: - living = sys.modules[self.name] - except KeyError: - pass - else: - try: - return living.__all__ - except AttributeError: - return [name for name in living.__dict__.keys() - if not name.startswith('_')] - # else lookup the astroid - # # We separate the different steps of lookup in try/excepts # to avoid catching too many Exceptions default = [name for name in self.keys() if not name.startswith('_')] @@ -482,9 +535,10 @@ def wildcard_import_names(self): all = self['__all__'] except KeyError: return default + try: explicit = next(all.assigned_stmts()) - except InferenceError: + except exceptions.InferenceError: return default except AttributeError: # not an assignment node @@ -492,28 +546,34 @@ def wildcard_import_names(self): return default # Try our best to detect the exported name. - infered = [] + inferred = [] try: explicit = next(explicit.infer()) - except InferenceError: + except exceptions.InferenceError: return default - if not isinstance(explicit, (Tuple, List)): + if not isinstance(explicit, (node_classes.Tuple, node_classes.List)): return default - str_const = lambda node: (isinstance(node, Const) and + str_const = lambda node: (isinstance(node, node_classes.Const) and isinstance(node.value, six.string_types)) for node in explicit.elts: if str_const(node): - infered.append(node.value) + inferred.append(node.value) else: try: - infered_node = next(node.infer()) - except InferenceError: + inferred_node = next(node.infer()) + except exceptions.InferenceError: continue - if str_const(infered_node): - infered.append(infered_node.value) - return infered + if str_const(inferred_node): + inferred.append(inferred_node.value) + return inferred + def _public_names(self): + """Get the list of the names which are publicly available in this module.""" + return [name for name in self.keys() if not name.startswith('_')] + + def bool_value(self): + return True class ComprehensionScope(LocalsDictNodeNG): @@ -523,11 +583,11 @@ def frame(self): scope_lookup = LocalsDictNodeNG._scope_lookup -class GenExpr(ComprehensionScope): +class GeneratorExp(ComprehensionScope): _astroid_fields = ('elt', 'generators') def __init__(self): - self.locals = {} + self._locals = {} self.elt = None self.generators = [] @@ -536,7 +596,7 @@ class DictComp(ComprehensionScope): _astroid_fields = ('key', 'value', 'generators') def __init__(self): - self.locals = {} + self._locals = {} self.key = None self.value = None self.generators = [] @@ -546,97 +606,53 @@ class SetComp(ComprehensionScope): _astroid_fields = ('elt', 'generators') def __init__(self): - self.locals = {} + self._locals = {} self.elt = None self.generators = [] -class _ListComp(NodeNG): +class _ListComp(bases.NodeNG): """class representing a ListComp node""" _astroid_fields = ('elt', 'generators') elt = None generators = None -if sys.version_info >= (3, 0): + +if six.PY3: class ListComp(_ListComp, ComprehensionScope): """class representing a ListComp node""" def __init__(self): - self.locals = {} + self._locals = {} else: class ListComp(_ListComp): """class representing a ListComp node""" -# Function ################################################################### def _infer_decorator_callchain(node): """Detect decorator call chaining and see if the end result is a static or a classmethod. 
""" - if not isinstance(node, Function): + if not isinstance(node, FunctionDef): return if not node.parent: return try: - # TODO: We don't handle multiple inference results right now, - # because there's no flow to reason when the return - # is what we are looking for, a static or a class method. - result = next(node.infer_call_result(node.parent)) - except (StopIteration, InferenceError): - return - if isinstance(result, Instance): - result = result._proxied - if isinstance(result, Class): - if result.is_subtype_of('%s.classmethod' % BUILTINS): - return 'classmethod' - if result.is_subtype_of('%s.staticmethod' % BUILTINS): - return 'staticmethod' - - -def _function_type(self): - """ - Function type, possible values are: - method, function, staticmethod, classmethod. - """ - # Can't infer that this node is decorated - # with a subclass of `classmethod` where `type` is first set, - # so do it here. - if self.decorators: - for node in self.decorators.nodes: - if isinstance(node, CallFunc): - # Handle the following case: - # @some_decorator(arg1, arg2) - # def func(...) - # - try: - current = next(node.func.infer()) - except InferenceError: - continue - _type = _infer_decorator_callchain(current) - if _type is not None: - return _type - - try: - for infered in node.infer(): - # Check to see if this returns a static or a class method. - _type = _infer_decorator_callchain(infered) - if _type is not None: - return _type - - if not isinstance(infered, Class): - continue - for ancestor in infered.ancestors(): - if not isinstance(ancestor, Class): - continue - if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): - return 'classmethod' - elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): - return 'staticmethod' - except InferenceError: - pass - return self._type + # TODO: We don't handle multiple inference results right now, + # because there's no flow to reason when the return + # is what we are looking for, a static or a class method. 
+ result = next(node.infer_call_result(node.parent)) + except (StopIteration, exceptions.InferenceError): + return + if isinstance(result, bases.Instance): + result = result._proxied + if isinstance(result, ClassDef): + if result.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + if result.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' -class Lambda(LocalsDictNodeNG, FilterStmtsMixin): +class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG): _astroid_fields = ('args', 'body',) name = '' @@ -644,7 +660,7 @@ class Lambda(LocalsDictNodeNG, FilterStmtsMixin): type = 'function' def __init__(self): - self.locals = {} + self._locals = {} self.args = [] self.body = [] @@ -689,9 +705,10 @@ def scope_lookup(self, node, name, offset=0): return frame._scope_lookup(node, name, offset) -class Function(Statement, Lambda): - if PY3K: - _astroid_fields = ('decorators', 'args', 'body', 'returns') + +class FunctionDef(bases.Statement, Lambda): + if six.PY3: + _astroid_fields = ('decorators', 'args', 'returns', 'body') returns = None else: _astroid_fields = ('decorators', 'args', 'body') @@ -699,32 +716,137 @@ class Function(Statement, Lambda): special_attributes = set(('__name__', '__doc__', '__dict__')) is_function = True # attributes below are set by the builder module or by raw factories - blockstart_tolineno = None decorators = None - _type = "function" - type = cachedproperty(_function_type) def __init__(self, name, doc): - self.locals = {} + self._locals = {} self.args = [] self.body = [] self.name = name self.doc = doc - self.extra_decorators = [] - self.instance_attrs = {} + self._instance_attrs = {} + + @property + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + return self._instance_attrs + @instance_attrs.setter + def instance_attrs(self, _instance_attrs): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + self._instance_attrs = _instance_attrs + @instance_attrs.deleter + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + del self._instance_attrs + + @decorators_mod.cachedproperty + def extra_decorators(self): + """Get the extra decorators that this function can haves + Additional decorators are considered when they are used as + assignments, as in `method = staticmethod(method)`. + The property will return all the callables that are used for + decoration. + """ + frame = self.parent.frame() + if not isinstance(frame, ClassDef): + return [] + + decorators = [] + for assign in frame.nodes_of_class(node_classes.Assign): + if (isinstance(assign.value, node_classes.Call) + and isinstance(assign.value.func, node_classes.Name)): + for assign_node in assign.targets: + if not isinstance(assign_node, node_classes.AssignName): + # Support only `name = callable(name)` + continue + + if assign_node.name != self.name: + # Interested only in the assignment nodes that + # decorates the current method. + continue + try: + meth = frame[self.name] + except KeyError: + continue + else: + # Must be a function and in the same frame as the + # original method. + if (isinstance(meth, FunctionDef) + and assign_node.frame() == frame): + decorators.append(assign.value) + return decorators + + @decorators_mod.cachedproperty + def type(self): + """Get the function type for this node. + + Possible values are: method, function, staticmethod, classmethod. 
+ """ + builtin_descriptors = {'classmethod', 'staticmethod'} + + for decorator in self.extra_decorators: + if decorator.func.name in builtin_descriptors: + return decorator.func.name - @cachedproperty + frame = self.parent.frame() + type_name = 'function' + if isinstance(frame, ClassDef): + if self.name == '__new__': + return 'classmethod' + else: + type_name = 'method' + + if self.decorators: + for node in self.decorators.nodes: + if isinstance(node, node_classes.Name): + if node.name in builtin_descriptors: + return node.name + + if isinstance(node, node_classes.Call): + # Handle the following case: + # @some_decorator(arg1, arg2) + # def func(...) + # + try: + current = next(node.func.infer()) + except exceptions.InferenceError: + continue + _type = _infer_decorator_callchain(current) + if _type is not None: + return _type + + try: + for inferred in node.infer(): + # Check to see if this returns a static or a class method. + _type = _infer_decorator_callchain(inferred) + if _type is not None: + return _type + + if not isinstance(inferred, ClassDef): + continue + for ancestor in inferred.ancestors(): + if not isinstance(ancestor, ClassDef): + continue + if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' + except exceptions.InferenceError: + pass + return type_name + + @decorators_mod.cachedproperty def fromlineno(self): # lineno is the line number of the first decorator, we want the def # statement lineno lineno = self.lineno if self.decorators is not None: lineno += sum(node.tolineno - node.lineno + 1 - for node in self.decorators.nodes) + for node in self.decorators.nodes) return lineno - @cachedproperty + @decorators_mod.cachedproperty def blockstart_tolineno(self): return self.args.tolineno @@ -740,29 +862,41 @@ def getattr(self, name, context=None): done by an Instance proxy at inference time. """ if name == '__module__': - return [cf(self.root().qname())] - if name in self.instance_attrs: - return self.instance_attrs[name] + return [node_classes.const_factory(self.root().qname())] + if name in self._instance_attrs: + return self._instance_attrs[name] return std_special_attributes(self, name, False) + def igetattr(self, name, context=None): + """Inferred getattr, which returns an iterator of inferred statements.""" + try: + return bases._infer_stmts(self.getattr(name, context), + context, frame=self) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) + def is_method(self): """return true if the function node should be considered as a method""" - # check we are defined in a Class, because this is usually expected + # check we are defined in a ClassDef, because this is usually expected # (e.g. pylint...) when is_method() return True - return self.type != 'function' and isinstance(self.parent.frame(), Class) + return self.type != 'function' and isinstance(self.parent.frame(), ClassDef) + @decorators_mod.cached def decoratornames(self): """return a list of decorator qualified names""" result = set() decoratornodes = [] if self.decorators is not None: + # pylint: disable=unsupported-binary-operation; damn flow control. 
decoratornodes += self.decorators.nodes decoratornodes += self.extra_decorators for decnode in decoratornodes: - for infnode in decnode.infer(): - result.add(infnode.qname()) + try: + for infnode in decnode.infer(): + result.add(infnode.qname()) + except exceptions.InferenceError: + continue return result - decoratornames = cached(decoratornames) def is_bound(self): """return true if the function is bound to an Instance or a class""" @@ -779,34 +913,34 @@ def is_abstract(self, pass_is_abstract=True): if self.decorators: for node in self.decorators.nodes: try: - infered = next(node.infer()) - except InferenceError: + inferred = next(node.infer()) + except exceptions.InferenceError: continue - if infered and infered.qname() in ('abc.abstractproperty', - 'abc.abstractmethod'): + if inferred and inferred.qname() in ('abc.abstractproperty', + 'abc.abstractmethod'): return True for child_node in self.body: - if isinstance(child_node, Raise): + if isinstance(child_node, node_classes.Raise): if child_node.raises_not_implemented(): return True - if pass_is_abstract and isinstance(child_node, Pass): - return True - return False + return pass_is_abstract and isinstance(child_node, node_classes.Pass) # empty function is the same as function with a single "pass" statement if pass_is_abstract: return True def is_generator(self): """return true if this is a generator function""" - # XXX should be flagged, not computed - return next(self.nodes_of_class((Yield, YieldFrom), - skip_klass=(Function, Lambda)), False) + yield_nodes = (node_classes.Yield, node_classes.YieldFrom) + return next(self.nodes_of_class(yield_nodes, + skip_klass=(FunctionDef, Lambda)), False) def infer_call_result(self, caller, context=None): """infer what a function is returning when called""" if self.is_generator(): - yield Generator() + result = bases.Generator() + result.parent = self + yield result return # This is really a gigantic hack to work around metaclass generators # that return transient class-generating functions. 
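# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `is_generator()` walks
# the body for Yield/YieldFrom nodes while skipping nested functions and
# lambdas, which is what the `skip_klass=(FunctionDef, Lambda)` argument does.
from astroid import test_utils

produces, only_wraps = test_utils.extract_node("""
def produces():        #@
    yield 1

def only_wraps():      #@
    def inner():
        yield 1
    return inner
""")
assert produces.is_generator()          # truthy: the Yield node itself
assert not only_wraps.is_generator()    # the nested generator does not count
# -- end sketch ------------------------------------------------------------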
Pylint's AST structure @@ -818,25 +952,29 @@ def infer_call_result(self, caller, context=None): len(self.args.args) == 1 and self.args.vararg is not None): metaclass = next(caller.args[0].infer(context)) - if isinstance(metaclass, Class): - c = Class('temporary_class', None) + if isinstance(metaclass, ClassDef): + c = ClassDef('temporary_class', None) c.hide = True c.parent = self - bases = [next(b.infer(context)) for b in caller.args[1:]] - c.bases = [base for base in bases if base != YES] + class_bases = [next(b.infer(context)) for b in caller.args[1:]] + c.bases = [base for base in class_bases if base != util.YES] c._metaclass = metaclass yield c return - returns = self.nodes_of_class(Return, skip_klass=Function) + returns = self.nodes_of_class(node_classes.Return, skip_klass=FunctionDef) for returnnode in returns: if returnnode.value is None: - yield Const(None) + yield node_classes.Const(None) else: try: - for infered in returnnode.value.infer(context): - yield infered - except InferenceError: - yield YES + for inferred in returnnode.value.infer(context): + yield inferred + except exceptions.InferenceError: + yield util.YES + + +class AsyncFunctionDef(FunctionDef): + """Asynchronous function created with the `async` keyword.""" def _rec_get_names(args, names=None): @@ -844,16 +982,13 @@ def _rec_get_names(args, names=None): if names is None: names = [] for arg in args: - if isinstance(arg, Tuple): + if isinstance(arg, node_classes.Tuple): _rec_get_names(arg.elts, names) else: names.append(arg.name) return names -# Class ###################################################################### - - def _is_metaclass(klass, seen=None): """ Return if the given class can be used as a metaclass. @@ -865,30 +1000,31 @@ def _is_metaclass(klass, seen=None): for base in klass.bases: try: for baseobj in base.infer(): - if baseobj in seen: + baseobj_name = baseobj.qname() + if baseobj_name in seen: continue else: - seen.add(baseobj) - if isinstance(baseobj, Instance): + seen.add(baseobj_name) + if isinstance(baseobj, bases.Instance): # not abstract return False - if baseobj is YES: + if baseobj is util.YES: continue if baseobj is klass: continue - if not isinstance(baseobj, Class): + if not isinstance(baseobj, ClassDef): continue if baseobj._type == 'metaclass': return True if _is_metaclass(baseobj, seen): return True - except InferenceError: + except exceptions.InferenceError: continue return False def _class_type(klass, ancestors=None): - """return a Class node type to differ metaclass, interface and exception + """return a ClassDef node type to differ metaclass and exception from 'regular' classes """ # XXX we have to store ancestors in case we have a ancestor loop @@ -896,18 +1032,17 @@ def _class_type(klass, ancestors=None): return klass._type if _is_metaclass(klass): klass._type = 'metaclass' - elif klass.name.endswith('Interface'): - klass._type = 'interface' elif klass.name.endswith('Exception'): klass._type = 'exception' else: if ancestors is None: ancestors = set() - if klass in ancestors: + klass_name = klass.qname() + if klass_name in ancestors: # XXX we are in loop ancestors, and have found no type klass._type = 'class' return 'class' - ancestors.add(klass) + ancestors.add(klass_name) for base in klass.ancestors(recurs=False): name = _class_type(base, ancestors) if name != 'class': @@ -921,14 +1056,8 @@ def _class_type(klass, ancestors=None): klass._type = 'class' return klass._type -def _iface_hdlr(iface_node): - """a handler function used by interfaces to handle suspicious - interface 
nodes - """ - return True - -class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin): +class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, bases.Statement): # some of the attributes below are set by the builder module or # by a raw factories @@ -939,26 +1068,38 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin): decorators = None special_attributes = set(('__name__', '__doc__', '__dict__', '__module__', '__bases__', '__mro__', '__subclasses__')) - blockstart_tolineno = None _type = None _metaclass_hack = False hide = False type = property(_class_type, doc="class'type, possible values are 'class' | " - "'metaclass' | 'interface' | 'exception'") + "'metaclass' | 'exception'") def __init__(self, name, doc): - self.instance_attrs = {} - self.locals = {} + self._instance_attrs = {} + self._locals = {} self.bases = [] self.body = [] self.name = name self.doc = doc + @property + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + return self._instance_attrs + @instance_attrs.setter + def instance_attrs(self, _instance_attrs): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + self._instance_attrs = _instance_attrs + @instance_attrs.deleter + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + del self._instance_attrs + def _newstyle_impl(self, context=None): if context is None: - context = InferenceContext() + context = contextmod.InferenceContext() if self._newstyle is not None: return self._newstyle for base in self.ancestors(recurs=False, context=context): @@ -968,7 +1109,7 @@ def _newstyle_impl(self, context=None): klass = self._explicit_metaclass() # could be any callable, we'd need to infer the result of klass(name, # bases, dict). punt if it's not a class node. - if klass is not None and isinstance(klass, Class): + if klass is not None and isinstance(klass, ClassDef): self._newstyle = klass._newstyle_impl(context) if self._newstyle is None: self._newstyle = False @@ -979,7 +1120,7 @@ def _newstyle_impl(self, context=None): doc="boolean indicating if it's a new style class" "or not") - @cachedproperty + @decorators_mod.cachedproperty def blockstart_tolineno(self): if self.bases: return self.bases[-1].tolineno @@ -1011,32 +1152,52 @@ def is_subtype_of(self, type_name, context=None): if anc.qname() == type_name: return True + def _infer_type_call(self, caller, context): + name_node = next(caller.args[0].infer(context)) + if (isinstance(name_node, node_classes.Const) and + isinstance(name_node.value, six.string_types)): + name = name_node.value + else: + return util.YES + + result = ClassDef(name, None) + + # Get the bases of the class. + class_bases = next(caller.args[1].infer(context)) + if isinstance(class_bases, (node_classes.Tuple, node_classes.List)): + result.bases = class_bases.itered() + else: + # There is currently no AST node that can represent an 'unknown' + # node (YES is not an AST node), therefore we simply return YES here + # although we know at least the name of the class. 
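# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  With the 'interface'
# kind dropped, `_class_type` (exposed as the `type` property) now only
# distinguishes regular classes, exceptions and metaclasses.
from astroid import test_utils

plain, error = test_utils.extract_node("""
class Plain(object):                 #@
    pass

class ParsingException(Exception):   #@
    pass
""")
assert plain.type == 'class'
assert error.type == 'exception'
# -- end sketch ------------------------------------------------------------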
+ return util.YES + + # Get the members of the class + try: + members = next(caller.args[2].infer(context)) + except exceptions.InferenceError: + members = None + + if members and isinstance(members, node_classes.Dict): + for attr, value in members.items: + if (isinstance(attr, node_classes.Const) and + isinstance(attr.value, six.string_types)): + result._locals[attr.value] = [value] + + result.parent = caller.parent + return result + def infer_call_result(self, caller, context=None): """infer what a class is returning when called""" - if self.is_subtype_of('%s.type' % (BUILTINS,), context) and len(caller.args) == 3: - name_node = next(caller.args[0].infer(context)) - if (isinstance(name_node, Const) and - isinstance(name_node.value, six.string_types)): - name = name_node.value - else: - yield YES - return - result = Class(name, None) - bases = next(caller.args[1].infer(context)) - if isinstance(bases, (Tuple, List)): - result.bases = bases.itered() - else: - # There is currently no AST node that can represent an 'unknown' - # node (YES is not an AST node), therefore we simply return YES here - # although we know at least the name of the class. - yield YES - return - result.parent = caller.parent + if (self.is_subtype_of('%s.type' % (BUILTINS,), context) + and len(caller.args) == 3): + result = self._infer_type_call(caller, context) yield result else: - yield Instance(self) + yield bases.Instance(self) def scope_lookup(self, node, name, offset=0): + # pylint: disable=redefined-variable-type if any(node == base or base.parent_of(node) for base in self.bases): # Handle the case where we have either a name @@ -1060,11 +1221,10 @@ def scope_lookup(self, node, name, offset=0): frame = self return frame._scope_lookup(node, name, offset) - # list of parent class as a list of string (i.e. names as they appear - # in the class definition) XXX bw compat + @property def basenames(self): + """Get the list of parent class names, as they appear in the class definition.""" return [bnode.as_string() for bnode in self.bases] - basenames = property(basenames) def ancestors(self, recurs=True, context=None): """return an iterator on the node base classes in a prefixed @@ -1078,8 +1238,8 @@ def ancestors(self, recurs=True, context=None): # FIXME: inference make infinite loops possible here yielded = set([self]) if context is None: - context = InferenceContext() - if sys.version_info[0] >= 3: + context = contextmod.InferenceContext() + if six.PY3: if not self.bases and self.qname() != 'builtins.object': yield builtin_lookup("object")[1][0] return @@ -1088,15 +1248,14 @@ def ancestors(self, recurs=True, context=None): with context.restore_path(): try: for baseobj in stmt.infer(context): - if not isinstance(baseobj, Class): - if isinstance(baseobj, Instance): + if not isinstance(baseobj, ClassDef): + if isinstance(baseobj, bases.Instance): baseobj = baseobj._proxied else: - # duh ? continue if not baseobj.hide: if baseobj in yielded: - continue # cf xxx above + continue yielded.add(baseobj) yield baseobj if recurs: @@ -1106,18 +1265,28 @@ def ancestors(self, recurs=True, context=None): # This class is the ancestor of itself. break if grandpa in yielded: - continue # cf xxx above + continue yielded.add(grandpa) yield grandpa - except InferenceError: - # XXX log error ? 
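# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  The logic extracted
# into `_infer_type_call` turns a three-argument `type()` call into a
# synthetic ClassDef carrying the given name, bases and members.
from astroid import test_utils

call = test_utils.extract_node("type('Dynamic', (object,), {'answer': 42})")
inferred = next(call.infer())
assert inferred.name == 'Dynamic'
assert 'answer' in inferred._locals
# -- end sketch ------------------------------------------------------------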
+ except exceptions.InferenceError: continue def local_attr_ancestors(self, name, context=None): """return an iterator on astroid representation of parent classes which have defined in their locals """ - for astroid in self.ancestors(context=context): + if self.newstyle and all(n.newstyle for n in self.ancestors(context)): + # Look up in the mro if we can. This will result in the + # attribute being looked up just as Python does it. + try: + ancestors = self.mro(context)[1:] + except exceptions.MroError: + # Fallback to use ancestors, we can't determine + # a sane MRO. + ancestors = self.ancestors(context=context) + else: + ancestors = self.ancestors(context=context) + for astroid in ancestors: if name in astroid: yield astroid @@ -1126,12 +1295,13 @@ def instance_attr_ancestors(self, name, context=None): which have defined in their instance attribute dictionary """ for astroid in self.ancestors(context=context): - if name in astroid.instance_attrs: + if name in astroid._instance_attrs: yield astroid def has_base(self, node): return node in self.bases + @remove_nodes(node_classes.DelAttr) def local_attr(self, name, context=None): """return the list of assign node associated to name in this class locals or in its parents @@ -1141,14 +1311,13 @@ def local_attr(self, name, context=None): its parent classes """ try: - return self.locals[name] + return self._locals[name] except KeyError: - # get if from the first parent implementing it if any for class_node in self.local_attr_ancestors(name, context): - return class_node.locals[name] - raise NotFoundError(name) - local_attr = remove_nodes(local_attr, DelAttr) + return class_node._locals[name] + raise exceptions.NotFoundError(name) + @remove_nodes(node_classes.DelAttr) def instance_attr(self, name, context=None): """return the astroid nodes associated to name in this class instance attributes dictionary and in its parents @@ -1157,20 +1326,24 @@ def instance_attr(self, name, context=None): if no attribute with this name has been find in this class or its parent classes """ - # Return a copy, so we don't modify self.instance_attrs, + # Return a copy, so we don't modify self._instance_attrs, # which could lead to infinite loop. 
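# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `local_attr` falls
# back to the ancestors (now preferring the MRO when it can be computed),
# while `instance_attr` aggregates the `self.<name>` assignments found in
# the class and its parents.
from astroid import test_utils

sub = test_utils.extract_node("""
class Base(object):
    shared = 1
    def __init__(self):
        self.attr = 'base'

class Sub(Base):
    def __init__(self):
        self.attr = 'sub'
""")
assert sub.local_attr('shared')[0].frame().name == 'Base'
assert len(sub.instance_attr('attr')) == 2   # one assignment per class
# -- end sketch ------------------------------------------------------------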
- values = list(self.instance_attrs.get(name, [])) + values = list(self._instance_attrs.get(name, [])) # get all values from parents for class_node in self.instance_attr_ancestors(name, context): - values += class_node.instance_attrs[name] + values += class_node._instance_attrs[name] if not values: - raise NotFoundError(name) + raise exceptions.NotFoundError(name) return values - instance_attr = remove_nodes(instance_attr, DelAttr) + + def instantiate_class(self): + """return Instance of ClassDef node, else return self""" + return bases.Instance(self) def instanciate_class(self): - """return Instance of Class node, else return self""" - return Instance(self) + """return Instance of ClassDef node, else return self""" + util.rename_warning('instanciate_class()', 2.0, 'instantiate_class()') + return self.instantiate_class() def getattr(self, name, context=None): """this method doesn't look in the instance_attrs dictionary since it's @@ -1179,25 +1352,27 @@ def getattr(self, name, context=None): It may return a YES object if the attribute has not been actually found but a __getattr__ or __getattribute__ method is defined """ - values = self.locals.get(name, []) + values = self._locals.get(name, []) if name in self.special_attributes: if name == '__module__': - return [cf(self.root().qname())] + values - # FIXME: do we really need the actual list of ancestors? - # returning [Tuple()] + values don't break any test - # this is ticket http://www.logilab.org/ticket/52785 - # XXX need proper meta class handling + MRO implementation - if name == '__bases__' or (name == '__mro__' and self.newstyle): - node = Tuple() - node.items = self.ancestors(recurs=True, context=context) + return [node_classes.const_factory(self.root().qname())] + values + if name == '__bases__': + node = node_classes.Tuple() + elts = list(self._inferred_bases(context)) + node.elts = elts return [node] + values + if name == '__mro__' and self.newstyle: + mro = self.mro() + node = node_classes.Tuple() + node.elts = mro + return [node] return std_special_attributes(self, name) - # don't modify the list in self.locals! + # don't modify the list in self._locals! 
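# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `instantiate_class()`
# (the new spelling of `instanciate_class()`) wraps the class in an Instance
# proxy, and `getattr('__mro__')` now builds a real Tuple from the computed
# MRO instead of the raw ancestors.
import astroid
from astroid import test_utils

cls = test_utils.extract_node("""
class Greeter(object):
    def hello(self):
        return 'hi'
""")
assert isinstance(cls.instantiate_class(), astroid.Instance)
mro_tuple = cls.getattr('__mro__')[0]
assert [c.name for c in mro_tuple.elts] == ['Greeter', 'object']
# -- end sketch ------------------------------------------------------------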
values = list(values) for classnode in self.ancestors(recurs=True, context=context): - values += classnode.locals.get(name, []) + values += classnode._locals.get(name, []) if not values: - raise NotFoundError(name) + raise exceptions.NotFoundError(name) return values def igetattr(self, name, context=None): @@ -1206,46 +1381,50 @@ def igetattr(self, name, context=None): """ # set lookup name since this is necessary to infer on import nodes for # instance - context = copy_context(context) + context = contextmod.copy_context(context) context.lookupname = name try: - for infered in _infer_stmts(self.getattr(name, context), context, - frame=self): + for inferred in bases._infer_stmts(self.getattr(name, context), + context, frame=self): # yield YES object instead of descriptors when necessary - if not isinstance(infered, Const) and isinstance(infered, Instance): + if (not isinstance(inferred, node_classes.Const) + and isinstance(inferred, bases.Instance)): try: - infered._proxied.getattr('__get__', context) - except NotFoundError: - yield infered + inferred._proxied.getattr('__get__', context) + except exceptions.NotFoundError: + yield inferred else: - yield YES + yield util.YES else: - yield function_to_method(infered, self) - except NotFoundError: + yield function_to_method(inferred, self) + except exceptions.NotFoundError: if not name.startswith('__') and self.has_dynamic_getattr(context): # class handle some dynamic attributes, return a YES object - yield YES + yield util.YES else: - raise InferenceError(name) + raise exceptions.InferenceError(name) def has_dynamic_getattr(self, context=None): - """return True if the class has a custom __getattr__ or - __getattribute__ method """ - # need to explicitly handle optparse.Values (setattr is not detected) - if self.name == 'Values' and self.root().name == 'optparse': - return True + Check if the current instance has a custom __getattr__ + or a custom __getattribute__. + + If any such method is found and it is not from + builtins, nor from an extension module, then the function + will return True. + """ + def _valid_getattr(node): + root = node.root() + return root.name != BUILTINS and getattr(root, 'pure_python', None) + try: - self.getattr('__getattr__', context) - return True - except NotFoundError: + return _valid_getattr(self.getattr('__getattr__', context)[0]) + except exceptions.NotFoundError: #if self.newstyle: XXX cause an infinite recursion error try: getattribute = self.getattr('__getattribute__', context)[0] - if getattribute.root().name != BUILTINS: - # class has a custom __getattribute__ defined - return True - except NotFoundError: + return _valid_getattr(getattribute) + except exceptions.NotFoundError: pass return False @@ -1254,7 +1433,7 @@ def methods(self): its ancestors """ done = {} - for astroid in chain(iter((self,)), self.ancestors()): + for astroid in itertools.chain(iter((self,)), self.ancestors()): for meth in astroid.mymethods(): if meth.name in done: continue @@ -1264,31 +1443,19 @@ def methods(self): def mymethods(self): """return an iterator on all methods defined in the class""" for member in self.values(): - if isinstance(member, Function): + if isinstance(member, FunctionDef): yield member - def interfaces(self, herited=True, handler_func=_iface_hdlr): - """return an iterator on interfaces implemented by the given - class node + def implicit_metaclass(self): + """Get the implicit metaclass of the current class + + For newstyle classes, this will return an instance of builtins.type. 
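# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  The rewritten
# `has_dynamic_getattr` only reports True when __getattr__/__getattribute__
# comes from pure-python, non-builtin code, instead of special-casing
# optparse.Values.
from astroid import test_utils

dynamic, plain = test_utils.extract_node("""
class Dynamic(object):   #@
    def __getattr__(self, name):
        return name

class Plain(object):     #@
    pass
""")
assert dynamic.has_dynamic_getattr()
assert not plain.has_dynamic_getattr()   # object.__getattribute__ is ignored
# -- end sketch ------------------------------------------------------------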
+ For oldstyle classes, it will simply return None, since there's + no implicit metaclass there. """ - # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)... - try: - implements = Instance(self).getattr('__implements__')[0] - except NotFoundError: - return - if not herited and not implements.frame() is self: - return - found = set() - missing = False - for iface in unpack_infer(implements): - if iface is YES: - missing = True - continue - if not iface in found and handler_func(iface): - found.add(iface) - yield iface - if missing: - raise InferenceError() + + if self.newstyle: + return builtin_lookup('type')[1][0] _metaclass = None def _explicit_metaclass(self): @@ -1304,29 +1471,29 @@ def _explicit_metaclass(self): for base in self.bases: try: for baseobj in base.infer(): - if isinstance(baseobj, Class) and baseobj.hide: + if isinstance(baseobj, ClassDef) and baseobj.hide: self._metaclass = baseobj._metaclass self._metaclass_hack = True break - except InferenceError: + except exceptions.InferenceError: pass if self._metaclass: # Expects this from Py3k TreeRebuilder try: return next(node for node in self._metaclass.infer() - if node is not YES) - except (InferenceError, StopIteration): + if node is not util.YES) + except (exceptions.InferenceError, StopIteration): return None - if sys.version_info >= (3, ): + if six.PY3: return None - if '__metaclass__' in self.locals: - assignment = self.locals['__metaclass__'][-1] + if '__metaclass__' in self._locals: + assignment = self._locals['__metaclass__'][-1] elif self.bases: return None - elif '__metaclass__' in self.root().locals: - assignments = [ass for ass in self.root().locals['__metaclass__'] + elif '__metaclass__' in self.root()._locals: + assignments = [ass for ass in self.root()._locals['__metaclass__'] if ass.lineno < self.lineno] if not assignments: return None @@ -1335,34 +1502,42 @@ def _explicit_metaclass(self): return None try: - infered = next(assignment.infer()) - except InferenceError: + inferred = next(assignment.infer()) + except exceptions.InferenceError: return - if infered is YES: # don't expose this + if inferred is util.YES: # don't expose this return None - return infered + return inferred + + def _find_metaclass(self, seen=None): + if seen is None: + seen = set() + seen.add(self) + + klass = self._explicit_metaclass() + if klass is None: + for parent in self.ancestors(): + if parent not in seen: + klass = parent._find_metaclass(seen) + if klass is not None: + break + return klass def metaclass(self): - """ Return the metaclass of this class. + """Return the metaclass of this class. If this class does not define explicitly a metaclass, then the first defined metaclass in ancestors will be used instead. """ - klass = self._explicit_metaclass() - if klass is None: - for parent in self.ancestors(): - klass = parent.metaclass() - if klass is not None: - break - return klass + return self._find_metaclass() def has_metaclass_hack(self): return self._metaclass_hack def _islots(self): """ Return an iterator with the inferred slots. """ - if '__slots__' not in self.locals: + if '__slots__' not in self._locals: return for slots in self.igetattr('__slots__'): # check if __slots__ is a valid type @@ -1370,12 +1545,12 @@ def _islots(self): try: slots.getattr(meth) break - except NotFoundError: + except exceptions.NotFoundError: continue else: continue - if isinstance(slots, Const): + if isinstance(slots, node_classes.Const): # a string. 
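# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `metaclass()` now
# delegates to `_find_metaclass`, which also searches the ancestors, while
# the new `implicit_metaclass()` returns the builtin `type` for new-style
# classes that declare nothing themselves.
from astroid import test_utils

cls = test_utils.extract_node("""
class Plain(object):
    pass
""")
assert cls.metaclass() is None                  # nothing declared anywhere
assert cls.implicit_metaclass().name == 'type'
# -- end sketch ------------------------------------------------------------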
Ignore the following checks, # but yield the node, only if it has a value if slots.value: @@ -1385,30 +1560,50 @@ def _islots(self): # we can't obtain the values, maybe a .deque? continue - if isinstance(slots, Dict): + if isinstance(slots, node_classes.Dict): values = [item[0] for item in slots.items] else: values = slots.itered() - if values is YES: + if values is util.YES: continue + if not values: + # Stop the iteration, because the class + # has an empty list of slots. + raise StopIteration(values) for elt in values: try: - for infered in elt.infer(): - if infered is YES: + for inferred in elt.infer(): + if inferred is util.YES: continue - if (not isinstance(infered, Const) or - not isinstance(infered.value, + if (not isinstance(inferred, node_classes.Const) or + not isinstance(inferred.value, six.string_types)): continue - if not infered.value: + if not inferred.value: continue - yield infered - except InferenceError: + yield inferred + except exceptions.InferenceError: continue + def _slots(self): + if not self.newstyle: + raise NotImplementedError( + "The concept of slots is undefined for old-style classes.") + + slots = self._islots() + try: + first = next(slots) + except StopIteration as exc: + # The class doesn't have a __slots__ definition or empty slots. + if exc.args and exc.args[0] not in ('', None): + return exc.args[0] + return None + # pylint: disable=unsupported-binary-operation; false positive + return [first] + list(slots) + # Cached, because inferring them all the time is expensive - @cached + @decorators_mod.cached def slots(self): """Get all the slots for this node. @@ -1417,19 +1612,30 @@ def slots(self): Also, it will return None in the case the slots weren't inferred. Otherwise, it will return a list of slot names. """ + def grouped_slots(): + # Not interested in object, since it can't have slots. + for cls in self.mro()[:-1]: + try: + cls_slots = cls._slots() + except NotImplementedError: + continue + if cls_slots is not None: + for slot in cls_slots: + yield slot + else: + yield None + if not self.newstyle: raise NotImplementedError( "The concept of slots is undefined for old-style classes.") - slots = self._islots() - try: - first = next(slots) - except StopIteration: - # The class doesn't have a __slots__ definition. + slots = list(grouped_slots()) + if not all(slot is not None for slot in slots): return None - return [first] + list(slots) - def _inferred_bases(self, recurs=True, context=None): + return sorted(slots, key=lambda item: item.value) + + def _inferred_bases(self, context=None): # TODO(cpopa): really similar with .ancestors, # but the difference is when one base is inferred, # only the first object is wanted. That's because @@ -1445,8 +1651,8 @@ def _inferred_bases(self, recurs=True, context=None): # only in SomeClass. if context is None: - context = InferenceContext() - if sys.version_info[0] >= 3: + context = contextmod.InferenceContext() + if six.PY3: if not self.bases and self.qname() != 'builtins.object': yield builtin_lookup("object")[1][0] return @@ -1454,15 +1660,17 @@ def _inferred_bases(self, recurs=True, context=None): for stmt in self.bases: try: baseobj = next(stmt.infer(context=context)) - except InferenceError: - # XXX log error ? 
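# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `slots()` now gathers
# __slots__ along the MRO (skipping `object`) and returns None as soon as a
# class in the chain has no usable __slots__ of its own.
from astroid import test_utils

sub = test_utils.extract_node("""
class Base(object):
    __slots__ = ('a',)

class Sub(Base):
    __slots__ = ('b',)
""")
assert [slot.value for slot in sub.slots()] == ['a', 'b']
# -- end sketch ------------------------------------------------------------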
+ except exceptions.InferenceError: continue - if isinstance(baseobj, Instance): + if isinstance(baseobj, bases.Instance): baseobj = baseobj._proxied - if not isinstance(baseobj, Class): + if not isinstance(baseobj, ClassDef): continue if not baseobj.hide: yield baseobj + else: + for base in baseobj.bases: + yield base def mro(self, context=None): """Get the method resolution order, using C3 linearization. @@ -1476,9 +1684,33 @@ def mro(self, context=None): "Could not obtain mro for old-style classes.") bases = list(self._inferred_bases(context=context)) - unmerged_mro = ([[self]] + - [base.mro() for base in bases if base is not self] + - [bases]) - + bases_mro = [] + for base in bases: + try: + mro = base.mro(context=context) + bases_mro.append(mro) + except NotImplementedError: + # Some classes have in their ancestors both newstyle and + # old style classes. For these we can't retrieve the .mro, + # although in Python it's possible, since the class we are + # currently working is in fact new style. + # So, we fallback to ancestors here. + ancestors = list(base.ancestors(context=context)) + bases_mro.append(ancestors) + + unmerged_mro = ([[self]] + bases_mro + [bases]) _verify_duplicates_mro(unmerged_mro) return _c3_merge(unmerged_mro) + +def get_locals(node): + '''Stub function for forwards compatibility.''' + return node._locals + +def get_attributes(node): + '''Stub function for forwards compatibility.''' + return node._instance_attrs + +# Backwards-compatibility aliases +Class = node_classes.proxy_alias('Class', ClassDef) +Function = node_classes.proxy_alias('Function', FunctionDef) +GenExpr = node_classes.proxy_alias('GenExpr', GeneratorExp) diff --git a/pymode/libs/astroid/test_utils.py b/pymode/libs/astroid/test_utils.py index 19bd7b96..9e45abcf 100644 --- a/pymode/libs/astroid/test_utils.py +++ b/pymode/libs/astroid/test_utils.py @@ -1,7 +1,6 @@ """Utility functions for test code that uses astroid ASTs as input.""" import functools import sys -import textwrap from astroid import nodes from astroid import builder @@ -14,7 +13,6 @@ # when calling extract_node. _STATEMENT_SELECTOR = '#@' - def _extract_expressions(node): """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. @@ -28,7 +26,7 @@ def _extract_expressions(node): :yields: The sequence of wrapped expressions on the modified tree expression can be found. """ - if (isinstance(node, nodes.CallFunc) + if (isinstance(node, nodes.Call) and isinstance(node.func, nodes.Name) and node.func.name == _TRANSIENT_FUNCTION): real_expr = node.args[0] @@ -68,7 +66,7 @@ def _find_statement_by_line(node, line): can be found. :rtype: astroid.bases.NodeNG or None """ - if isinstance(node, (nodes.Class, nodes.Function)): + if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)): # This is an inaccuracy in the AST: the nodes that can be # decorated do not carry explicit information on which line # the actual definition (class/def), but .fromline seems to @@ -142,7 +140,7 @@ def extract_node(code, module_name=''): :rtype: astroid.bases.NodeNG, or a list of nodes. 
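# --------------------------------------------------------------------------
# Illustrative sketch -- not part of the patch above.  `mro()` now tolerates
# old-style ancestors by falling back to `ancestors()` for them; for a plain
# new-style hierarchy it is straight C3 linearization.
from astroid import test_utils

cls = test_utils.extract_node("""
class A(object): pass
class B(A): pass
class C(A): pass
class D(B, C): pass
""")
assert [klass.name for klass in cls.mro()] == ['D', 'B', 'C', 'A', 'object']
# -- end sketch ------------------------------------------------------------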
""" def _extract(node): - if isinstance(node, nodes.Discard): + if isinstance(node, nodes.Expr): return node.value else: return node @@ -152,7 +150,7 @@ def _extract(node): if line.strip().endswith(_STATEMENT_SELECTOR): requested_lines.append(idx + 1) - tree = build_module(code, module_name=module_name) + tree = builder.parse(code, module_name=module_name) extracted = [] if requested_lines: for line in requested_lines: @@ -171,21 +169,6 @@ def _extract(node): return extracted -def build_module(code, module_name='', path=None): - """Parses a string module with a builder. - :param code: The code for the module. - :type code: str - :param module_name: The name for the module - :type module_name: str - :param path: The path for the module - :type module_name: str - :returns: The module AST. - :rtype: astroid.bases.NodeNG - """ - code = textwrap.dedent(code) - return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path) - - def require_version(minver=None, maxver=None): """ Compare version of python interpreter to the given one. Skip the test if older. diff --git a/pymode/libs/pkg_resources/_vendor/__init__.py b/pymode/libs/astroid/tests/__init__.py similarity index 100% rename from pymode/libs/pkg_resources/_vendor/__init__.py rename to pymode/libs/astroid/tests/__init__.py diff --git a/pymode/libs/astroid/tests/resources.py b/pymode/libs/astroid/tests/resources.py new file mode 100644 index 00000000..7988d053 --- /dev/null +++ b/pymode/libs/astroid/tests/resources.py @@ -0,0 +1,72 @@ +# Copyright 2014 Google, Inc. All rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +import os +import sys + +import pkg_resources + +from astroid import builder +from astroid import MANAGER +from astroid.bases import BUILTINS + + +DATA_DIR = 'testdata/python{}/'.format(sys.version_info[0]) + +def find(name): + return pkg_resources.resource_filename( + 'astroid.tests', os.path.normpath(os.path.join(DATA_DIR, name))) + + +def build_file(path, modname=None): + return builder.AstroidBuilder().file_build(find(path), modname) + + +class SysPathSetup(object): + def setUp(self): + sys.path.insert(0, find('')) + + def tearDown(self): + del sys.path[0] + datadir = find('') + for key in list(sys.path_importer_cache): + if key.startswith(datadir): + del sys.path_importer_cache[key] + + +class AstroidCacheSetupMixin(object): + """Mixin for handling the astroid cache problems. + + When clearing the astroid cache, some tests fails due to + cache inconsistencies, where some objects had a different + builtins object referenced. + This saves the builtins module and makes sure to add it + back to the astroid_cache after the tests finishes. 
+ The builtins module is special, since some of the + transforms for a couple of its objects (str, bytes etc) + are executed only once, so astroid_bootstrapping will be + useless for retrieving the original builtins module. + """ + + @classmethod + def setUpClass(cls): + cls._builtins = MANAGER.astroid_cache.get(BUILTINS) + + @classmethod + def tearDownClass(cls): + if cls._builtins: + MANAGER.astroid_cache[BUILTINS] = cls._builtins diff --git a/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg b/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg new file mode 100644 index 0000000000000000000000000000000000000000..f62599c7b10469b9eadabb31e9a42128a769c7cd GIT binary patch literal 1222 zcmWIWW@Zs#U|`^2V0WyvGL3P1+yvw;0%Bnx&aEt{EJ)OkkI&4@EQycTE2vDq{ik!f z_N!x_=Tq1;5?GSh74)7l32E~11UgKbs2I`Asd9`*h35;;UlWH_UOO6HYHA|00t?gX z;%fG=d001TTw@jTTsqTI^OvXQ%%iGRmNP4titd`3CYJVV<;$19c1~rT%G&wsg1;w&+sQ$d~(!s_JietmCU zt#fD2clU>H2n{g5U>x$C3CR?Y$GxZZOpXTX?t_}->h7-V>F4IJAM76*_t<%}h{;@`zS&LcdYtZG(rN*BxefrA0=WeO(-#dQ{Rhs@f zH_wS}{_3UWW$+{@h&$+WP|)W|+K-EkK5y#YsHJsMzvH~8uJ>8Sljx37u41p@1UiHr zh(TV1JEkPRAU-FxEHww@oYQM{R_J&&P!l5%%OYz|Ni9gtOG(X3u8hyg z%*!qYneiB(Zb4+-Rhb34#ffRD7&;CGJDSu1Rc;4j6deKHkRbf*tLy3GspENt7ZMAb zgAA@1KltQ*#&>JbhqXK_cs!mootAjf_@v3ZxLCMbYpqDoC(%!zyp4=LU)pK&sW`Zl zTj+A*ET_MF{{A`qcZZC(x6!BW3qNg1;w&+sQ$d~(!s_JietmCU zt#fD2clU>H2n{g5U>x$C3CR?Y$GxZZOpXTX?t_}->h7-V>F4IJAM76*_t<%}h{;@`zS&LcdYtZG(rN*BxefrA0=WeO(-#dQ{Rhs@f zH_wS}{_3UWW$+{@h&$+WP|)W|+K-EkK5y#YsHJsMzvH~8uJ>8Sljx37u41p@1UiHr zh(TV1JEkPRAU-FxEHww@oYQM{R_J&&P!l5%%OYz|Ni9gtOG(X3u8hyg z%*!qYneiB(Zb4+-Rhb34#ffRD7&;CGJDSu1Rc;4j6deKHkRbf*tLy3GspENt7ZMAb zgAA@1KltQ*#&>JbhqXK_cs!mootAjf_@v3ZxLCMbYpqDoC(%!zyp4=LU)pK&sW`Zl zTj+A*ET_MF{{A`qcZZC(x6!BW3qN> (2) +c = ~b +c = not b +d = [c] +e = d[:] +e = d[a:b:c] +raise_string(*args, **kwargs) +print >> stream, 'bonjour' +print >> stream, 'salut', + +def make_class(any, base=data.module.YO, *args, **kwargs): + """check base is correctly resolved to Concrete0""" + + + class Aaaa(base): + """dynamic class""" + + + return Aaaa +from os.path import abspath +import os as myos + + +class A: + pass + + + +class A(A): + pass + + +def generator(): + """A generator.""" + yield + +def not_a_generator(): + """A function that contains generator, but is not one.""" + + def generator(): + yield + genl = lambda : (yield) + +def with_metaclass(meta, *bases): + return meta('NewBase', bases, {}) + + +class NotMetaclass(with_metaclass(Metaclass)): + pass + + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py new file mode 100644 index 00000000..e1d6e4a1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py @@ -0,0 +1,36 @@ +import unittest + + +class TestCase(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + + + def tearDown(self): + unittest.TestCase.tearDown(self) + + def testIt(self): + self.a = 10 + self.xxx() + + + def xxx(self): + if False: + pass + print 'a' + + if False: + pass + pass + + if False: + pass + print 'rara' + + +if __name__ == '__main__': + print 'test2' + unittest.main() + + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py b/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py new file mode 100644 index 00000000..813469fe --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py @@ -0,0 +1,57 @@ +from __future__ import generators, print_function + +try: + enumerate = 
enumerate +except NameError: + + def enumerate(iterable): + """emulates the python2.3 enumerate() function""" + i = 0 + for val in iterable: + yield i, val + i += 1 + +def toto(value): + for k, v in value: + print(v.get('yo')) + + +import imp +fp, mpath, desc = imp.find_module('optparse',a) +s_opt = imp.load_module('std_optparse', fp, mpath, desc) + +class OptionParser(s_opt.OptionParser): + + def parse_args(self, args=None, values=None, real_optparse=False): + if real_optparse: + pass +## return super(OptionParser, self).parse_args() + else: + import optcomp + optcomp.completion(self) + + +class Aaa(object): + """docstring""" + def __init__(self): + self.__setattr__('a','b') + pass + + def one_public(self): + """docstring""" + pass + + def another_public(self): + """docstring""" + pass + +class Ccc(Aaa): + """docstring""" + + class Ddd(Aaa): + """docstring""" + pass + + class Eee(Ddd): + """docstring""" + pass diff --git a/pymode/libs/astroid/tests/testdata/python2/data/notall.py b/pymode/libs/astroid/tests/testdata/python2/data/notall.py new file mode 100644 index 00000000..7be27b18 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/notall.py @@ -0,0 +1,7 @@ +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print('yo') + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py new file mode 100644 index 00000000..575d18b1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py @@ -0,0 +1,4 @@ +"""package's __init__ file""" + + +from . import subpackage diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py b/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py new file mode 100644 index 00000000..33ed117c --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import, print_function +import import_package_subpackage_module # fail +print(import_package_subpackage_module) + +from . import hello as hola + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py b/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py new file mode 100644 index 00000000..b154c844 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py @@ -0,0 +1,2 @@ +"""hello module""" + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py b/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py new file mode 100644 index 00000000..ad442c16 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py @@ -0,0 +1,49 @@ +# pylint: disable-msg=I0011,C0301,W0611 +"""I found some of my scripts trigger off an AttributeError in pylint +0.8.1 (with common 0.12.0 and astroid 0.13.1). + +Traceback (most recent call last): + File "/usr/bin/pylint", line 4, in ? 
+ lint.Run(sys.argv[1:]) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 729, in __init__ + linter.check(args) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 412, in check + self.check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 426, in check_file + astroid = self._check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 450, in _check_file + self.check_astroid_module(astroid, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 494, in check_astroid_module + self.astroid_events(astroid, [checker for checker in checkers + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 508, in astroid_events + checker.visit(astroid) + File "/usr/lib/python2.4/site-packages/logilab/astroid/utils.py", line 84, in visit + method(node) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 295, in visit_import + self._check_module_attrs(node, module, name_parts[1:]) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 357, in _check_module_attrs + self.add_message('E0611', args=(name, module.name), +AttributeError: Import instance has no attribute 'name' + + +You can reproduce it by: +(1) create package structure like the following: + +package/ + __init__.py + subpackage/ + __init__.py + module.py + +(2) in package/__init__.py write: + +import subpackage + +(3) run pylint with a script importing package.subpackage.module. 
+""" +__revision__ = '$Id: import_package_subpackage_module.py,v 1.1 2005-11-10 15:59:32 syt Exp $' +import package.subpackage.module diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py new file mode 100644 index 00000000..dc4782e6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py @@ -0,0 +1 @@ +"""package.subpackage""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py new file mode 100644 index 00000000..4b7244ba --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py @@ -0,0 +1 @@ +"""package.subpackage.module""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/recursion.py b/pymode/libs/astroid/tests/testdata/python2/data/recursion.py new file mode 100644 index 00000000..85f65134 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/recursion.py @@ -0,0 +1,3 @@ +""" For issue #25 """ +class Base(object): + pass \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py b/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py new file mode 100644 index 00000000..ddacb477 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py @@ -0,0 +1,13 @@ +""" file suppliermodule.py """ + +class NotImplemented(Exception): + pass + +class Interface: + def get_value(self): + raise NotImplemented() + + def set_value(self, value): + raise NotImplemented() + +class DoNothing : pass diff --git a/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py new file mode 100644 index 00000000..713e5591 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py @@ -0,0 +1 @@ +x = "șțîâ" \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg b/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg new file mode 100644 index 0000000000000000000000000000000000000000..f62599c7b10469b9eadabb31e9a42128a769c7cd GIT binary patch literal 1222 zcmWIWW@Zs#U|`^2V0WyvGL3P1+yvw;0%Bnx&aEt{EJ)OkkI&4@EQycTE2vDq{ik!f z_N!x_=Tq1;5?GSh74)7l32E~11UgKbs2I`Asd9`*h35;;UlWH_UOO6HYHA|00t?gX z;%fG=d001TTw@jTTsqTI^OvXQ%%iGRmNP4titd`3CYJVV<;$19c1~rT%G&wsg1;w&+sQ$d~(!s_JietmCU zt#fD2clU>H2n{g5U>x$C3CR?Y$GxZZOpXTX?t_}->h7-V>F4IJAM76*_t<%}h{;@`zS&LcdYtZG(rN*BxefrA0=WeO(-#dQ{Rhs@f zH_wS}{_3UWW$+{@h&$+WP|)W|+K-EkK5y#YsHJsMzvH~8uJ>8Sljx37u41p@1UiHr zh(TV1JEkPRAU-FxEHww@oYQM{R_J&&P!l5%%OYz|Ni9gtOG(X3u8hyg z%*!qYneiB(Zb4+-Rhb34#ffRD7&;CGJDSu1Rc;4j6deKHkRbf*tLy3GspENt7ZMAb zgAA@1KltQ*#&>JbhqXK_cs!mootAjf_@v3ZxLCMbYpqDoC(%!zyp4=LU)pK&sW`Zl zTj+A*ET_MF{{A`qcZZC(x6!BW3qNg1;w&+sQ$d~(!s_JietmCU zt#fD2clU>H2n{g5U>x$C3CR?Y$GxZZOpXTX?t_}->h7-V>F4IJAM76*_t<%}h{;@`zS&LcdYtZG(rN*BxefrA0=WeO(-#dQ{Rhs@f zH_wS}{_3UWW$+{@h&$+WP|)W|+K-EkK5y#YsHJsMzvH~8uJ>8Sljx37u41p@1UiHr zh(TV1JEkPRAU-FxEHww@oYQM{R_J&&P!l5%%OYz|Ni9gtOG(X3u8hyg z%*!qYneiB(Zb4+-Rhb34#ffRD7&;CGJDSu1Rc;4j6deKHkRbf*tLy3GspENt7ZMAb 
zgAA@1KltQ*#&>JbhqXK_cs!mootAjf_@v3ZxLCMbYpqDoC(%!zyp4=LU)pK&sW`Zl zTj+A*ET_MF{{A`qcZZC(x6!BW3qN> (2) +c = ~b +c = not b +d = [c] +e = d[:] +e = d[a:b:c] +raise_string(*args, **kwargs) +print('bonjour', file=stream) +print('salut', end=' ', file=stream) + +def make_class(any, base=data.module.YO, *args, **kwargs): + """check base is correctly resolved to Concrete0""" + + + class Aaaa(base): + """dynamic class""" + + + return Aaaa +from os.path import abspath +import os as myos + + +class A: + pass + + + +class A(A): + pass + + +def generator(): + """A generator.""" + yield + +def not_a_generator(): + """A function that contains generator, but is not one.""" + + def generator(): + yield + genl = lambda : (yield) + +def with_metaclass(meta, *bases): + return meta('NewBase', bases, {}) + + +class NotMetaclass(with_metaclass(Metaclass)): + pass + + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py new file mode 100644 index 00000000..e17b92cc --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py @@ -0,0 +1,36 @@ +import unittest + + +class TestCase(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + + + def tearDown(self): + unittest.TestCase.tearDown(self) + + def testIt(self): + self.a = 10 + self.xxx() + + + def xxx(self): + if False: + pass + print('a') + + if False: + pass + pass + + if False: + pass + print('rara') + + +if __name__ == '__main__': + print('test2') + unittest.main() + + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py b/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py new file mode 100644 index 00000000..78765c85 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py @@ -0,0 +1,57 @@ + + +try: + enumerate = enumerate +except NameError: + + def enumerate(iterable): + """emulates the python2.3 enumerate() function""" + i = 0 + for val in iterable: + yield i, val + i += 1 + +def toto(value): + for k, v in value: + print(v.get('yo')) + + +import imp +fp, mpath, desc = imp.find_module('optparse',a) +s_opt = imp.load_module('std_optparse', fp, mpath, desc) + +class OptionParser(s_opt.OptionParser): + + def parse_args(self, args=None, values=None, real_optparse=False): + if real_optparse: + pass +## return super(OptionParser, self).parse_args() + else: + import optcomp + optcomp.completion(self) + + +class Aaa(object): + """docstring""" + def __init__(self): + self.__setattr__('a','b') + pass + + def one_public(self): + """docstring""" + pass + + def another_public(self): + """docstring""" + pass + +class Ccc(Aaa): + """docstring""" + + class Ddd(Aaa): + """docstring""" + pass + + class Eee(Ddd): + """docstring""" + pass diff --git a/pymode/libs/astroid/tests/testdata/python3/data/notall.py b/pymode/libs/astroid/tests/testdata/python3/data/notall.py new file mode 100644 index 00000000..9d35aa3a --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/notall.py @@ -0,0 +1,8 @@ + +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print('yo') + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py new file mode 100644 index 00000000..575d18b1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py @@ -0,0 +1,4 @@ +"""package's __init__ file""" + + +from . 
import subpackage diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py b/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py new file mode 100644 index 00000000..33ed117c --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import, print_function +import import_package_subpackage_module # fail +print(import_package_subpackage_module) + +from . import hello as hola + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py b/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py new file mode 100644 index 00000000..b154c844 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py @@ -0,0 +1,2 @@ +"""hello module""" + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py b/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py new file mode 100644 index 00000000..ad442c16 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py @@ -0,0 +1,49 @@ +# pylint: disable-msg=I0011,C0301,W0611 +"""I found some of my scripts trigger off an AttributeError in pylint +0.8.1 (with common 0.12.0 and astroid 0.13.1). + +Traceback (most recent call last): + File "/usr/bin/pylint", line 4, in ? + lint.Run(sys.argv[1:]) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 729, in __init__ + linter.check(args) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 412, in check + self.check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 426, in check_file + astroid = self._check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 450, in _check_file + self.check_astroid_module(astroid, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 494, in check_astroid_module + self.astroid_events(astroid, [checker for checker in checkers + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 508, in astroid_events + checker.visit(astroid) + File "/usr/lib/python2.4/site-packages/logilab/astroid/utils.py", line 84, in visit + method(node) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 295, in visit_import + self._check_module_attrs(node, module, name_parts[1:]) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 357, in _check_module_attrs + self.add_message('E0611', args=(name, module.name), +AttributeError: Import instance has no attribute 'name' + + +You can reproduce it by: +(1) create package structure like the following: + +package/ + __init__.py + subpackage/ + __init__.py + module.py + +(2) in package/__init__.py write: + +import subpackage + +(3) run pylint with a script importing package.subpackage.module. 
+""" +__revision__ = '$Id: import_package_subpackage_module.py,v 1.1 2005-11-10 15:59:32 syt Exp $' +import package.subpackage.module diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py new file mode 100644 index 00000000..dc4782e6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py @@ -0,0 +1 @@ +"""package.subpackage""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py new file mode 100644 index 00000000..4b7244ba --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py @@ -0,0 +1 @@ +"""package.subpackage.module""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/recursion.py b/pymode/libs/astroid/tests/testdata/python3/data/recursion.py new file mode 100644 index 00000000..85f65134 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/recursion.py @@ -0,0 +1,3 @@ +""" For issue #25 """ +class Base(object): + pass \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py b/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py new file mode 100644 index 00000000..ddacb477 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py @@ -0,0 +1,13 @@ +""" file suppliermodule.py """ + +class NotImplemented(Exception): + pass + +class Interface: + def get_value(self): + raise NotImplemented() + + def set_value(self, value): + raise NotImplemented() + +class DoNothing : pass diff --git a/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py new file mode 100644 index 00000000..713e5591 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py @@ -0,0 +1 @@ +x = "șțîâ" \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/unittest_brain.py b/pymode/libs/astroid/tests/unittest_brain.py new file mode 100644 index 00000000..9dbbe1d0 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_brain.py @@ -0,0 +1,506 @@ +# Copyright 2013 Google Inc. All Rights Reserved. +# +# This file is part of astroid. +# +# logilab-astng is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# logilab-astng is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-astng. If not, see . 
+"""Tests for basic functionality in astroid.brain.""" +import sys +import unittest + +import six + +from astroid import MANAGER +from astroid import bases +from astroid import builder +from astroid import nodes +from astroid import test_utils +from astroid import util +import astroid + + +try: + import nose # pylint: disable=unused-import + HAS_NOSE = True +except ImportError: + HAS_NOSE = False + +try: + import multiprocessing # pylint: disable=unused-import + HAS_MULTIPROCESSING = True +except ImportError: + HAS_MULTIPROCESSING = False + +try: + import enum # pylint: disable=unused-import + HAS_ENUM = True +except ImportError: + HAS_ENUM = False + +try: + import dateutil # pylint: disable=unused-import + HAS_DATEUTIL = True +except ImportError: + HAS_DATEUTIL = False + +try: + import numpy # pylint: disable=unused-import + HAS_NUMPY = True +except ImportError: + HAS_NUMPY = False + +try: + import pytest # pylint: disable=unused-import + HAS_PYTEST = True +except ImportError: + HAS_PYTEST = False + + +class HashlibTest(unittest.TestCase): + def test_hashlib(self): + """Tests that brain extensions for hashlib work.""" + hashlib_module = MANAGER.ast_from_module_name('hashlib') + for class_name in ['md5', 'sha1']: + class_obj = hashlib_module[class_name] + self.assertIn('update', class_obj) + self.assertIn('digest', class_obj) + self.assertIn('hexdigest', class_obj) + self.assertIn('block_size', class_obj) + self.assertIn('digest_size', class_obj) + self.assertEqual(len(class_obj['__init__'].args.args), 2) + self.assertEqual(len(class_obj['__init__'].args.defaults), 1) + self.assertEqual(len(class_obj['update'].args.args), 2) + self.assertEqual(len(class_obj['digest'].args.args), 1) + self.assertEqual(len(class_obj['hexdigest'].args.args), 1) + + +class NamedTupleTest(unittest.TestCase): + + def test_namedtuple_base(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + class X(namedtuple("X", ["a", "b", "c"])): + pass + """) + self.assertEqual( + [anc.name for anc in klass.ancestors()], + ['X', 'tuple', 'object']) + for anc in klass.ancestors(): + self.assertFalse(anc.parent is None) + + def test_namedtuple_inference(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + name = "X" + fields = ["a", "b", "c"] + class X(namedtuple(name, fields)): + pass + """) + for base in klass.ancestors(): + if base.name == 'X': + break + self.assertSetEqual({"a", "b", "c"}, set(base._instance_attrs)) + + def test_namedtuple_inference_failure(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + def foo(fields): + return __(namedtuple("foo", fields)) + """) + self.assertIs(util.YES, next(klass.infer())) + + @unittest.skipIf(sys.version_info[0] > 2, + 'namedtuple inference is broken on Python 3') + def test_namedtuple_advanced_inference(self): + # urlparse return an object of class ParseResult, which has a + # namedtuple call and a mixin as base classes + result = test_utils.extract_node(""" + import urlparse + + result = __(urlparse.urlparse('gopher://')) + """) + instance = next(result.infer()) + self.assertEqual(len(instance.getattr('scheme')), 1) + self.assertEqual(len(instance.getattr('port')), 1) + with self.assertRaises(astroid.NotFoundError): + instance.getattr('foo') + self.assertEqual(len(instance.getattr('geturl')), 1) + self.assertEqual(instance.name, 'ParseResult') + + def test_namedtuple_instance_attrs(self): + result = test_utils.extract_node(''' + from collections import namedtuple + 
namedtuple('a', 'a b c')(1, 2, 3) #@ + ''') + inferred = next(result.infer()) + for name, attr in inferred._instance_attrs.items(): + self.assertEqual(attr[0].attrname, name) + + def test_namedtuple_uninferable_fields(self): + node = test_utils.extract_node(''' + x = [A] * 2 + from collections import namedtuple + l = namedtuple('a', x) + l(1) + ''') + inferred = next(node.infer()) + self.assertIs(util.YES, inferred) + + +class ModuleExtenderTest(unittest.TestCase): + def testExtensionModules(self): + transformer = MANAGER._transform + for extender, _ in transformer.transforms[nodes.Module]: + n = nodes.Module('__main__', None) + extender(n) + + +@unittest.skipUnless(HAS_NOSE, "This test requires nose library.") +class NoseBrainTest(unittest.TestCase): + + def test_nose_tools(self): + methods = test_utils.extract_node(""" + from nose.tools import assert_equal + from nose.tools import assert_equals + from nose.tools import assert_true + assert_equal = assert_equal #@ + assert_true = assert_true #@ + assert_equals = assert_equals #@ + """) + assert_equal = next(methods[0].value.infer()) + assert_true = next(methods[1].value.infer()) + assert_equals = next(methods[2].value.infer()) + + self.assertIsInstance(assert_equal, astroid.BoundMethod) + self.assertIsInstance(assert_true, astroid.BoundMethod) + self.assertIsInstance(assert_equals, astroid.BoundMethod) + self.assertEqual(assert_equal.qname(), + 'unittest.case.TestCase.assertEqual') + self.assertEqual(assert_true.qname(), + 'unittest.case.TestCase.assertTrue') + self.assertEqual(assert_equals.qname(), + 'unittest.case.TestCase.assertEqual') + + +class SixBrainTest(unittest.TestCase): + + def test_attribute_access(self): + ast_nodes = test_utils.extract_node(''' + import six + six.moves.http_client #@ + six.moves.urllib_parse #@ + six.moves.urllib_error #@ + six.moves.urllib.request #@ + ''') + http_client = next(ast_nodes[0].infer()) + self.assertIsInstance(http_client, nodes.Module) + self.assertEqual(http_client.name, + 'http.client' if six.PY3 else 'httplib') + + urllib_parse = next(ast_nodes[1].infer()) + if six.PY3: + self.assertIsInstance(urllib_parse, nodes.Module) + self.assertEqual(urllib_parse.name, 'urllib.parse') + else: + # On Python 2, this is a fake module, the same behaviour + # being mimicked in brain's tip for six.moves. + self.assertIsInstance(urllib_parse, astroid.Instance) + urljoin = next(urllib_parse.igetattr('urljoin')) + urlencode = next(urllib_parse.igetattr('urlencode')) + if six.PY2: + # In reality it's a function, but our implementations + # transforms it into a method. + self.assertIsInstance(urljoin, astroid.BoundMethod) + self.assertEqual(urljoin.qname(), 'urlparse.urljoin') + self.assertIsInstance(urlencode, astroid.BoundMethod) + self.assertEqual(urlencode.qname(), 'urllib.urlencode') + else: + self.assertIsInstance(urljoin, nodes.FunctionDef) + self.assertEqual(urljoin.qname(), 'urllib.parse.urljoin') + self.assertIsInstance(urlencode, nodes.FunctionDef) + self.assertEqual(urlencode.qname(), 'urllib.parse.urlencode') + + urllib_error = next(ast_nodes[2].infer()) + if six.PY3: + self.assertIsInstance(urllib_error, nodes.Module) + self.assertEqual(urllib_error.name, 'urllib.error') + else: + # On Python 2, this is a fake module, the same behaviour + # being mimicked in brain's tip for six.moves. 
+ self.assertIsInstance(urllib_error, astroid.Instance) + urlerror = next(urllib_error.igetattr('URLError')) + self.assertIsInstance(urlerror, nodes.ClassDef) + content_too_short = next(urllib_error.igetattr('ContentTooShortError')) + self.assertIsInstance(content_too_short, nodes.ClassDef) + + urllib_request = next(ast_nodes[3].infer()) + if six.PY3: + self.assertIsInstance(urllib_request, nodes.Module) + self.assertEqual(urllib_request.name, 'urllib.request') + else: + self.assertIsInstance(urllib_request, astroid.Instance) + urlopen = next(urllib_request.igetattr('urlopen')) + urlretrieve = next(urllib_request.igetattr('urlretrieve')) + if six.PY2: + # In reality it's a function, but our implementations + # transforms it into a method. + self.assertIsInstance(urlopen, astroid.BoundMethod) + self.assertEqual(urlopen.qname(), 'urllib2.urlopen') + self.assertIsInstance(urlretrieve, astroid.BoundMethod) + self.assertEqual(urlretrieve.qname(), 'urllib.urlretrieve') + else: + self.assertIsInstance(urlopen, nodes.FunctionDef) + self.assertEqual(urlopen.qname(), 'urllib.request.urlopen') + self.assertIsInstance(urlretrieve, nodes.FunctionDef) + self.assertEqual(urlretrieve.qname(), 'urllib.request.urlretrieve') + + def test_from_imports(self): + ast_node = test_utils.extract_node(''' + from six.moves import http_client + http_client.HTTPSConnection #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + if six.PY3: + qname = 'http.client.HTTPSConnection' + else: + qname = 'httplib.HTTPSConnection' + self.assertEqual(inferred.qname(), qname) + + +@unittest.skipUnless(HAS_MULTIPROCESSING, + 'multiprocesing is required for this test, but ' + 'on some platforms it is missing ' + '(Jython for instance)') +class MultiprocessingBrainTest(unittest.TestCase): + + def test_multiprocessing_module_attributes(self): + # Test that module attributes are working, + # especially on Python 3.4+, where they are obtained + # from a context. 
+ module = test_utils.extract_node(""" + import multiprocessing + """) + module = module.do_import_module('multiprocessing') + cpu_count = next(module.igetattr('cpu_count')) + if sys.version_info < (3, 4): + self.assertIsInstance(cpu_count, nodes.FunctionDef) + else: + self.assertIsInstance(cpu_count, astroid.BoundMethod) + + def test_module_name(self): + module = test_utils.extract_node(""" + import multiprocessing + multiprocessing.SyncManager() + """) + inferred_sync_mgr = next(module.infer()) + module = inferred_sync_mgr.root() + self.assertEqual(module.name, 'multiprocessing.managers') + + def test_multiprocessing_manager(self): + # Test that we have the proper attributes + # for a multiprocessing.managers.SyncManager + module = builder.parse(""" + import multiprocessing + manager = multiprocessing.Manager() + queue = manager.Queue() + joinable_queue = manager.JoinableQueue() + event = manager.Event() + rlock = manager.RLock() + bounded_semaphore = manager.BoundedSemaphore() + condition = manager.Condition() + barrier = manager.Barrier() + pool = manager.Pool() + list = manager.list() + dict = manager.dict() + value = manager.Value() + array = manager.Array() + namespace = manager.Namespace() + """) + queue = next(module['queue'].infer()) + self.assertEqual(queue.qname(), + "{}.Queue".format(six.moves.queue.__name__)) + + joinable_queue = next(module['joinable_queue'].infer()) + self.assertEqual(joinable_queue.qname(), + "{}.Queue".format(six.moves.queue.__name__)) + + event = next(module['event'].infer()) + event_name = "threading.{}".format("Event" if six.PY3 else "_Event") + self.assertEqual(event.qname(), event_name) + + rlock = next(module['rlock'].infer()) + rlock_name = "threading._RLock" + self.assertEqual(rlock.qname(), rlock_name) + + bounded_semaphore = next(module['bounded_semaphore'].infer()) + semaphore_name = "threading.{}".format( + "BoundedSemaphore" if six.PY3 else "_BoundedSemaphore") + self.assertEqual(bounded_semaphore.qname(), semaphore_name) + + pool = next(module['pool'].infer()) + pool_name = "multiprocessing.pool.Pool" + self.assertEqual(pool.qname(), pool_name) + + for attr in ('list', 'dict'): + obj = next(module[attr].infer()) + self.assertEqual(obj.qname(), + "{}.{}".format(bases.BUILTINS, attr)) + + array = next(module['array'].infer()) + self.assertEqual(array.qname(), "array.array") + + manager = next(module['manager'].infer()) + # Verify that we have these attributes + self.assertTrue(manager.getattr('start')) + self.assertTrue(manager.getattr('shutdown')) + + +@unittest.skipUnless(HAS_ENUM, + 'The enum module was only added in Python 3.4. Support for ' + 'older Python versions may be available through the enum34 ' + 'compatibility module.') +class EnumBrainTest(unittest.TestCase): + + def test_simple_enum(self): + module = builder.parse(""" + import enum + + class MyEnum(enum.Enum): + one = "one" + two = "two" + + def mymethod(self, x): + return 5 + + """) + + enum = next(module['MyEnum'].infer()) + one = enum['one'] + self.assertEqual(one.pytype(), '.MyEnum.one') + + property_type = '{}.property'.format(bases.BUILTINS) + for propname in ('name', 'value'): + prop = next(iter(one.getattr(propname))) + self.assertIn(property_type, prop.decoratornames()) + + meth = one.getattr('mymethod')[0] + self.assertIsInstance(meth, astroid.FunctionDef) + + def test_looks_like_enum_false_positive(self): + # Test that a class named Enumeration is not considered a builtin enum. 
+ module = builder.parse(''' + class Enumeration(object): + def __init__(self, name, enum_list): + pass + test = 42 + ''') + enum = module['Enumeration'] + test = next(enum.igetattr('test')) + self.assertEqual(test.value, 42) + + def test_enum_multiple_base_classes(self): + module = builder.parse(""" + import enum + + class Mixin: + pass + + class MyEnum(Mixin, enum.Enum): + one = 1 + """) + enum = next(module['MyEnum'].infer()) + one = enum['one'] + + clazz = one.getattr('__class__')[0] + self.assertTrue(clazz.is_subtype_of('.Mixin'), + 'Enum instance should share base classes with generating class') + + def test_int_enum(self): + module = builder.parse(""" + import enum + + class MyEnum(enum.IntEnum): + one = 1 + """) + + enum = next(module['MyEnum'].infer()) + one = enum['one'] + + clazz = one.getattr('__class__')[0] + int_type = '{}.{}'.format(bases.BUILTINS, 'int') + self.assertTrue(clazz.is_subtype_of(int_type), + 'IntEnum based enums should be a subtype of int') + + def test_enum_func_form_is_class_not_instance(self): + cls, instance = test_utils.extract_node(''' + from enum import Enum + f = Enum('Audience', ['a', 'b', 'c']) + f #@ + f(1) #@ + ''') + inferred_cls = next(cls.infer()) + self.assertIsInstance(inferred_cls, bases.Instance) + inferred_instance = next(instance.infer()) + self.assertIsInstance(inferred_instance, bases.Instance) + self.assertIsInstance(next(inferred_instance.igetattr('name')), nodes.Const) + self.assertIsInstance(next(inferred_instance.igetattr('value')), nodes.Const) + + +@unittest.skipUnless(HAS_DATEUTIL, "This test requires the dateutil library.") +class DateutilBrainTest(unittest.TestCase): + def test_parser(self): + module = builder.parse(""" + from dateutil.parser import parse + d = parse('2000-01-01') + """) + d_type = next(module['d'].infer()) + self.assertEqual(d_type.qname(), "datetime.datetime") + + +@unittest.skipUnless(HAS_NUMPY, "This test requires the numpy library.") +class NumpyBrainTest(unittest.TestCase): + + def test_numpy(self): + node = test_utils.extract_node(''' + import numpy + numpy.ones #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.FunctionDef) + + +@unittest.skipUnless(HAS_PYTEST, "This test requires the pytest library.") +class PytestBrainTest(unittest.TestCase): + + def test_pytest(self): + ast_node = test_utils.extract_node(''' + import pytest + pytest #@ + ''') + module = next(ast_node.infer()) + attrs = ['deprecated_call', 'warns', 'exit', 'fail', 'skip', + 'importorskip', 'xfail', 'mark', 'raises', 'freeze_includes', + 'set_trace', 'fixture', 'yield_fixture'] + if pytest.__version__.split('.')[0] == '3': + attrs += ['approx', 'register_assert_rewrite'] + + for attr in attrs: + self.assertIn(attr, module) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_builder.py b/pymode/libs/astroid/tests/unittest_builder.py new file mode 100644 index 00000000..920f36e8 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_builder.py @@ -0,0 +1,774 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. 
+# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for the astroid builder and rebuilder module""" + +import os +import sys +import unittest + +import six + +from astroid import builder +from astroid import exceptions +from astroid import manager +from astroid import nodes +from astroid import test_utils +from astroid import util +from astroid.tests import resources + +MANAGER = manager.AstroidManager() +BUILTINS = six.moves.builtins.__name__ + + +class FromToLineNoTest(unittest.TestCase): + + def setUp(self): + self.astroid = resources.build_file('data/format.py') + + def test_callfunc_lineno(self): + stmts = self.astroid.body + # on line 4: + # function('aeozrijz\ + # earzer', hop) + discard = stmts[0] + self.assertIsInstance(discard, nodes.Expr) + self.assertEqual(discard.fromlineno, 4) + self.assertEqual(discard.tolineno, 5) + callfunc = discard.value + self.assertIsInstance(callfunc, nodes.Call) + self.assertEqual(callfunc.fromlineno, 4) + self.assertEqual(callfunc.tolineno, 5) + name = callfunc.func + self.assertIsInstance(name, nodes.Name) + self.assertEqual(name.fromlineno, 4) + self.assertEqual(name.tolineno, 4) + strarg = callfunc.args[0] + self.assertIsInstance(strarg, nodes.Const) + if hasattr(sys, 'pypy_version_info'): + lineno = 4 + else: + lineno = 5 # no way for this one in CPython (is 4 actually) + self.assertEqual(strarg.fromlineno, lineno) + self.assertEqual(strarg.tolineno, lineno) + namearg = callfunc.args[1] + self.assertIsInstance(namearg, nodes.Name) + self.assertEqual(namearg.fromlineno, 5) + self.assertEqual(namearg.tolineno, 5) + # on line 10: + # fonction(1, + # 2, + # 3, + # 4) + discard = stmts[2] + self.assertIsInstance(discard, nodes.Expr) + self.assertEqual(discard.fromlineno, 10) + self.assertEqual(discard.tolineno, 13) + callfunc = discard.value + self.assertIsInstance(callfunc, nodes.Call) + self.assertEqual(callfunc.fromlineno, 10) + self.assertEqual(callfunc.tolineno, 13) + name = callfunc.func + self.assertIsInstance(name, nodes.Name) + self.assertEqual(name.fromlineno, 10) + self.assertEqual(name.tolineno, 10) + for i, arg in enumerate(callfunc.args): + self.assertIsInstance(arg, nodes.Const) + self.assertEqual(arg.fromlineno, 10+i) + self.assertEqual(arg.tolineno, 10+i) + + def test_function_lineno(self): + stmts = self.astroid.body + # on line 15: + # def definition(a, + # b, + # c): + # return a + b + c + function = stmts[3] + self.assertIsInstance(function, nodes.FunctionDef) + self.assertEqual(function.fromlineno, 15) + self.assertEqual(function.tolineno, 18) + return_ = function.body[0] + self.assertIsInstance(return_, nodes.Return) + self.assertEqual(return_.fromlineno, 18) + self.assertEqual(return_.tolineno, 18) + if sys.version_info < (3, 0): + self.assertEqual(function.blockstart_tolineno, 17) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + def test_decorated_function_lineno(self): + astroid = builder.parse(''' + @decorator + def function( + arg): + print (arg) + ''', __name__) + function = astroid['function'] + self.assertEqual(function.fromlineno, 3) # XXX discussable, but that's what is expected by pylint right now + self.assertEqual(function.tolineno, 5) + 
self.assertEqual(function.decorators.fromlineno, 2) + self.assertEqual(function.decorators.tolineno, 2) + if sys.version_info < (3, 0): + self.assertEqual(function.blockstart_tolineno, 4) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + + def test_class_lineno(self): + stmts = self.astroid.body + # on line 20: + # class debile(dict, + # object): + # pass + class_ = stmts[4] + self.assertIsInstance(class_, nodes.ClassDef) + self.assertEqual(class_.fromlineno, 20) + self.assertEqual(class_.tolineno, 22) + self.assertEqual(class_.blockstart_tolineno, 21) + pass_ = class_.body[0] + self.assertIsInstance(pass_, nodes.Pass) + self.assertEqual(pass_.fromlineno, 22) + self.assertEqual(pass_.tolineno, 22) + + def test_if_lineno(self): + stmts = self.astroid.body + # on line 20: + # if aaaa: pass + # else: + # aaaa,bbbb = 1,2 + # aaaa,bbbb = bbbb,aaaa + if_ = stmts[5] + self.assertIsInstance(if_, nodes.If) + self.assertEqual(if_.fromlineno, 24) + self.assertEqual(if_.tolineno, 27) + self.assertEqual(if_.blockstart_tolineno, 24) + self.assertEqual(if_.orelse[0].fromlineno, 26) + self.assertEqual(if_.orelse[1].tolineno, 27) + + def test_for_while_lineno(self): + for code in (''' + for a in range(4): + print (a) + break + else: + print ("bouh") + ''', ''' + while a: + print (a) + break + else: + print ("bouh") + '''): + astroid = builder.parse(code, __name__) + stmt = astroid.body[0] + self.assertEqual(stmt.fromlineno, 2) + self.assertEqual(stmt.tolineno, 6) + self.assertEqual(stmt.blockstart_tolineno, 2) + self.assertEqual(stmt.orelse[0].fromlineno, 6) # XXX + self.assertEqual(stmt.orelse[0].tolineno, 6) + + def test_try_except_lineno(self): + astroid = builder.parse(''' + try: + print (a) + except: + pass + else: + print ("bouh") + ''', __name__) + try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 7) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.orelse[0].fromlineno, 7) # XXX + self.assertEqual(try_.orelse[0].tolineno, 7) + hdlr = try_.handlers[0] + self.assertEqual(hdlr.fromlineno, 4) + self.assertEqual(hdlr.tolineno, 5) + self.assertEqual(hdlr.blockstart_tolineno, 4) + + + def test_try_finally_lineno(self): + astroid = builder.parse(''' + try: + print (a) + finally: + print ("bouh") + ''', __name__) + try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 5) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.finalbody[0].fromlineno, 5) # XXX + self.assertEqual(try_.finalbody[0].tolineno, 5) + + + def test_try_finally_25_lineno(self): + astroid = builder.parse(''' + try: + print (a) + except: + pass + finally: + print ("bouh") + ''', __name__) + try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 7) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.finalbody[0].fromlineno, 7) # XXX + self.assertEqual(try_.finalbody[0].tolineno, 7) + + + def test_with_lineno(self): + astroid = builder.parse(''' + from __future__ import with_statement + with file("/tmp/pouet") as f: + print (f) + ''', __name__) + with_ = astroid.body[1] + self.assertEqual(with_.fromlineno, 3) + self.assertEqual(with_.tolineno, 4) + self.assertEqual(with_.blockstart_tolineno, 3) + + +class BuilderTest(unittest.TestCase): + + def setUp(self): + self.builder = builder.AstroidBuilder() + + def test_data_build_null_bytes(self): + with 
self.assertRaises(exceptions.AstroidBuildingException): + self.builder.string_build('\x00') + + def test_data_build_invalid_x_escape(self): + with self.assertRaises(exceptions.AstroidBuildingException): + self.builder.string_build('"\\x1"') + + def test_missing_newline(self): + """check that a file with no trailing new line is parseable""" + resources.build_file('data/noendingnewline.py') + + def test_missing_file(self): + with self.assertRaises(exceptions.AstroidBuildingException): + resources.build_file('data/inexistant.py') + + def test_inspect_build0(self): + """test astroid tree build from a living object""" + builtin_ast = MANAGER.ast_from_module_name(BUILTINS) + if six.PY2: + fclass = builtin_ast['file'] + self.assertIn('name', fclass) + self.assertIn('mode', fclass) + self.assertIn('read', fclass) + self.assertTrue(fclass.newstyle) + self.assertTrue(fclass.pytype(), '%s.type' % BUILTINS) + self.assertIsInstance(fclass['read'], nodes.FunctionDef) + # check builtin function has args.args == None + dclass = builtin_ast['dict'] + self.assertIsNone(dclass['has_key'].args.args) + # just check type and object are there + builtin_ast.getattr('type') + objectastroid = builtin_ast.getattr('object')[0] + self.assertIsInstance(objectastroid.getattr('__new__')[0], nodes.FunctionDef) + # check open file alias + builtin_ast.getattr('open') + # check 'help' is there (defined dynamically by site.py) + builtin_ast.getattr('help') + # check property has __init__ + pclass = builtin_ast['property'] + self.assertIn('__init__', pclass) + self.assertIsInstance(builtin_ast['None'], nodes.Const) + self.assertIsInstance(builtin_ast['True'], nodes.Const) + self.assertIsInstance(builtin_ast['False'], nodes.Const) + if six.PY3: + self.assertIsInstance(builtin_ast['Exception'], nodes.ClassDef) + self.assertIsInstance(builtin_ast['NotImplementedError'], nodes.ClassDef) + else: + self.assertIsInstance(builtin_ast['Exception'], nodes.ImportFrom) + self.assertIsInstance(builtin_ast['NotImplementedError'], nodes.ImportFrom) + + def test_inspect_build1(self): + time_ast = MANAGER.ast_from_module_name('time') + self.assertTrue(time_ast) + self.assertEqual(time_ast['time'].args.defaults, []) + + if os.name == 'java': + test_inspect_build1 = unittest.expectedFailure(test_inspect_build1) + + def test_inspect_build2(self): + """test astroid tree build from a living object""" + try: + from mx import DateTime + except ImportError: + self.skipTest('test skipped: mxDateTime is not available') + else: + dt_ast = self.builder.inspect_build(DateTime) + dt_ast.getattr('DateTime') + # this one is failing since DateTimeType.__module__ = 'builtins' ! 
+ #dt_ast.getattr('DateTimeType') + + def test_inspect_build3(self): + self.builder.inspect_build(unittest) + + @test_utils.require_version(maxver='3.0') + def test_inspect_build_instance(self): + """test astroid tree build from a living object""" + import exceptions + builtin_ast = self.builder.inspect_build(exceptions) + fclass = builtin_ast['OSError'] + # things like OSError.strerror are now (2.5) data descriptors on the + # class instead of entries in the __dict__ of an instance + container = fclass + self.assertIn('errno', container) + self.assertIn('strerror', container) + self.assertIn('filename', container) + + def test_inspect_build_type_object(self): + builtin_ast = MANAGER.ast_from_module_name(BUILTINS) + + inferred = list(builtin_ast.igetattr('object')) + self.assertEqual(len(inferred), 1) + inferred = inferred[0] + self.assertEqual(inferred.name, 'object') + inferred.as_string() # no crash test + + inferred = list(builtin_ast.igetattr('type')) + self.assertEqual(len(inferred), 1) + inferred = inferred[0] + self.assertEqual(inferred.name, 'type') + inferred.as_string() # no crash test + + def test_inspect_transform_module(self): + # ensure no cached version of the time module + MANAGER._mod_file_cache.pop(('time', None), None) + MANAGER.astroid_cache.pop('time', None) + def transform_time(node): + if node.name == 'time': + node.transformed = True + MANAGER.register_transform(nodes.Module, transform_time) + try: + time_ast = MANAGER.ast_from_module_name('time') + self.assertTrue(getattr(time_ast, 'transformed', False)) + finally: + MANAGER.unregister_transform(nodes.Module, transform_time) + + def test_package_name(self): + """test base properties and method of a astroid module""" + datap = resources.build_file('data/__init__.py', 'data') + self.assertEqual(datap.name, 'data') + self.assertEqual(datap.package, 1) + datap = resources.build_file('data/__init__.py', 'data.__init__') + self.assertEqual(datap.name, 'data') + self.assertEqual(datap.package, 1) + + def test_yield_parent(self): + """check if we added discard nodes as yield parent (w/ compiler)""" + code = """ + def yiell(): #@ + yield 0 + if noe: + yield more + """ + func = test_utils.extract_node(code) + self.assertIsInstance(func, nodes.FunctionDef) + stmt = func.body[0] + self.assertIsInstance(stmt, nodes.Expr) + self.assertIsInstance(stmt.value, nodes.Yield) + self.assertIsInstance(func.body[1].body[0], nodes.Expr) + self.assertIsInstance(func.body[1].body[0].value, nodes.Yield) + + def test_object(self): + obj_ast = self.builder.inspect_build(object) + self.assertIn('__setattr__', obj_ast) + + def test_newstyle_detection(self): + data = ''' + class A: + "old style" + + class B(A): + "old style" + + class C(object): + "new style" + + class D(C): + "new style" + + __metaclass__ = type + + class E(A): + "old style" + + class F: + "new style" + ''' + mod_ast = builder.parse(data, __name__) + if six.PY3: + self.assertTrue(mod_ast['A'].newstyle) + self.assertTrue(mod_ast['B'].newstyle) + self.assertTrue(mod_ast['E'].newstyle) + else: + self.assertFalse(mod_ast['A'].newstyle) + self.assertFalse(mod_ast['B'].newstyle) + self.assertFalse(mod_ast['E'].newstyle) + self.assertTrue(mod_ast['C'].newstyle) + self.assertTrue(mod_ast['D'].newstyle) + self.assertTrue(mod_ast['F'].newstyle) + + def test_globals(self): + data = ''' + CSTE = 1 + + def update_global(): + global CSTE + CSTE += 1 + + def global_no_effect(): + global CSTE2 + print (CSTE) + ''' + astroid = builder.parse(data, __name__) + 
self.assertEqual(len(astroid.getattr('CSTE')), 2) + self.assertIsInstance(astroid.getattr('CSTE')[0], nodes.AssignName) + self.assertEqual(astroid.getattr('CSTE')[0].fromlineno, 2) + self.assertEqual(astroid.getattr('CSTE')[1].fromlineno, 6) + with self.assertRaises(exceptions.NotFoundError): + astroid.getattr('CSTE2') + with self.assertRaises(exceptions.InferenceError): + next(astroid['global_no_effect'].ilookup('CSTE2')) + + @unittest.skipIf(os.name == 'java', + 'This test is skipped on Jython, because the ' + 'socket object is patched later on with the ' + 'methods we are looking for. Since we do not ' + 'understand setattr in for loops yet, we skip this') + def test_socket_build(self): + import socket + astroid = self.builder.module_build(socket) + # XXX just check the first one. Actually 3 objects are inferred (look at + # the socket module) but the last one as those attributes dynamically + # set and astroid is missing this. + for fclass in astroid.igetattr('socket'): + self.assertIn('connect', fclass) + self.assertIn('send', fclass) + self.assertIn('close', fclass) + break + + def test_gen_expr_var_scope(self): + data = 'l = list(n for n in range(10))\n' + astroid = builder.parse(data, __name__) + # n unavailable outside gen expr scope + self.assertNotIn('n', astroid) + # test n is inferable anyway + n = test_utils.get_name_node(astroid, 'n') + self.assertIsNot(n.scope(), astroid) + self.assertEqual([i.__class__ for i in n.infer()], + [util.YES.__class__]) + + def test_no_future_imports(self): + mod = builder.parse("import sys") + self.assertEqual(set(), mod._future_imports) + + def test_future_imports(self): + mod = builder.parse("from __future__ import print_function") + self.assertEqual(set(['print_function']), mod._future_imports) + + def test_two_future_imports(self): + mod = builder.parse(""" + from __future__ import print_function + from __future__ import absolute_import + """) + self.assertEqual(set(['print_function', 'absolute_import']), mod._future_imports) + + def test_inferred_build(self): + code = ''' + class A: pass + A.type = "class" + + def A_assign_type(self): + print (self) + A.assign_type = A_assign_type + ''' + astroid = builder.parse(code) + lclass = list(astroid.igetattr('A')) + self.assertEqual(len(lclass), 1) + lclass = lclass[0] + self.assertIn('assign_type', lclass._locals) + self.assertIn('type', lclass._locals) + + def test_augassign_attr(self): + builder.parse(""" + class Counter: + v = 0 + def inc(self): + self.v += 1 + """, __name__) + # TODO: Check self.v += 1 generate AugAssign(AssAttr(...)), + # not AugAssign(GetAttr(AssName...)) + + def test_inferred_dont_pollute(self): + code = ''' + def func(a=None): + a.custom_attr = 0 + def func2(a={}): + a.custom_attr = 0 + ''' + builder.parse(code) + nonetype = nodes.const_factory(None) + self.assertNotIn('custom_attr', nonetype._locals) + self.assertNotIn('custom_attr', nonetype._instance_attrs) + nonetype = nodes.const_factory({}) + self.assertNotIn('custom_attr', nonetype._locals) + self.assertNotIn('custom_attr', nonetype._instance_attrs) + + def test_asstuple(self): + code = 'a, b = range(2)' + astroid = builder.parse(code) + self.assertIn('b', astroid._locals) + code = ''' + def visit_if(self, node): + node.test, body = node.tests[0] + ''' + astroid = builder.parse(code) + self.assertIn('body', astroid['visit_if']._locals) + + def test_build_constants(self): + '''test expected values of constants after rebuilding''' + code = ''' + def func(): + return None + return + return 'None' + ''' + astroid = 
builder.parse(code) + none, nothing, chain = [ret.value for ret in astroid.body[0].body] + self.assertIsInstance(none, nodes.Const) + self.assertIsNone(none.value) + self.assertIsNone(nothing) + self.assertIsInstance(chain, nodes.Const) + self.assertEqual(chain.value, 'None') + + def test_not_implemented(self): + node = test_utils.extract_node(''' + NotImplemented #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, NotImplemented) + + +class FileBuildTest(unittest.TestCase): + def setUp(self): + self.module = resources.build_file('data/module.py', 'data.module') + + def test_module_base_props(self): + """test base properties and method of a astroid module""" + module = self.module + self.assertEqual(module.name, 'data.module') + self.assertEqual(module.doc, "test module for astroid\n") + self.assertEqual(module.fromlineno, 0) + self.assertIsNone(module.parent) + self.assertEqual(module.frame(), module) + self.assertEqual(module.root(), module) + self.assertEqual(module.source_file, os.path.abspath(resources.find('data/module.py'))) + self.assertEqual(module.pure_python, 1) + self.assertEqual(module.package, 0) + self.assertFalse(module.is_statement) + self.assertEqual(module.statement(), module) + self.assertEqual(module.statement(), module) + + def test_module_locals(self): + """test the 'locals' dictionary of a astroid module""" + module = self.module + _locals = module._locals + self.assertIs(_locals, module._globals) + keys = sorted(_locals.keys()) + should = ['MY_DICT', 'NameNode', 'YO', 'YOUPI', + '__revision__', 'global_access', 'modutils', 'four_args', + 'os', 'redirect'] + should.sort() + self.assertEqual(keys, sorted(should)) + + def test_function_base_props(self): + """test base properties and method of a astroid function""" + module = self.module + function = module['global_access'] + self.assertEqual(function.name, 'global_access') + self.assertEqual(function.doc, 'function test') + self.assertEqual(function.fromlineno, 11) + self.assertTrue(function.parent) + self.assertEqual(function.frame(), function) + self.assertEqual(function.parent.frame(), module) + self.assertEqual(function.root(), module) + self.assertEqual([n.name for n in function.args.args], ['key', 'val']) + self.assertEqual(function.type, 'function') + + def test_function_locals(self): + """test the 'locals' dictionary of a astroid function""" + _locals = self.module['global_access']._locals + self.assertEqual(len(_locals), 4) + keys = sorted(_locals.keys()) + self.assertEqual(keys, ['i', 'key', 'local', 'val']) + + def test_class_base_props(self): + """test base properties and method of a astroid class""" + module = self.module + klass = module['YO'] + self.assertEqual(klass.name, 'YO') + self.assertEqual(klass.doc, 'hehe') + self.assertEqual(klass.fromlineno, 25) + self.assertTrue(klass.parent) + self.assertEqual(klass.frame(), klass) + self.assertEqual(klass.parent.frame(), module) + self.assertEqual(klass.root(), module) + self.assertEqual(klass.basenames, []) + if six.PY3: + self.assertTrue(klass.newstyle) + else: + self.assertFalse(klass.newstyle) + + def test_class_locals(self): + """test the 'locals' dictionary of a astroid class""" + module = self.module + klass1 = module['YO'] + locals1 = klass1._locals + keys = sorted(locals1.keys()) + self.assertEqual(keys, ['__init__', 'a']) + klass2 = module['YOUPI'] + locals2 = klass2._locals + keys = locals2.keys() + self.assertEqual(sorted(keys), + ['__init__', 'class_attr', 'class_method', 
+ 'method', 'static_method']) + + def test_class_instance_attrs(self): + module = self.module + klass1 = module['YO'] + klass2 = module['YOUPI'] + self.assertEqual(list(klass1._instance_attrs.keys()), ['yo']) + self.assertEqual(list(klass2._instance_attrs.keys()), ['member']) + + def test_class_basenames(self): + module = self.module + klass1 = module['YO'] + klass2 = module['YOUPI'] + self.assertEqual(klass1.basenames, []) + self.assertEqual(klass2.basenames, ['YO']) + + def test_method_base_props(self): + """test base properties and method of a astroid method""" + klass2 = self.module['YOUPI'] + # "normal" method + method = klass2['method'] + self.assertEqual(method.name, 'method') + self.assertEqual([n.name for n in method.args.args], ['self']) + self.assertEqual(method.doc, 'method test') + self.assertEqual(method.fromlineno, 47) + self.assertEqual(method.type, 'method') + # class method + method = klass2['class_method'] + self.assertEqual([n.name for n in method.args.args], ['cls']) + self.assertEqual(method.type, 'classmethod') + # static method + method = klass2['static_method'] + self.assertEqual(method.args.args, []) + self.assertEqual(method.type, 'staticmethod') + + def test_method_locals(self): + """test the 'locals' dictionary of a astroid method""" + method = self.module['YOUPI']['method'] + _locals = method._locals + keys = sorted(_locals) + if sys.version_info < (3, 0): + self.assertEqual(len(_locals), 5) + self.assertEqual(keys, ['a', 'autre', 'b', 'local', 'self']) + else:# ListComp variables are no more accessible outside + self.assertEqual(len(_locals), 3) + self.assertEqual(keys, ['autre', 'local', 'self']) + + +class ModuleBuildTest(resources.SysPathSetup, FileBuildTest): + + def setUp(self): + super(ModuleBuildTest, self).setUp() + abuilder = builder.AstroidBuilder() + try: + import data.module + except ImportError: + # Make pylint happy. 
+ self.skipTest('Unable to load data.module') + else: + self.module = abuilder.module_build(data.module, 'data.module') + +@unittest.skipIf(six.PY3, "guess_encoding not used on Python 3") +class TestGuessEncoding(unittest.TestCase): + def setUp(self): + self.guess_encoding = builder._guess_encoding + + def testEmacs(self): + e = self.guess_encoding('# -*- coding: UTF-8 -*-') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding('# -*- coding:UTF-8 -*-') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(''' + ### -*- coding: ISO-8859-1 -*- + ''') + self.assertEqual(e, 'ISO-8859-1') + e = self.guess_encoding(''' + + ### -*- coding: ISO-8859-1 -*- + ''') + self.assertIsNone(e) + + def testVim(self): + e = self.guess_encoding('# vim:fileencoding=UTF-8') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(''' + ### vim:fileencoding=ISO-8859-1 + ''') + self.assertEqual(e, 'ISO-8859-1') + e = self.guess_encoding(''' + + ### vim:fileencoding= ISO-8859-1 + ''') + self.assertIsNone(e) + + def test_wrong_coding(self): + # setting "coding" varaible + e = self.guess_encoding("coding = UTF-8") + self.assertIsNone(e) + # setting a dictionnary entry + e = self.guess_encoding("coding:UTF-8") + self.assertIsNone(e) + # setting an arguement + e = self.guess_encoding("def do_something(a_word_with_coding=None):") + self.assertIsNone(e) + + def testUTF8(self): + e = self.guess_encoding('\xef\xbb\xbf any UTF-8 data') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(' any UTF-8 data \xef\xbb\xbf') + self.assertIsNone(e) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_inference.py b/pymode/libs/astroid/tests/unittest_inference.py new file mode 100644 index 00000000..86497727 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_inference.py @@ -0,0 +1,2130 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""tests for the astroid inference capabilities +""" +import sys +from functools import partial +import unittest +import warnings + +import six + +from astroid import InferenceError, builder, nodes +from astroid.builder import parse +from astroid.inference import infer_end as inference_infer_end +from astroid.bases import Instance, BoundMethod, UnboundMethod,\ + path_wrapper, BUILTINS +from astroid import arguments +from astroid import objects +from astroid import test_utils +from astroid import util +from astroid.tests import resources + + +def get_node_of_class(start_from, klass): + return next(start_from.nodes_of_class(klass)) + +builder = builder.AstroidBuilder() + +if sys.version_info < (3, 0): + EXC_MODULE = 'exceptions' +else: + EXC_MODULE = BUILTINS + + +class InferenceUtilsTest(unittest.TestCase): + + def test_path_wrapper(self): + def infer_default(self, *args): + raise InferenceError + infer_default = path_wrapper(infer_default) + infer_end = path_wrapper(inference_infer_end) + with self.assertRaises(InferenceError): + next(infer_default(1)) + self.assertEqual(next(infer_end(1)), 1) + + +def _assertInferElts(node_type, self, node, elts): + inferred = next(node.infer()) + self.assertIsInstance(inferred, node_type) + self.assertEqual(sorted(elt.value for elt in inferred.elts), + elts) + +def partialmethod(func, arg): + """similar to functools.partial but return a lambda instead of a class so returned value may be + turned into a method. + """ + return lambda *args, **kwargs: func(arg, *args, **kwargs) + +class InferenceTest(resources.SysPathSetup, unittest.TestCase): + + # additional assertInfer* method for builtin types + + def assertInferConst(self, node, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected) + + def assertInferDict(self, node, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + + elts = set([(key.value, value.value) + for (key, value) in inferred.items]) + self.assertEqual(sorted(elts), sorted(expected.items())) + + assertInferTuple = partialmethod(_assertInferElts, nodes.Tuple) + assertInferList = partialmethod(_assertInferElts, nodes.List) + assertInferSet = partialmethod(_assertInferElts, nodes.Set) + assertInferFrozenSet = partialmethod(_assertInferElts, objects.FrozenSet) + + CODE = ''' + class C(object): + "new style" + attr = 4 + + def meth1(self, arg1, optarg=0): + var = object() + print ("yo", arg1, optarg) + self.iattr = "hop" + return var + + def meth2(self): + self.meth1(*self.meth3) + + def meth3(self, d=attr): + b = self.attr + c = self.iattr + return b, c + + ex = Exception("msg") + v = C().meth1(1) + m_unbound = C.meth1 + m_bound = C().meth1 + a, b, c = ex, 1, "bonjour" + [d, e, f] = [ex, 1.0, ("bonjour", v)] + g, h = f + i, (j, k) = "glup", f + + a, b= b, a # Gasp ! 
+ ''' + + ast = parse(CODE, __name__) + + def test_infer_abstract_property_return_values(self): + module = parse(''' + import abc + + class A(object): + @abc.abstractproperty + def test(self): + return 42 + + a = A() + x = a.test + ''') + inferred = next(module['x'].infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_module_inference(self): + inferred = self.ast.infer() + obj = next(inferred) + self.assertEqual(obj.name, __name__) + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_class_inference(self): + inferred = self.ast['C'].infer() + obj = next(inferred) + self.assertEqual(obj.name, 'C') + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_function_inference(self): + inferred = self.ast['C']['meth1'].infer() + obj = next(inferred) + self.assertEqual(obj.name, 'meth1') + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_builtin_name_inference(self): + inferred = self.ast['C']['meth1']['var'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_tupleassign_name_inference(self): + inferred = self.ast['a'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['b'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 1) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['c'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "bonjour") + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_listassign_name_inference(self): + inferred = self.ast['d'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['e'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 1.0) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['f'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Tuple) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_advanced_tupleassign_name_inference1(self): + inferred = self.ast['g'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "bonjour") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['h'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_advanced_tupleassign_name_inference2(self): + inferred = self.ast['i'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, u"glup") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['j'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + 
self.assertEqual(const.value, "bonjour") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['k'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_swap_assign_inference(self): + inferred = self.ast._locals['a'][1].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 1) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast._locals['b'][1].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference1(self): + inferred = self.ast['ex'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference2(self): + inferred = get_node_of_class(self.ast['C']['meth2'], nodes.Attribute).infer() + meth1 = next(inferred) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference3(self): + inferred = self.ast['C']['meth3']['b'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 4) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference4(self): + inferred = self.ast['C']['meth3']['c'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "hop") + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_callfunc_inference(self): + inferred = self.ast['v'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, Instance) + self.assertEqual(meth1.name, 'object') + self.assertEqual(meth1.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_unbound_method_inference(self): + inferred = self.ast['m_unbound'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, UnboundMethod) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.parent.frame().name, 'C') + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_bound_method_inference(self): + inferred = self.ast['m_bound'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, BoundMethod) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.parent.frame().name, 'C') + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_args_default_inference1(self): + optarg = test_utils.get_name_node(self.ast['C']['meth1'], 'optarg') + inferred = optarg.infer() + obj1 = next(inferred) + self.assertIsInstance(obj1, nodes.Const) + self.assertEqual(obj1.value, 0) + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_args_default_inference2(self): + inferred = self.ast['C']['meth3'].ilookup('d') + obj1 = next(inferred) + self.assertIsInstance(obj1, nodes.Const) + self.assertEqual(obj1.value, 4) + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_inference_restrictions(self): + inferred = 
test_utils.get_name_node(self.ast['C']['meth1'], 'arg1').infer() + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_ancestors_inference(self): + code = ''' + class A(object): #@ + pass + + class A(A): #@ + pass + ''' + a1, a2 = test_utils.extract_node(code, __name__) + a2_ancestors = list(a2.ancestors()) + self.assertEqual(len(a2_ancestors), 2) + self.assertIs(a2_ancestors[0], a1) + + def test_ancestors_inference2(self): + code = ''' + class A(object): #@ + pass + + class B(A): #@ + pass + + class A(B): #@ + pass + ''' + a1, b, a2 = test_utils.extract_node(code, __name__) + a2_ancestors = list(a2.ancestors()) + self.assertEqual(len(a2_ancestors), 3) + self.assertIs(a2_ancestors[0], b) + self.assertIs(a2_ancestors[1], a1) + + def test_f_arg_f(self): + code = ''' + def f(f=1): + return f + + a = f() + ''' + ast = parse(code, __name__) + a = ast['a'] + a_inferred = a.inferred() + self.assertEqual(a_inferred[0].value, 1) + self.assertEqual(len(a_inferred), 1) + + def test_infered_warning(self): + code = ''' + def f(f=1): + return f + + a = f() + ''' + ast = parse(code, __name__) + a = ast['a'] + + warnings.simplefilter('always') + with warnings.catch_warnings(record=True) as w: + a.infered() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + + def test_exc_ancestors(self): + code = ''' + def f(): + raise __(NotImplementedError) + ''' + error = test_utils.extract_node(code, __name__) + nie = error.inferred()[0] + self.assertIsInstance(nie, nodes.ClassDef) + nie_ancestors = [c.name for c in nie.ancestors()] + if sys.version_info < (3, 0): + self.assertEqual(nie_ancestors, ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object']) + else: + self.assertEqual(nie_ancestors, ['RuntimeError', 'Exception', 'BaseException', 'object']) + + def test_except_inference(self): + code = ''' + try: + print (hop) + except NameError as ex: + ex1 = ex + except Exception as ex: + ex2 = ex + raise + ''' + ast = parse(code, __name__) + ex1 = ast['ex1'] + ex1_infer = ex1.infer() + ex1 = next(ex1_infer) + self.assertIsInstance(ex1, Instance) + self.assertEqual(ex1.name, 'NameError') + self.assertRaises(StopIteration, partial(next, ex1_infer)) + ex2 = ast['ex2'] + ex2_infer = ex2.infer() + ex2 = next(ex2_infer) + self.assertIsInstance(ex2, Instance) + self.assertEqual(ex2.name, 'Exception') + self.assertRaises(StopIteration, partial(next, ex2_infer)) + + def test_del1(self): + code = ''' + del undefined_attr + ''' + delete = test_utils.extract_node(code, __name__) + self.assertRaises(InferenceError, delete.infer) + + def test_del2(self): + code = ''' + a = 1 + b = a + del a + c = a + a = 2 + d = a + ''' + ast = parse(code, __name__) + n = ast['b'] + n_infer = n.infer() + inferred = next(n_infer) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 1) + self.assertRaises(StopIteration, partial(next, n_infer)) + n = ast['c'] + n_infer = n.infer() + self.assertRaises(InferenceError, partial(next, n_infer)) + n = ast['d'] + n_infer = n.infer() + inferred = next(n_infer) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 2) + self.assertRaises(StopIteration, partial(next, n_infer)) + + def test_builtin_types(self): + code = ''' + l = [1] + t = (2,) + d = {} + s = '' + s2 = '_' + ''' + ast = parse(code, __name__) + n = ast['l'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertIsInstance(inferred, Instance) + 
self.assertEqual(inferred.getitem(0).value, 1) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'list') + self.assertIn('append', inferred._proxied._locals) + n = ast['t'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.getitem(0).value, 2) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'tuple') + n = ast['d'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Dict) + self.assertIsInstance(inferred, Instance) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'dict') + self.assertIn('get', inferred._proxied._locals) + n = ast['s'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'str') + self.assertIn('lower', inferred._proxied._locals) + n = ast['s2'] + inferred = next(n.infer()) + self.assertEqual(inferred.getitem(0).value, '_') + + code = 's = {1}' + ast = parse(code, __name__) + n = ast['s'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Set) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'set') + self.assertIn('remove', inferred._proxied._locals) + + @test_utils.require_version(maxver='3.0') + def test_unicode_type(self): + code = '''u = u""''' + ast = parse(code, __name__) + n = ast['u'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'unicode') + self.assertIn('lower', inferred._proxied._locals) + + @unittest.expectedFailure + def test_descriptor_are_callable(self): + code = ''' + class A: + statm = staticmethod(open) + clsm = classmethod('whatever') + ''' + ast = parse(code, __name__) + statm = next(ast['A'].igetattr('statm')) + self.assertTrue(statm.callable()) + clsm = next(ast['A'].igetattr('clsm')) + self.assertFalse(clsm.callable()) + + def test_bt_ancestor_crash(self): + code = ''' + class Warning(Warning): + pass + ''' + ast = parse(code, __name__) + w = ast['Warning'] + ancestors = w.ancestors() + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'Warning') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'Exception') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'BaseException') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'object') + self.assertEqual(ancestor.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, ancestors)) + + def test_qqch(self): + code = ''' + from astroid.modutils import load_module_from_name + xxx = load_module_from_name('__pkginfo__') + ''' + ast = parse(code, __name__) + xxx = ast['xxx'] + self.assertSetEqual({n.__class__ for n in xxx.inferred()}, + {nodes.Const, util.YES.__class__}) + + def test_method_argument(self): + code = ''' + class ErudiEntitySchema: + """a entity has a type, a set of subject and or object relations""" + def __init__(self, e_type, **kwargs): + kwargs['e_type'] = e_type.capitalize().encode() + + def meth(self, e_type, *args, **kwargs): + kwargs['e_type'] = e_type.capitalize().encode() + print(args) + ''' + ast = parse(code, __name__) + arg = 
test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'e_type') + self.assertEqual([n.__class__ for n in arg.infer()], + [util.YES.__class__]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'kwargs') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Dict]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'e_type') + self.assertEqual([n.__class__ for n in arg.infer()], + [util.YES.__class__]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'args') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Tuple]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'kwargs') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Dict]) + + def test_tuple_then_list(self): + code = ''' + def test_view(rql, vid, tags=()): + tags = list(tags) + __(tags).append(vid) + ''' + name = test_utils.extract_node(code, __name__) + it = name.infer() + tags = next(it) + self.assertIsInstance(tags, nodes.List) + self.assertEqual(tags.elts, []) + with self.assertRaises(StopIteration): + next(it) + + def test_mulassign_inference(self): + code = ''' + def first_word(line): + """Return the first word of a line""" + + return line.split()[0] + + def last_word(line): + """Return last word of a line""" + + return line.split()[-1] + + def process_line(word_pos): + """Silly function: returns (ok, callable) based on argument. + + For test purpose only. + """ + + if word_pos > 0: + return (True, first_word) + elif word_pos < 0: + return (True, last_word) + else: + return (False, None) + + if __name__ == '__main__': + + line_number = 0 + for a_line in file('test_callable.py'): + tupletest = process_line(line_number) + (ok, fct) = process_line(line_number) + if ok: + fct(a_line) + ''' + ast = parse(code, __name__) + self.assertEqual(len(list(ast['process_line'].infer_call_result(None))), 3) + self.assertEqual(len(list(ast['tupletest'].infer())), 3) + values = ['FunctionDef(first_word)', 'FunctionDef(last_word)', 'Const(NoneType)'] + self.assertEqual([str(inferred) + for inferred in ast['fct'].infer()], values) + + def test_float_complex_ambiguity(self): + code = ''' + def no_conjugate_member(magic_flag): #@ + """should not raise E1101 on something.conjugate""" + if magic_flag: + something = 1.0 + else: + something = 1.0j + if isinstance(something, float): + return something + return __(something).conjugate() + ''' + func, retval = test_utils.extract_node(code, __name__) + self.assertEqual( + [i.value for i in func.ilookup('something')], + [1.0, 1.0j]) + self.assertEqual( + [i.value for i in retval.infer()], + [1.0, 1.0j]) + + def test_lookup_cond_branches(self): + code = ''' + def no_conjugate_member(magic_flag): + """should not raise E1101 on something.conjugate""" + something = 1.0 + if magic_flag: + something = 1.0j + return something.conjugate() + ''' + ast = parse(code, __name__) + values = [i.value for i in test_utils.get_name_node(ast, 'something', -1).infer()] + self.assertEqual(values, [1.0, 1.0j]) + + + def test_simple_subscript(self): + code = ''' + class A(object): + def __getitem__(self, index): + return index + 42 + [1, 2, 3][0] #@ + (1, 2, 3)[1] #@ + (1, 2, 3)[-1] #@ + [1, 2, 3][0] + (2, )[0] + (3, )[-1] #@ + e = {'key': 'value'} + e['key'] #@ + "first"[0] #@ + list([1, 2, 3])[-1] #@ + tuple((4, 5, 6))[2] #@ + A()[0] #@ + A()[-1] #@ + ''' + ast_nodes = test_utils.extract_node(code, __name__) + expected = [1, 2, 3, 6, 'value', 'f', 3, 6, 42, 41] + for node, expected_value in zip(ast_nodes, 
expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_invalid_subscripts(self): + ast_nodes = test_utils.extract_node(''' + class NoGetitem(object): + pass + class InvalidGetitem(object): + def __getitem__(self): pass + class InvalidGetitem2(object): + __getitem__ = 42 + NoGetitem()[4] #@ + InvalidGetitem()[5] #@ + InvalidGetitem2()[10] #@ + [1, 2, 3][None] #@ + 'lala'['bala'] #@ + ''') + for node in ast_nodes[:3]: + self.assertRaises(InferenceError, next, node.infer()) + for node in ast_nodes[3:]: + self.assertEqual(next(node.infer()), util.YES) + + def test_bytes_subscript(self): + node = test_utils.extract_node('''b'a'[0]''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + if six.PY2: + self.assertEqual(inferred.value, 'a') + else: + self.assertEqual(inferred.value, 97) + + #def test_simple_tuple(self): + #"""test case for a simple tuple value""" + ## XXX tuple inference is not implemented ... + #code = """ +#a = (1,) +#b = (22,) +#some = a + b +#""" + #ast = builder.string_build(code, __name__, __file__) + #self.assertEqual(ast['some'].infer.next().as_string(), "(1, 22)") + + def test_simple_for(self): + code = ''' + for a in [1, 2, 3]: + print (a) + for b,c in [(1,2), (3,4)]: + print (b) + print (c) + + print ([(d,e) for e,d in ([1,2], [3,4])]) + ''' + ast = parse(code, __name__) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'a', -1).infer()], [1, 2, 3]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'b', -1).infer()], [1, 3]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'c', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'd', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'e', -1).infer()], [1, 3]) + + def test_simple_for_genexpr(self): + code = ''' + print ((d,e) for e,d in ([1,2], [3,4])) + ''' + ast = parse(code, __name__) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'd', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'e', -1).infer()], [1, 3]) + + + def test_builtin_help(self): + code = ''' + help() + ''' + # XXX failing since __builtin__.help assignment has + # been moved into a function... 
+ node = test_utils.extract_node(code, __name__) + inferred = list(node.func.infer()) + self.assertEqual(len(inferred), 1, inferred) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(inferred[0].name, "_Helper") + + def test_builtin_open(self): + code = ''' + open("toto.txt") + ''' + node = test_utils.extract_node(code, __name__).func + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + if hasattr(sys, 'pypy_version_info'): + self.assertIsInstance(inferred[0], nodes.ClassDef) + self.assertEqual(inferred[0].name, 'file') + else: + self.assertIsInstance(inferred[0], nodes.FunctionDef) + self.assertEqual(inferred[0].name, 'open') + + def test_callfunc_context_func(self): + code = ''' + def mirror(arg=None): + return arg + + un = mirror(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('un')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 1) + + def test_callfunc_context_lambda(self): + code = ''' + mirror = lambda x=None: x + + un = mirror(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('mirror')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Lambda) + inferred = list(ast.igetattr('un')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 1) + + def test_factory_method(self): + code = ''' + class Super(object): + @classmethod + def instance(cls): + return cls() + + class Sub(Super): + def method(self): + print ('method called') + + sub = Sub.instance() + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('sub')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(inferred[0]._proxied.name, 'Sub') + + + def test_import_as(self): + code = ''' + import os.path as osp + print (osp.dirname(__file__)) + + from os.path import exists as e + assert e(__file__) + + from new import code as make_code + print (make_code) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('osp')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Module) + self.assertEqual(inferred[0].name, 'os.path') + inferred = list(ast.igetattr('e')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + self.assertEqual(inferred[0].name, 'exists') + if sys.version_info >= (3, 0): + self.skipTest(' module has been removed') + inferred = list(ast.igetattr('make_code')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(str(inferred[0]), + 'Instance of %s.type' % BUILTINS) + + def _test_const_inferred(self, node, value): + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, value) + + def test_unary_not(self): + for code in ('a = not (1,); b = not ()', + 'a = not {1:2}; b = not {}'): + ast = builder.string_build(code, __name__, __file__) + self._test_const_inferred(ast['a'], False) + self._test_const_inferred(ast['b'], True) + + @test_utils.require_version(minver='3.5') + def test_matmul(self): + node = test_utils.extract_node(''' + class Array: + def __matmul__(self, other): + return 42 + Array() @ Array() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_binary_op_int_add(self): + ast = 
builder.string_build('a = 1 + 2', __name__, __file__) + self._test_const_inferred(ast['a'], 3) + + def test_binary_op_int_sub(self): + ast = builder.string_build('a = 1 - 2', __name__, __file__) + self._test_const_inferred(ast['a'], -1) + + def test_binary_op_float_div(self): + ast = builder.string_build('a = 1 / 2.', __name__, __file__) + self._test_const_inferred(ast['a'], 1 / 2.) + + def test_binary_op_str_mul(self): + ast = builder.string_build('a = "*" * 40', __name__, __file__) + self._test_const_inferred(ast['a'], "*" * 40) + + def test_binary_op_bitand(self): + ast = builder.string_build('a = 23&20', __name__, __file__) + self._test_const_inferred(ast['a'], 23&20) + + def test_binary_op_bitor(self): + ast = builder.string_build('a = 23|8', __name__, __file__) + self._test_const_inferred(ast['a'], 23|8) + + def test_binary_op_bitxor(self): + ast = builder.string_build('a = 23^9', __name__, __file__) + self._test_const_inferred(ast['a'], 23^9) + + def test_binary_op_shiftright(self): + ast = builder.string_build('a = 23 >>1', __name__, __file__) + self._test_const_inferred(ast['a'], 23>>1) + + def test_binary_op_shiftleft(self): + ast = builder.string_build('a = 23 <<1', __name__, __file__) + self._test_const_inferred(ast['a'], 23<<1) + + + def test_binary_op_list_mul(self): + for code in ('a = [[]] * 2', 'a = 2 * [[]]'): + ast = builder.string_build(code, __name__, __file__) + inferred = list(ast['a'].infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.List) + self.assertEqual(len(inferred[0].elts), 2) + self.assertIsInstance(inferred[0].elts[0], nodes.List) + self.assertIsInstance(inferred[0].elts[1], nodes.List) + + def test_binary_op_list_mul_none(self): + 'test correct handling on list multiplied by None' + ast = builder.string_build('a = [1] * None\nb = [1] * "r"') + inferred = ast['a'].inferred() + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + inferred = ast['b'].inferred() + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + + def test_binary_op_list_mul_int(self): + 'test correct handling on list multiplied by int when there are more than one' + code = ''' + from ctypes import c_int + seq = [c_int()] * 4 + ''' + ast = parse(code, __name__) + inferred = ast['seq'].inferred() + self.assertEqual(len(inferred), 1) + listval = inferred[0] + self.assertIsInstance(listval, nodes.List) + self.assertEqual(len(listval.itered()), 4) + + def test_binary_op_tuple_add(self): + ast = builder.string_build('a = (1,) + (2,)', __name__, __file__) + inferred = list(ast['a'].infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Tuple) + self.assertEqual(len(inferred[0].elts), 2) + self.assertEqual(inferred[0].elts[0].value, 1) + self.assertEqual(inferred[0].elts[1].value, 2) + + def test_binary_op_custom_class(self): + code = ''' + class myarray: + def __init__(self, array): + self.array = array + def __mul__(self, x): + return myarray([2,4,6]) + def astype(self): + return "ASTYPE" + + def randint(maximum): + if maximum is not None: + return myarray([1,2,3]) * 2 + else: + return int(5) + + x = randint(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('x')) + self.assertEqual(len(inferred), 2) + value = [str(v) for v in inferred] + # The __name__ trick here makes it work when invoked directly + # (__name__ == '__main__') and through pytest (__name__ == + # 'unittest_inference') + self.assertEqual(value, ['Instance of %s.myarray' % __name__, + 
'Instance of %s.int' % BUILTINS]) + + def test_nonregr_lambda_arg(self): + code = ''' + def f(g = lambda: None): + __(g()).x +''' + callfuncnode = test_utils.extract_node(code) + inferred = list(callfuncnode.infer()) + self.assertEqual(len(inferred), 2, inferred) + inferred.remove(util.YES) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertIsNone(inferred[0].value) + + def test_nonregr_getitem_empty_tuple(self): + code = ''' + def f(x): + a = ()[x] + ''' + ast = parse(code, __name__) + inferred = list(ast['f'].ilookup('a')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + + def test_nonregr_instance_attrs(self): + """non regression for instance_attrs infinite loop : pylint / #4""" + + code = """ + class Foo(object): + + def set_42(self): + self.attr = 42 + + class Bar(Foo): + + def __init__(self): + self.attr = 41 + """ + ast = parse(code, __name__) + foo_class = ast['Foo'] + bar_class = ast['Bar'] + bar_self = ast['Bar']['__init__']['self'] + assattr = bar_class._instance_attrs['attr'][0] + self.assertEqual(len(foo_class._instance_attrs['attr']), 1) + self.assertEqual(len(bar_class._instance_attrs['attr']), 1) + self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) + # call 'instance_attr' via 'Instance.getattr' to trigger the bug: + instance = bar_self.inferred()[0] + instance.getattr('attr') + self.assertEqual(len(bar_class._instance_attrs['attr']), 1) + self.assertEqual(len(foo_class._instance_attrs['attr']), 1) + self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) + + def test_python25_generator_exit(self): + # pylint: disable=redefined-variable-type + buffer = six.StringIO() + sys.stderr = buffer + try: + data = "b = {}[str(0)+''].a" + ast = builder.string_build(data, __name__, __file__) + list(ast['b'].infer()) + output = buffer.getvalue() + finally: + sys.stderr = sys.__stderr__ + # I have no idea how to test for this in another way... 
+ msg = ("Exception exceptions.RuntimeError: " + "'generator ignored GeneratorExit' in " + "ignored") + self.assertNotIn("RuntimeError", output, msg) + + def test_python25_no_relative_import(self): + ast = resources.build_file('data/package/absimport.py') + self.assertTrue(ast.absolute_import_activated(), True) + inferred = next(test_utils.get_name_node(ast, 'import_package_subpackage_module').infer()) + # failed to import since absolute_import is activated + self.assertIs(inferred, util.YES) + + def test_nonregr_absolute_import(self): + ast = resources.build_file('data/absimp/string.py', 'data.absimp.string') + self.assertTrue(ast.absolute_import_activated(), True) + inferred = next(test_utils.get_name_node(ast, 'string').infer()) + self.assertIsInstance(inferred, nodes.Module) + self.assertEqual(inferred.name, 'string') + self.assertIn('ascii_letters', inferred._locals) + + def test_mechanize_open(self): + try: + import mechanize # pylint: disable=unused-variable + except ImportError: + self.skipTest('require mechanize installed') + data = ''' + from mechanize import Browser + print(Browser) + b = Browser() + ''' + ast = parse(data, __name__) + browser = next(test_utils.get_name_node(ast, 'Browser').infer()) + self.assertIsInstance(browser, nodes.ClassDef) + bopen = list(browser.igetattr('open')) + self.skipTest('the commit said: "huum, see that later"') + self.assertEqual(len(bopen), 1) + self.assertIsInstance(bopen[0], nodes.FunctionDef) + self.assertTrue(bopen[0].callable()) + b = next(test_utils.get_name_node(ast, 'b').infer()) + self.assertIsInstance(b, Instance) + bopen = list(b.igetattr('open')) + self.assertEqual(len(bopen), 1) + self.assertIsInstance(bopen[0], BoundMethod) + self.assertTrue(bopen[0].callable()) + + def test_property(self): + code = ''' + from smtplib import SMTP + class SendMailController(object): + + @property + def smtp(self): + return SMTP(mailhost, port) + + @property + def me(self): + return self + + my_smtp = SendMailController().smtp + my_me = SendMailController().me + ''' + decorators = set(['%s.property' % BUILTINS]) + ast = parse(code, __name__) + self.assertEqual(ast['SendMailController']['smtp'].decoratornames(), + decorators) + propinferred = list(ast.body[2].value.infer()) + self.assertEqual(len(propinferred), 1) + propinferred = propinferred[0] + self.assertIsInstance(propinferred, Instance) + self.assertEqual(propinferred.name, 'SMTP') + self.assertEqual(propinferred.root().name, 'smtplib') + self.assertEqual(ast['SendMailController']['me'].decoratornames(), + decorators) + propinferred = list(ast.body[3].value.infer()) + self.assertEqual(len(propinferred), 1) + propinferred = propinferred[0] + self.assertIsInstance(propinferred, Instance) + self.assertEqual(propinferred.name, 'SendMailController') + self.assertEqual(propinferred.root().name, __name__) + + def test_im_func_unwrap(self): + code = ''' + class EnvBasedTC: + def pactions(self): + pass + pactions = EnvBasedTC.pactions.im_func + print (pactions) + + class EnvBasedTC2: + pactions = EnvBasedTC.pactions.im_func + print (pactions) + ''' + ast = parse(code, __name__) + pactions = test_utils.get_name_node(ast, 'pactions') + inferred = list(pactions.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + pactions = test_utils.get_name_node(ast['EnvBasedTC2'], 'pactions') + inferred = list(pactions.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + + def test_augassign(self): + code = ''' + a = 
1 + a += 2 + print (a) + ''' + ast = parse(code, __name__) + inferred = list(test_utils.get_name_node(ast, 'a').infer()) + + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 3) + + def test_nonregr_func_arg(self): + code = ''' + def foo(self, bar): + def baz(): + pass + def qux(): + return baz + spam = bar(None, qux) + print (spam) + ''' + ast = parse(code, __name__) + inferred = list(test_utils.get_name_node(ast['foo'], 'spam').infer()) + self.assertEqual(len(inferred), 1) + self.assertIs(inferred[0], util.YES) + + def test_nonregr_func_global(self): + code = ''' + active_application = None + + def get_active_application(): + global active_application + return active_application + + class Application(object): + def __init__(self): + global active_application + active_application = self + + class DataManager(object): + def __init__(self, app=None): + self.app = get_active_application() + def test(self): + p = self.app + print (p) + ''' + ast = parse(code, __name__) + inferred = list(Instance(ast['DataManager']).igetattr('app')) + self.assertEqual(len(inferred), 2, inferred) # None / Instance(Application) + inferred = list(test_utils.get_name_node(ast['DataManager']['test'], 'p').infer()) + self.assertEqual(len(inferred), 2, inferred) + for node in inferred: + if isinstance(node, Instance) and node.name == 'Application': + break + else: + self.fail('expected to find an instance of Application in %s' % inferred) + + def test_list_inference(self): + """#20464""" + code = ''' + from unknown import Unknown + A = [] + B = [] + + def test(): + xyz = [ + Unknown + ] + A + B + return xyz + + Z = test() + ''' + ast = parse(code, __name__) + inferred = next(ast['Z'].infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertEqual(len(inferred.elts), 1) + self.assertIs(inferred.elts[0], util.YES) + + def test__new__(self): + code = ''' + class NewTest(object): + "doc" + def __new__(cls, arg): + self = object.__new__(cls) + self.arg = arg + return self + + n = NewTest() + ''' + ast = parse(code, __name__) + self.assertRaises(InferenceError, list, ast['NewTest'].igetattr('arg')) + n = next(ast['n'].infer()) + inferred = list(n.igetattr('arg')) + self.assertEqual(len(inferred), 1, inferred) + + def test__new__bound_methods(self): + node = test_utils.extract_node(''' + class cls(object): pass + cls().__new__(cls) #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred._proxied, node.root()['cls']) + + def test_two_parents_from_same_module(self): + code = ''' + from data import nonregr + class Xxx(nonregr.Aaa, nonregr.Ccc): + "doc" + ''' + ast = parse(code, __name__) + parents = list(ast['Xxx'].ancestors()) + self.assertEqual(len(parents), 3, parents) # Aaa, Ccc, object + + def test_pluggable_inference(self): + code = ''' + from collections import namedtuple + A = namedtuple('A', ['a', 'b']) + B = namedtuple('B', 'a b') + ''' + ast = parse(code, __name__) + aclass = ast['A'].inferred()[0] + self.assertIsInstance(aclass, nodes.ClassDef) + self.assertIn('a', aclass._instance_attrs) + self.assertIn('b', aclass._instance_attrs) + bclass = ast['B'].inferred()[0] + self.assertIsInstance(bclass, nodes.ClassDef) + self.assertIn('a', bclass._instance_attrs) + self.assertIn('b', bclass._instance_attrs) + + def test_infer_arguments(self): + code = ''' + class A(object): + def first(self, arg1, arg2): + return arg1 + @classmethod + def method(cls, arg1, arg2): + return arg2 + 
@classmethod + def empty(cls): + return 2 + @staticmethod + def static(arg1, arg2): + return arg1 + def empty_method(self): + return [] + x = A().first(1, []) + y = A.method(1, []) + z = A.static(1, []) + empty = A.empty() + empty_list = A().empty_method() + ''' + ast = parse(code, __name__) + int_node = ast['x'].inferred()[0] + self.assertIsInstance(int_node, nodes.Const) + self.assertEqual(int_node.value, 1) + list_node = ast['y'].inferred()[0] + self.assertIsInstance(list_node, nodes.List) + int_node = ast['z'].inferred()[0] + self.assertIsInstance(int_node, nodes.Const) + self.assertEqual(int_node.value, 1) + empty = ast['empty'].inferred()[0] + self.assertIsInstance(empty, nodes.Const) + self.assertEqual(empty.value, 2) + empty_list = ast['empty_list'].inferred()[0] + self.assertIsInstance(empty_list, nodes.List) + + def test_infer_variable_arguments(self): + code = ''' + def test(*args, **kwargs): + vararg = args + kwarg = kwargs + ''' + ast = parse(code, __name__) + func = ast['test'] + vararg = func.body[0].value + kwarg = func.body[1].value + + kwarg_inferred = kwarg.inferred()[0] + self.assertIsInstance(kwarg_inferred, nodes.Dict) + self.assertIs(kwarg_inferred.parent, func.args) + + vararg_inferred = vararg.inferred()[0] + self.assertIsInstance(vararg_inferred, nodes.Tuple) + self.assertIs(vararg_inferred.parent, func.args) + + def test_infer_nested(self): + code = """ + def nested(): + from threading import Thread + + class NestedThread(Thread): + def __init__(self): + Thread.__init__(self) + """ + # Test that inferring Thread.__init__ looks up in + # the nested scope. + ast = parse(code, __name__) + callfunc = next(ast.nodes_of_class(nodes.Call)) + func = callfunc.func + inferred = func.inferred()[0] + self.assertIsInstance(inferred, UnboundMethod) + + def test_instance_binary_operations(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + a = A() + b = A() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util.YES) + self.assertIsInstance(mul, nodes.Const) + self.assertEqual(mul.value, 42) + + def test_instance_binary_operations_parent(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + class B(A): + pass + a = B() + b = B() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util. YES) + self.assertIsInstance(mul, nodes.Const) + self.assertEqual(mul.value, 42) + + def test_instance_binary_operations_multiple_methods(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + class B(A): + def __mul__(self, other): + return [42] + a = B() + b = B() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util.YES) + self.assertIsInstance(mul, nodes.List) + self.assertIsInstance(mul.elts[0], nodes.Const) + self.assertEqual(mul.elts[0].value, 42) + + def test_infer_call_result_crash(self): + code = """ + class A(object): + def __mul__(self, other): + return type.__new__() + + a = A() + b = A() + c = a * b + """ + ast = parse(code, __name__) + node = ast['c'] + self.assertEqual(node.inferred(), [util.YES]) + + def test_infer_empty_nodes(self): + # Should not crash when trying to infer EmptyNodes. 
+ node = nodes.EmptyNode() + self.assertEqual(node.inferred(), [util.YES]) + + def test_infinite_loop_for_decorators(self): + # Issue https://bitbucket.org/logilab/astroid/issue/50 + # A decorator that returns itself leads to an infinite loop. + code = """ + def decorator(): + def wrapper(): + return decorator() + return wrapper + + @decorator() + def do_a_thing(): + pass + """ + ast = parse(code, __name__) + node = ast['do_a_thing'] + self.assertEqual(node.type, 'function') + + def test_no_infinite_ancestor_loop(self): + klass = test_utils.extract_node(""" + import datetime + + def method(self): + datetime.datetime = something() + + class something(datetime.datetime): #@ + pass + """) + self.assertIn( + 'object', + [base.name for base in klass.ancestors()]) + + def test_stop_iteration_leak(self): + code = """ + class Test: + def __init__(self): + self.config = {0: self.config[0]} + self.config[0].test() #@ + """ + ast = test_utils.extract_node(code, __name__) + expr = ast.func.expr + self.assertRaises(InferenceError, next, expr.infer()) + + def test_tuple_builtin_inference(self): + code = """ + var = (1, 2) + tuple() #@ + tuple([1]) #@ + tuple({2}) #@ + tuple("abc") #@ + tuple({1: 2}) #@ + tuple(var) #@ + tuple(tuple([1])) #@ + + tuple(None) #@ + tuple(1) #@ + tuple(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferTuple(ast[0], []) + self.assertInferTuple(ast[1], [1]) + self.assertInferTuple(ast[2], [2]) + self.assertInferTuple(ast[3], ["a", "b", "c"]) + self.assertInferTuple(ast[4], [1]) + self.assertInferTuple(ast[5], [1, 2]) + self.assertInferTuple(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.tuple".format(BUILTINS)) + + def test_frozenset_builtin_inference(self): + code = """ + var = (1, 2) + frozenset() #@ + frozenset([1, 2, 1]) #@ + frozenset({2, 3, 1}) #@ + frozenset("abcab") #@ + frozenset({1: 2}) #@ + frozenset(var) #@ + frozenset(tuple([1])) #@ + + frozenset(set(tuple([4, 5, set([2])]))) #@ + frozenset(None) #@ + frozenset(1) #@ + frozenset(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferFrozenSet(ast[0], []) + self.assertInferFrozenSet(ast[1], [1, 2]) + self.assertInferFrozenSet(ast[2], [1, 2, 3]) + self.assertInferFrozenSet(ast[3], ["a", "b", "c"]) + self.assertInferFrozenSet(ast[4], [1]) + self.assertInferFrozenSet(ast[5], [1, 2]) + self.assertInferFrozenSet(ast[6], [1]) + + for node in ast[7:]: + infered = next(node.infer()) + self.assertIsInstance(infered, Instance) + self.assertEqual(infered.qname(), "{}.frozenset".format(BUILTINS)) + + def test_set_builtin_inference(self): + code = """ + var = (1, 2) + set() #@ + set([1, 2, 1]) #@ + set({2, 3, 1}) #@ + set("abcab") #@ + set({1: 2}) #@ + set(var) #@ + set(tuple([1])) #@ + + set(set(tuple([4, 5, set([2])]))) #@ + set(None) #@ + set(1) #@ + set(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferSet(ast[0], []) + self.assertInferSet(ast[1], [1, 2]) + self.assertInferSet(ast[2], [1, 2, 3]) + self.assertInferSet(ast[3], ["a", "b", "c"]) + self.assertInferSet(ast[4], [1]) + self.assertInferSet(ast[5], [1, 2]) + self.assertInferSet(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.set".format(BUILTINS)) + + def test_list_builtin_inference(self): + code = """ + var = (1, 2) + list() #@ + list([1, 2, 1]) #@ + list({2, 3, 
1}) #@ + list("abcab") #@ + list({1: 2}) #@ + list(var) #@ + list(tuple([1])) #@ + + list(list(tuple([4, 5, list([2])]))) #@ + list(None) #@ + list(1) #@ + list(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferList(ast[0], []) + self.assertInferList(ast[1], [1, 1, 2]) + self.assertInferList(ast[2], [1, 2, 3]) + self.assertInferList(ast[3], ["a", "a", "b", "b", "c"]) + self.assertInferList(ast[4], [1]) + self.assertInferList(ast[5], [1, 2]) + self.assertInferList(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.list".format(BUILTINS)) + + @test_utils.require_version('3.0') + def test_builtin_inference_py3k(self): + code = """ + list(b"abc") #@ + tuple(b"abc") #@ + set(b"abc") #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferList(ast[0], [97, 98, 99]) + self.assertInferTuple(ast[1], [97, 98, 99]) + self.assertInferSet(ast[2], [97, 98, 99]) + + def test_dict_inference(self): + code = """ + dict() #@ + dict(a=1, b=2, c=3) #@ + dict([(1, 2), (2, 3)]) #@ + dict([[1, 2], [2, 3]]) #@ + dict([(1, 2), [2, 3]]) #@ + dict([('a', 2)], b=2, c=3) #@ + dict({1: 2}) #@ + dict({'c': 2}, a=4, b=5) #@ + def func(): + return dict(a=1, b=2) + func() #@ + var = {'x': 2, 'y': 3} + dict(var, a=1, b=2) #@ + + dict([1, 2, 3]) #@ + dict([(1, 2), (1, 2, 3)]) #@ + dict({1: 2}, {1: 2}) #@ + dict({1: 2}, (1, 2)) #@ + dict({1: 2}, (1, 2), a=4) #@ + dict([(1, 2), ([4, 5], 2)]) #@ + dict([None, None]) #@ + + def using_unknown_kwargs(**kwargs): + return dict(**kwargs) + using_unknown_kwargs(a=1, b=2) #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferDict(ast[0], {}) + self.assertInferDict(ast[1], {'a': 1, 'b': 2, 'c': 3}) + for i in range(2, 5): + self.assertInferDict(ast[i], {1: 2, 2: 3}) + self.assertInferDict(ast[5], {'a': 2, 'b': 2, 'c': 3}) + self.assertInferDict(ast[6], {1: 2}) + self.assertInferDict(ast[7], {'c': 2, 'a': 4, 'b': 5}) + self.assertInferDict(ast[8], {'a': 1, 'b': 2}) + self.assertInferDict(ast[9], {'x': 2, 'y': 3, 'a': 1, 'b': 2}) + + for node in ast[10:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) + + def test_dict_inference_kwargs(self): + ast_node = test_utils.extract_node('''dict(a=1, b=2, **{'c': 3})''') + self.assertInferDict(ast_node, {'a': 1, 'b': 2, 'c': 3}) + + @test_utils.require_version('3.5') + def test_dict_inference_for_multiple_starred(self): + pairs = [ + ('dict(a=1, **{"b": 2}, **{"c":3})', {'a':1, 'b':2, 'c':3}), + ('dict(a=1, **{"b": 2}, d=4, **{"c":3})', {'a':1, 'b':2, 'c':3, 'd':4}), + ('dict({"a":1}, b=2, **{"c":3})', {'a':1, 'b':2, 'c':3}), + ] + for code, expected_value in pairs: + node = test_utils.extract_node(code) + self.assertInferDict(node, expected_value) + + def test_dict_invalid_args(self): + invalid_values = [ + 'dict(*1)', + 'dict(**lala)', + 'dict(**[])', + ] + for invalid in invalid_values: + ast_node = test_utils.extract_node(invalid) + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) + + def test_str_methods(self): + code = """ + ' '.decode() #@ + + ' '.encode() #@ + ' '.join('abcd') #@ + ' '.replace('a', 'b') #@ + ' '.format('a') #@ + ' '.capitalize() #@ + ' '.title() #@ + ' '.lower() #@ + ' '.upper() #@ + ' '.swapcase() #@ + ' '.strip() #@ + ' '.rstrip() #@ + ' '.lstrip() #@ + ' '.rjust() 
#@ + ' '.ljust() #@ + ' '.center() #@ + + ' '.index() #@ + ' '.find() #@ + ' '.count() #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferConst(ast[0], u'') + for i in range(1, 16): + self.assertInferConst(ast[i], '') + for i in range(16, 19): + self.assertInferConst(ast[i], 0) + + def test_unicode_methods(self): + code = """ + u' '.encode() #@ + + u' '.decode() #@ + u' '.join('abcd') #@ + u' '.replace('a', 'b') #@ + u' '.format('a') #@ + u' '.capitalize() #@ + u' '.title() #@ + u' '.lower() #@ + u' '.upper() #@ + u' '.swapcase() #@ + u' '.strip() #@ + u' '.rstrip() #@ + u' '.lstrip() #@ + u' '.rjust() #@ + u' '.ljust() #@ + u' '.center() #@ + + u' '.index() #@ + u' '.find() #@ + u' '.count() #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferConst(ast[0], '') + for i in range(1, 16): + self.assertInferConst(ast[i], u'') + for i in range(16, 19): + self.assertInferConst(ast[i], 0) + + def test_scope_lookup_same_attributes(self): + code = ''' + import collections + class Second(collections.Counter): + def collections(self): + return "second" + + ''' + ast = parse(code, __name__) + bases = ast['Second'].bases[0] + inferred = next(bases.infer()) + self.assertTrue(inferred) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.qname(), 'collections.Counter') + + +class ArgumentsTest(unittest.TestCase): + + @staticmethod + def _get_dict_value(inferred): + items = inferred.items + return sorted((key.value, value.value) for key, value in items) + + @staticmethod + def _get_tuple_value(inferred): + elts = inferred.elts + return tuple(elt.value for elt in elts) + + def test_args(self): + expected_values = [(), (1, ), (2, 3), (4, 5), + (3, ), (), (3, 4, 5), + (), (), (4, ), (4, 5), + (), (3, ), (), (), (3, ), (42, )] + ast_nodes = test_utils.extract_node(''' + def func(*args): + return args + func() #@ + func(1) #@ + func(2, 3) #@ + func(*(4, 5)) #@ + def func(a, b, *args): + return args + func(1, 2, 3) #@ + func(1, 2) #@ + func(1, 2, 3, 4, 5) #@ + def func(a, b, c=42, *args): + return args + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, 3, 4) #@ + func(1, 2, 3, 4, 5) #@ + func = lambda a, b, *args: args + func(1, 2) #@ + func(1, 2, 3) #@ + func = lambda a, b=42, *args: args + func(1) #@ + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, *(42, )) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + @test_utils.require_version('3.5') + def test_multiple_starred_args(self): + expected_values = [ + (1, 2, 3), + (1, 4, 2, 3, 5, 6, 7), + ] + ast_nodes = test_utils.extract_node(''' + def func(a, b, *args): + return args + func(1, 2, *(1, ), *(2, 3)) #@ + func(1, 2, *(1, ), 4, *(2, 3), 5, *(6, 7)) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + def test_defaults(self): + expected_values = [42, 3, 41, 42] + ast_nodes = test_utils.extract_node(''' + def func(a, b, c=42, *args): + return c + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, c=41) #@ + func(1, 2, 42, 41) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + 
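For context on the ArgumentsTest cases around this point: astroid infers a Call node by evaluating the called function's return value for that specific argument binding, which is why defaults, *args tuples and **kwargs dicts can be asserted per call site. A minimal standalone sketch of the same pattern, assuming the astroid/test_utils version vendored by this patch series; the function name `sample` is illustrative only:

    from astroid import nodes, test_utils

    # Infer the call's return value; the default bound to c should be picked up.
    call = test_utils.extract_node('''
    def sample(a, b, c=42, *args):
        return c
    sample(1, 2)  #@
    ''')
    inferred = next(call.infer())
    assert isinstance(inferred, nodes.Const)
    assert inferred.value == 42

This mirrors test_defaults above, just outside the unittest harness.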
@test_utils.require_version('3.0') + def test_kwonly_args(self): + expected_values = [24, 24, 42, 23, 24, 24, 54] + ast_nodes = test_utils.extract_node(''' + def test(*, f, b): return f + test(f=24, b=33) #@ + def test(a, *, f): return f + test(1, f=24) #@ + def test(a, *, f=42): return f + test(1) #@ + test(1, f=23) #@ + def test(a, b, c=42, *args, f=24): + return f + test(1, 2, 3) #@ + test(1, 2, 3, 4) #@ + test(1, 2, 3, 4, 5, f=54) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_kwargs(self): + expected = [ + [('a', 1), ('b', 2), ('c', 3)], + [('a', 1)], + [('a', 'b')], + ] + ast_nodes = test_utils.extract_node(''' + def test(**kwargs): + return kwargs + test(a=1, b=2, c=3) #@ + test(a=1) #@ + test(**{'a': 'b'}) #@ + ''') + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_and_other_named_parameters(self): + ast_nodes = test_utils.extract_node(''' + def test(a=42, b=24, **kwargs): + return kwargs + test(42, 24, c=3, d=4) #@ + test(49, b=24, d=4) #@ + test(a=42, b=33, c=3, d=42) #@ + test(a=42, **{'c':42}) #@ + ''') + expected_values = [ + [('c', 3), ('d', 4)], + [('d', 4)], + [('c', 3), ('d', 42)], + [('c', 42)], + ] + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_access_by_name(self): + expected_values = [42, 42, 42, 24] + ast_nodes = test_utils.extract_node(''' + def test(**kwargs): + return kwargs['f'] + test(f=42) #@ + test(**{'f': 42}) #@ + test(**dict(f=42)) #@ + def test(f=42, **kwargs): + return kwargs['l'] + test(l=24) #@ + ''') + for ast_node, value in zip(ast_nodes, expected_values): + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, value) + + def test_infer_call_result_invalid_dunder_call_on_instance(self): + ast_nodes = test_utils.extract_node(''' + class A: + __call__ = 42 + class B: + __call__ = A() + class C: + __call = None + A() #@ + B() #@ + C() #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertRaises(InferenceError, next, inferred.infer_call_result(node)) + + + def test_subscript_inference_error(self): + # Used to raise StopIteration + ast_node = test_utils.extract_node(''' + class AttributeDict(dict): + def __getitem__(self, name): + return self + flow = AttributeDict() + flow['app'] = AttributeDict() + flow['app']['config'] = AttributeDict() + flow['app']['config']['doffing'] = AttributeDict() #@ + ''') + self.assertIsNone(util.safe_infer(ast_node.targets[0])) + + def test_classmethod_inferred_by_context(self): + ast_node = test_utils.extract_node(''' + class Super(object): + def instance(cls): + return cls() + instance = classmethod(instance) + + class Sub(Super): + def method(self): + return self + + # should see the Sub.instance() is returning a Sub + # instance, not a Super instance + Sub.instance().method() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'Sub') + + @test_utils.require_version('3.5') + def test_multiple_kwargs(self): + expected_value = 
[ + ('a', 1), + ('b', 2), + ('c', 3), + ('d', 4), + ('f', 42), + ] + ast_node = test_utils.extract_node(''' + def test(**kwargs): + return kwargs + test(a=1, b=2, **{'c': 3}, **{'d': 4}, f=42) #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_are_overriden(self): + ast_nodes = test_utils.extract_node(''' + def test(f): + return f + test(f=23, **{'f': 34}) #@ + def test(f=None): + return f + test(f=23, **{'f':23}) #@ + ''') + for ast_node in ast_nodes: + inferred = next(ast_node.infer()) + self.assertEqual(inferred, util.YES) + + def test_fail_to_infer_args(self): + ast_nodes = test_utils.extract_node(''' + def test(a, **kwargs): return a + test(*missing) #@ + test(*object) #@ + test(*1) #@ + + + def test(**kwargs): return kwargs + test(**miss) #@ + test(**(1, 2)) #@ + test(**1) #@ + test(**{misss:1}) #@ + test(**{object:1}) #@ + test(**{1:1}) #@ + test(**{'a':1, 'a':1}) #@ + + def test(a): return a + test() #@ + test(1, 2, 3) #@ + + from unknown import unknown + test(*unknown) #@ + def test(*args): return args + test(*unknown) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.YES) + +class CallSiteTest(unittest.TestCase): + + @staticmethod + def _call_site_from_call(call): + return arguments.CallSite.from_call(call) + + def _test_call_site_pair(self, code, expected_args, expected_keywords): + ast_node = test_utils.extract_node(code) + call_site = self._call_site_from_call(ast_node) + self.assertEqual(len(call_site.positional_arguments), len(expected_args)) + self.assertEqual([arg.value for arg in call_site.positional_arguments], + expected_args) + self.assertEqual(len(call_site.keyword_arguments), len(expected_keywords)) + for keyword, value in expected_keywords.items(): + self.assertIn(keyword, call_site.keyword_arguments) + self.assertEqual(call_site.keyword_arguments[keyword].value, value) + + def _test_call_site(self, pairs): + for pair in pairs: + self._test_call_site_pair(*pair) + + @test_utils.require_version('3.5') + def test_call_site_starred_args(self): + pairs = [ + ( + "f(*(1, 2), *(2, 3), *(3, 4), **{'a':1}, **{'b': 2})", + [1, 2, 2, 3, 3, 4], + {'a': 1, 'b': 2} + ), + ( + "f(1, 2, *(3, 4), 5, *(6, 7), f=24, **{'c':3})", + [1, 2, 3, 4, 5, 6, 7], + {'f':24, 'c': 3}, + ), + # Too many fs passed into. 
+ ( + "f(f=24, **{'f':24})", [], {}, + ), + ] + self._test_call_site(pairs) + + def test_call_site(self): + pairs = [ + ( + "f(1, 2)", [1, 2], {} + ), + ( + "f(1, 2, *(1, 2))", [1, 2, 1, 2], {} + ), + ( + "f(a=1, b=2, c=3)", [], {'a':1, 'b':2, 'c':3} + ) + ] + self._test_call_site(pairs) + + def _test_call_site_valid_arguments(self, values, invalid): + for value in values: + ast_node = test_utils.extract_node(value) + call_site = self._call_site_from_call(ast_node) + self.assertEqual(call_site.has_invalid_arguments(), invalid) + + def test_call_site_valid_arguments(self): + values = [ + "f(*lala)", "f(*1)", "f(*object)", + ] + self._test_call_site_valid_arguments(values, invalid=True) + values = [ + "f()", "f(*(1, ))", "f(1, 2, *(2, 3))", + ] + self._test_call_site_valid_arguments(values, invalid=False) + + def test_duplicated_keyword_arguments(self): + ast_node = test_utils.extract_node('f(f=24, **{"f": 25})') + site = self._call_site_from_call(ast_node) + self.assertIn('f', site.duplicated_keywords) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_lookup.py b/pymode/libs/astroid/tests/unittest_lookup.py new file mode 100644 index 00000000..bd1786d5 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_lookup.py @@ -0,0 +1,352 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""tests for the astroid variable lookup capabilities +""" +import functools +import sys +import unittest + +from astroid import builder +from astroid import exceptions +from astroid import nodes +from astroid import scoped_nodes +from astroid import test_utils +from astroid import util +from astroid.tests import resources + + +class LookupTest(resources.SysPathSetup, unittest.TestCase): + + def setUp(self): + super(LookupTest, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') + + def test_limit(self): + code = ''' + l = [a + for a,b in list] + + a = 1 + b = a + a = None + + def func(): + c = 1 + ''' + astroid = builder.parse(code, __name__) + # a & b + a = next(astroid.nodes_of_class(nodes.Name)) + self.assertEqual(a.lineno, 2) + if sys.version_info < (3, 0): + self.assertEqual(len(astroid.lookup('b')[1]), 1) + self.assertEqual(len(astroid.lookup('a')[1]), 1) + b = astroid._locals['b'][1] + else: + self.assertEqual(len(astroid.lookup('b')[1]), 1) + self.assertEqual(len(astroid.lookup('a')[1]), 1) + b = astroid._locals['b'][0] + + stmts = a.lookup('a')[1] + self.assertEqual(len(stmts), 1) + self.assertEqual(b.lineno, 6) + b_infer = b.infer() + b_value = next(b_infer) + self.assertEqual(b_value.value, 1) + # c + self.assertRaises(StopIteration, functools.partial(next, b_infer)) + func = astroid._locals['func'][0] + self.assertEqual(len(func.lookup('c')[1]), 1) + + def test_module(self): + astroid = builder.parse('pass', __name__) + # built-in objects + none = next(astroid.ilookup('None')) + self.assertIsNone(none.value) + obj = next(astroid.ilookup('object')) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertEqual(obj.name, 'object') + self.assertRaises(exceptions.InferenceError, + functools.partial(next, astroid.ilookup('YOAA'))) + + # XXX + self.assertEqual(len(list(self.nonregr.ilookup('enumerate'))), 2) + + def test_class_ancestor_name(self): + code = ''' + class A: + pass + + class A(A): + pass + ''' + astroid = builder.parse(code, __name__) + cls1 = astroid._locals['A'][0] + cls2 = astroid._locals['A'][1] + name = next(cls2.nodes_of_class(nodes.Name)) + self.assertEqual(next(name.infer()), cls1) + + ### backport those test to inline code + def test_method(self): + method = self.module['YOUPI']['method'] + my_dict = next(method.ilookup('MY_DICT')) + self.assertTrue(isinstance(my_dict, nodes.Dict), my_dict) + none = next(method.ilookup('None')) + self.assertIsNone(none.value) + self.assertRaises(exceptions.InferenceError, + functools.partial(next, method.ilookup('YOAA'))) + + def test_function_argument_with_default(self): + make_class = self.module2['make_class'] + base = next(make_class.ilookup('base')) + self.assertTrue(isinstance(base, nodes.ClassDef), base.__class__) + self.assertEqual(base.name, 'YO') + self.assertEqual(base.root().name, 'data.module') + + def test_class(self): + klass = self.module['YOUPI'] + my_dict = next(klass.ilookup('MY_DICT')) + self.assertIsInstance(my_dict, nodes.Dict) + none = next(klass.ilookup('None')) + self.assertIsNone(none.value) + obj = next(klass.ilookup('object')) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertEqual(obj.name, 'object') + self.assertRaises(exceptions.InferenceError, + functools.partial(next, klass.ilookup('YOAA'))) + + def test_inner_classes(self): + ddd = list(self.nonregr['Ccc'].ilookup('Ddd')) + self.assertEqual(ddd[0].name, 'Ddd') 
+ + def test_loopvar_hiding(self): + astroid = builder.parse(""" + x = 10 + for x in range(5): + print (x) + + if x > 0: + print ('#' * x) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'x'] + # inside the loop, only one possible assignment + self.assertEqual(len(xnames[0].lookup('x')[1]), 1) + # outside the loop, two possible assignments + self.assertEqual(len(xnames[1].lookup('x')[1]), 2) + self.assertEqual(len(xnames[2].lookup('x')[1]), 2) + + def test_list_comps(self): + astroid = builder.parse(""" + print ([ i for i in range(10) ]) + print ([ i for i in range(10) ]) + print ( list( i for i in range(10) ) ) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + self.assertEqual(len(xnames[2].lookup('i')[1]), 1) + self.assertEqual(xnames[2].lookup('i')[1][0].lineno, 4) + + def test_list_comp_target(self): + """test the list comprehension target""" + astroid = builder.parse(""" + ten = [ var for var in range(10) ] + var + """) + var = astroid.body[1].value + if sys.version_info < (3, 0): + self.assertEqual(var.inferred(), [util.YES]) + else: + self.assertRaises(exceptions.UnresolvableName, var.inferred) + + def test_dict_comps(self): + astroid = builder.parse(""" + print ({ i: j for i in range(10) for j in range(10) }) + print ({ i: j for i in range(10) for j in range(10) }) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'j'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + def test_set_comps(self): + astroid = builder.parse(""" + print ({ i for i in range(10) }) + print ({ i for i in range(10) }) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + def test_set_comp_closure(self): + astroid = builder.parse(""" + ten = { var for var in range(10) } + var + """) + var = astroid.body[1].value + self.assertRaises(exceptions.UnresolvableName, var.inferred) + + def test_generator_attributes(self): + tree = builder.parse(""" + def count(): + "test" + yield 0 + + iterer = count() + num = iterer.next() + """) + next_node = tree.body[2].value.func + gener = next_node.expr.inferred()[0] + if sys.version_info < (3, 0): + self.assertIsInstance(gener.getattr('next')[0], nodes.FunctionDef) + else: + self.assertIsInstance(gener.getattr('__next__')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('send')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('throw')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('close')[0], nodes.FunctionDef) + + def test_explicit___name__(self): + code = ''' + class 
Pouet: + __name__ = "pouet" + p1 = Pouet() + + class PouetPouet(Pouet): pass + p2 = Pouet() + + class NoName: pass + p3 = NoName() + ''' + astroid = builder.parse(code, __name__) + p1 = next(astroid['p1'].infer()) + self.assertTrue(p1.getattr('__name__')) + p2 = next(astroid['p2'].infer()) + self.assertTrue(p2.getattr('__name__')) + self.assertTrue(astroid['NoName'].getattr('__name__')) + p3 = next(astroid['p3'].infer()) + self.assertRaises(exceptions.NotFoundError, p3.getattr, '__name__') + + def test_function_module_special(self): + astroid = builder.parse(''' + def initialize(linter): + """initialize linter with checkers in this package """ + package_load(linter, __path__[0]) + ''', 'data.__init__') + path = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == '__path__'][0] + self.assertEqual(len(path.lookup('__path__')[1]), 1) + + def test_builtin_lookup(self): + self.assertEqual(scoped_nodes.builtin_lookup('__dict__')[1], ()) + intstmts = scoped_nodes.builtin_lookup('int')[1] + self.assertEqual(len(intstmts), 1) + self.assertIsInstance(intstmts[0], nodes.ClassDef) + self.assertEqual(intstmts[0].name, 'int') + self.assertIs(intstmts[0], nodes.const_factory(1)._proxied) + + def test_decorator_arguments_lookup(self): + code = ''' + def decorator(value): + def wrapper(function): + return function + return wrapper + + class foo: + member = 10 #@ + + @decorator(member) #This will cause pylint to complain + def test(self): + pass + ''' + member = test_utils.extract_node(code, __name__).targets[0] + it = member.infer() + obj = next(it) + self.assertIsInstance(obj, nodes.Const) + self.assertEqual(obj.value, 10) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_inner_decorator_member_lookup(self): + code = ''' + class FileA: + def decorator(bla): + return bla + + @__(decorator) + def funcA(): + return 4 + ''' + decname = test_utils.extract_node(code, __name__) + it = decname.infer() + obj = next(it) + self.assertIsInstance(obj, nodes.FunctionDef) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_static_method_lookup(self): + code = ''' + class FileA: + @staticmethod + def funcA(): + return 4 + + + class Test: + FileA = [1,2,3] + + def __init__(self): + print (FileA.funcA()) + ''' + astroid = builder.parse(code, __name__) + it = astroid['Test']['__init__'].ilookup('FileA') + obj = next(it) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_global_delete(self): + code = ''' + def run2(): + f = Frobble() + + class Frobble: + pass + Frobble.mumble = True + + del Frobble + + def run1(): + f = Frobble() + ''' + astroid = builder.parse(code, __name__) + stmts = astroid['run2'].lookup('Frobbel')[1] + self.assertEqual(len(stmts), 0) + stmts = astroid['run1'].lookup('Frobbel')[1] + self.assertEqual(len(stmts), 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_manager.py b/pymode/libs/astroid/tests/unittest_manager.py new file mode 100644 index 00000000..452b759e --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_manager.py @@ -0,0 +1,216 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +import os +import platform +import sys +import unittest + +import six + +from astroid import exceptions +from astroid import manager +from astroid.tests import resources + + +BUILTINS = six.moves.builtins.__name__ + + +def _get_file_from_object(obj): + if platform.python_implementation() == 'Jython': + return obj.__file__.split("$py.class")[0] + ".py" + if sys.version_info > (3, 0): + return obj.__file__ + if not obj.__file__.endswith(".py"): + return obj.__file__[:-1] + return obj.__file__ + + +class AstroidManagerTest(resources.SysPathSetup, + resources.AstroidCacheSetupMixin, + unittest.TestCase): + + def setUp(self): + super(AstroidManagerTest, self).setUp() + self.manager = manager.AstroidManager() + self.manager.clear_cache(self._builtins) # take care of borg + + def test_ast_from_file(self): + filepath = unittest.__file__ + astroid = self.manager.ast_from_file(filepath) + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_cache(self): + filepath = unittest.__file__ + self.manager.ast_from_file(filepath) + astroid = self.manager.ast_from_file('unhandledName', 'unittest') + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_astro_builder(self): + filepath = unittest.__file__ + astroid = self.manager.ast_from_file(filepath, None, True, True) + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_name_astro_builder_exception(self): + self.assertRaises(exceptions.AstroidBuildingException, + self.manager.ast_from_file, 'unhandledName') + + def test_do_not_expose_main(self): + obj = self.manager.ast_from_module_name('__main__') + self.assertEqual(obj.name, '__main__') + self.assertEqual(obj.items(), []) + + def test_ast_from_module_name(self): + astroid = self.manager.ast_from_module_name('unittest') + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_module_name_not_python_source(self): + astroid = self.manager.ast_from_module_name('time') + self.assertEqual(astroid.name, 'time') + self.assertIn('time', self.manager.astroid_cache) + self.assertEqual(astroid.pure_python, False) + + def test_ast_from_module_name_astro_builder_exception(self): + self.assertRaises(exceptions.AstroidBuildingException, + self.manager.ast_from_module_name, + 'unhandledModule') + + def _test_ast_from_zip(self, archive): + origpath = sys.path[:] + sys.modules.pop('mypypa', None) + archive_path = resources.find(archive) + sys.path.insert(0, archive_path) + try: + module = self.manager.ast_from_module_name('mypypa') + self.assertEqual(module.name, 'mypypa') + end = os.path.join(archive, 'mypypa') + self.assertTrue(module.source_file.endswith(end), + "%s doesn't endswith %s" % (module.source_file, end)) + finally: 
+            # remove the module, else after importing egg, we don't get the zip
+            if 'mypypa' in self.manager.astroid_cache:
+                del self.manager.astroid_cache['mypypa']
+                del self.manager._mod_file_cache[('mypypa', None)]
+            if archive_path in sys.path_importer_cache:
+                del sys.path_importer_cache[archive_path]
+            sys.path = origpath
+
+    def test_ast_from_module_name_egg(self):
+        self._test_ast_from_zip(
+            os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.egg')])
+        )
+
+    def test_ast_from_module_name_zip(self):
+        self._test_ast_from_zip(
+            os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.zip')])
+        )
+
+    def test_zip_import_data(self):
+        """check if zip_import_data works"""
+        filepath = resources.find('data/MyPyPa-0.1.0-py2.5.zip/mypypa')
+        astroid = self.manager.zip_import_data(filepath)
+        self.assertEqual(astroid.name, 'mypypa')
+
+    def test_zip_import_data_without_zipimport(self):
+        """check if zip_import_data returns None without zipimport"""
+        self.assertEqual(self.manager.zip_import_data('path'), None)
+
+    def test_file_from_module(self):
+        """check that the unittest filepath is equal to the result of the method"""
+        self.assertEqual(
+            _get_file_from_object(unittest),
+            self.manager.file_from_module_name('unittest', None)[0])
+
+    def test_file_from_module_name_astro_building_exception(self):
+        """check that the method raises an exception for a wrong module name"""
+        self.assertRaises(exceptions.AstroidBuildingException,
+                          self.manager.file_from_module_name, 'unhandledModule', None)
+
+    def test_ast_from_module(self):
+        astroid = self.manager.ast_from_module(unittest)
+        self.assertEqual(astroid.pure_python, True)
+        import time
+        astroid = self.manager.ast_from_module(time)
+        self.assertEqual(astroid.pure_python, False)
+
+    def test_ast_from_module_cache(self):
+        """check that the module ends up in the manager's cache"""
+        astroid = self.manager.ast_from_module(unittest)
+        self.assertEqual(astroid.name, 'unittest')
+        self.assertIn('unittest', self.manager.astroid_cache)
+
+    def test_ast_from_class(self):
+        astroid = self.manager.ast_from_class(int)
+        self.assertEqual(astroid.name, 'int')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+
+        astroid = self.manager.ast_from_class(object)
+        self.assertEqual(astroid.name, 'object')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+        self.assertIn('__setattr__', astroid)
+
+    def test_ast_from_class_with_module(self):
+        """check that the method works with the module name"""
+        astroid = self.manager.ast_from_class(int, int.__module__)
+        self.assertEqual(astroid.name, 'int')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+
+        astroid = self.manager.ast_from_class(object, object.__module__)
+        self.assertEqual(astroid.name, 'object')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+        self.assertIn('__setattr__', astroid)
+
+    def test_ast_from_class_attr_error(self):
+        """pass a wrong class to the ast_from_class method"""
+        self.assertRaises(exceptions.AstroidBuildingException,
+                          self.manager.ast_from_class, None)
+
+    def testFailedImportHooks(self):
+        def hook(modname):
+            if modname == 'foo.bar':
+                return unittest
+            else:
+                raise exceptions.AstroidBuildingException()
+
+        with self.assertRaises(exceptions.AstroidBuildingException):
+            self.manager.ast_from_module_name('foo.bar')
+        self.manager.register_failed_import_hook(hook)
+        self.assertEqual(unittest, self.manager.ast_from_module_name('foo.bar'))
+        with self.assertRaises(exceptions.AstroidBuildingException):
+            self.manager.ast_from_module_name('foo.bar.baz')
+
del self.manager._failed_import_hooks[0] + + +class BorgAstroidManagerTC(unittest.TestCase): + + def test_borg(self): + """test that the AstroidManager is really a borg, i.e. that two different + instances has same cache""" + first_manager = manager.AstroidManager() + built = first_manager.ast_from_module_name(BUILTINS) + + second_manager = manager.AstroidManager() + second_built = second_manager.ast_from_module_name(BUILTINS) + self.assertIs(built, second_built) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_modutils.py b/pymode/libs/astroid/tests/unittest_modutils.py new file mode 100644 index 00000000..dffc3b8d --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_modutils.py @@ -0,0 +1,269 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +""" +unit tests for module modutils (module manipulation utilities) +""" +import os +import sys +import unittest + +from astroid import modutils +from astroid.tests import resources + + +def _get_file_from_object(obj): + return modutils._path_from_filename(obj.__file__) + + +class ModuleFileTest(unittest.TestCase): + package = "mypypa" + + def tearDown(self): + for k in list(sys.path_importer_cache.keys()): + if 'MyPyPa' in k: + del sys.path_importer_cache[k] + + def test_find_zipped_module(self): + mtype, mfile = modutils._module_file( + [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.zip')]) + self.assertEqual(mtype, modutils.PY_ZIPMODULE) + self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.zip", self.package]) + + def test_find_egg_module(self): + mtype, mfile = modutils._module_file( + [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.egg')]) + self.assertEqual(mtype, modutils.PY_ZIPMODULE) + self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.egg", self.package]) + + +class LoadModuleFromNameTest(unittest.TestCase): + """ load a python module from it's name """ + + def test_knownValues_load_module_from_name_1(self): + self.assertEqual(modutils.load_module_from_name('sys'), sys) + + def test_knownValues_load_module_from_name_2(self): + self.assertEqual(modutils.load_module_from_name('os.path'), os.path) + + def test_raise_load_module_from_name_1(self): + self.assertRaises(ImportError, + modutils.load_module_from_name, 'os.path', use_sys=0) + + +class GetModulePartTest(unittest.TestCase): + """given a dotted name return the module part of the name""" + + def test_knownValues_get_module_part_1(self): + self.assertEqual(modutils.get_module_part('astroid.modutils'), + 'astroid.modutils') + + def test_knownValues_get_module_part_2(self): + self.assertEqual(modutils.get_module_part('astroid.modutils.get_module_part'), + 'astroid.modutils') + + def test_knownValues_get_module_part_3(self): + """relative 
import from given file""" + self.assertEqual(modutils.get_module_part('node_classes.AssName', + modutils.__file__), 'node_classes') + + def test_knownValues_get_compiled_module_part(self): + self.assertEqual(modutils.get_module_part('math.log10'), 'math') + self.assertEqual(modutils.get_module_part('math.log10', __file__), 'math') + + def test_knownValues_get_builtin_module_part(self): + self.assertEqual(modutils.get_module_part('sys.path'), 'sys') + self.assertEqual(modutils.get_module_part('sys.path', '__file__'), 'sys') + + def test_get_module_part_exception(self): + self.assertRaises(ImportError, modutils.get_module_part, 'unknown.module', + modutils.__file__) + + +class ModPathFromFileTest(unittest.TestCase): + """ given an absolute file path return the python module's path as a list """ + + def test_knownValues_modpath_from_file_1(self): + from xml.etree import ElementTree + self.assertEqual(modutils.modpath_from_file(ElementTree.__file__), + ['xml', 'etree', 'ElementTree']) + + def test_knownValues_modpath_from_file_2(self): + self.assertEqual(modutils.modpath_from_file('unittest_modutils.py', + {os.getcwd(): 'arbitrary.pkg'}), + ['arbitrary', 'pkg', 'unittest_modutils']) + + def test_raise_modpath_from_file_Exception(self): + self.assertRaises(Exception, modutils.modpath_from_file, '/turlututu') + + +class LoadModuleFromPathTest(resources.SysPathSetup, unittest.TestCase): + + def test_do_not_load_twice(self): + modutils.load_module_from_modpath(['data', 'lmfp', 'foo']) + modutils.load_module_from_modpath(['data', 'lmfp']) + self.assertEqual(len(sys.just_once), 1) + del sys.just_once + + +class FileFromModPathTest(resources.SysPathSetup, unittest.TestCase): + """given a mod path (i.e. splited module / package name), return the + corresponding file, giving priority to source file over precompiled file + if it exists""" + + def test_site_packages(self): + filename = _get_file_from_object(modutils) + result = modutils.file_from_modpath(['astroid', 'modutils']) + self.assertEqual(os.path.realpath(result), os.path.realpath(filename)) + + def test_std_lib(self): + from os import path + self.assertEqual(os.path.realpath(modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py')), + os.path.realpath(path.__file__.replace('.pyc', '.py'))) + + def test_xmlplus(self): + try: + # don't fail if pyxml isn't installed + from xml.dom import ext + except ImportError: + pass + else: + self.assertEqual(os.path.realpath(modutils.file_from_modpath(['xml', 'dom', 'ext']).replace('.pyc', '.py')), + os.path.realpath(ext.__file__.replace('.pyc', '.py'))) + + def test_builtin(self): + self.assertEqual(modutils.file_from_modpath(['sys']), + None) + + + def test_unexisting(self): + self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu']) + + def test_unicode_in_package_init(self): + # file_from_modpath should not crash when reading an __init__ + # file with unicode characters. 
+ modutils.file_from_modpath(["data", "unicode_package", "core"]) + + +class GetSourceFileTest(unittest.TestCase): + + def test(self): + filename = _get_file_from_object(os.path) + self.assertEqual(modutils.get_source_file(os.path.__file__), + os.path.normpath(filename)) + + def test_raise(self): + self.assertRaises(modutils.NoSourceFile, modutils.get_source_file, 'whatever') + + +class StandardLibModuleTest(resources.SysPathSetup, unittest.TestCase): + """ + return true if the module may be considered as a module from the standard + library + """ + + def test_datetime(self): + # This is an interesting example, since datetime, on pypy, + # is under lib_pypy, rather than the usual Lib directory. + self.assertTrue(modutils.is_standard_module('datetime')) + + def test_builtins(self): + if sys.version_info < (3, 0): + self.assertEqual(modutils.is_standard_module('__builtin__'), True) + self.assertEqual(modutils.is_standard_module('builtins'), False) + else: + self.assertEqual(modutils.is_standard_module('__builtin__'), False) + self.assertEqual(modutils.is_standard_module('builtins'), True) + + def test_builtin(self): + self.assertEqual(modutils.is_standard_module('sys'), True) + self.assertEqual(modutils.is_standard_module('marshal'), True) + + def test_nonstandard(self): + self.assertEqual(modutils.is_standard_module('astroid'), False) + + def test_unknown(self): + self.assertEqual(modutils.is_standard_module('unknown'), False) + + def test_4(self): + self.assertEqual(modutils.is_standard_module('hashlib'), True) + self.assertEqual(modutils.is_standard_module('pickle'), True) + self.assertEqual(modutils.is_standard_module('email'), True) + self.assertEqual(modutils.is_standard_module('io'), sys.version_info >= (2, 6)) + self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) + self.assertEqual(modutils.is_standard_module('unicodedata'), True) + + def test_custom_path(self): + datadir = resources.find('') + if datadir.startswith(modutils.EXT_LIB_DIR): + self.skipTest('known breakage of is_standard_module on installed package') + self.assertEqual(modutils.is_standard_module('data.module', (datadir,)), True) + self.assertEqual(modutils.is_standard_module('data.module', (os.path.abspath(datadir),)), True) + + def test_failing_edge_cases(self): + from xml import etree + # using a subpackage/submodule path as std_path argument + self.assertEqual(modutils.is_standard_module('xml.etree', etree.__path__), False) + # using a module + object name as modname argument + self.assertEqual(modutils.is_standard_module('sys.path'), True) + # this is because only the first package/module is considered + self.assertEqual(modutils.is_standard_module('sys.whatever'), True) + self.assertEqual(modutils.is_standard_module('xml.whatever', etree.__path__), False) + + +class IsRelativeTest(unittest.TestCase): + + + def test_knownValues_is_relative_1(self): + import email + self.assertEqual(modutils.is_relative('utils', email.__path__[0]), + True) + + def test_knownValues_is_relative_2(self): + from xml.etree import ElementTree + self.assertEqual(modutils.is_relative('ElementPath', ElementTree.__file__), + True) + + def test_knownValues_is_relative_3(self): + import astroid + self.assertEqual(modutils.is_relative('astroid', astroid.__path__[0]), + False) + + +class GetModuleFilesTest(unittest.TestCase): + + def test_get_module_files_1(self): + package = resources.find('data/find_test') + modules = set(modutils.get_module_files(package, [])) + expected = ['__init__.py', 'module.py', 
'module2.py', + 'noendingnewline.py', 'nonregr.py'] + self.assertEqual(modules, + {os.path.join(package, x) for x in expected}) + + def test_load_module_set_attribute(self): + import xml.etree.ElementTree + import xml + del xml.etree.ElementTree + del sys.modules['xml.etree.ElementTree'] + m = modutils.load_module_from_modpath(['xml', 'etree', 'ElementTree']) + self.assertTrue(hasattr(xml, 'etree')) + self.assertTrue(hasattr(xml.etree, 'ElementTree')) + self.assertTrue(m is xml.etree.ElementTree) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_nodes.py b/pymode/libs/astroid/tests/unittest_nodes.py new file mode 100644 index 00000000..6fa4b6f3 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_nodes.py @@ -0,0 +1,764 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for specific behaviour of astroid nodes +""" +import os +import sys +import textwrap +import unittest +import warnings + +import six + +from astroid import bases +from astroid import builder +from astroid import context as contextmod +from astroid import exceptions +from astroid import node_classes +from astroid import nodes +from astroid import parse +from astroid import util +from astroid import test_utils +from astroid import transforms +from astroid.tests import resources + + +abuilder = builder.AstroidBuilder() +BUILTINS = six.moves.builtins.__name__ + + +class AsStringTest(resources.SysPathSetup, unittest.TestCase): + + def test_tuple_as_string(self): + def build(string): + return abuilder.string_build(string).body[0].value + + self.assertEqual(build('1,').as_string(), '(1, )') + self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') + self.assertEqual(build('(1, )').as_string(), '(1, )') + self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') + + def test_as_string_for_list_containing_uninferable(self): + node = test_utils.extract_node(''' + def foo(arg): + bar = [arg] * 1 + ''') + binop = node.body[0].value + inferred = next(binop.infer()) + self.assertEqual(inferred.as_string(), '[Uninferable]') + self.assertEqual(binop.as_string(), '([arg]) * (1)') + + def test_frozenset_as_string(self): + nodes = test_utils.extract_node(''' + frozenset((1, 2, 3)) #@ + frozenset({1, 2, 3}) #@ + frozenset([1, 2, 3,]) #@ + + frozenset(None) #@ + frozenset(1) #@ + ''') + nodes = [next(node.infer()) for node in nodes] + + self.assertEqual(nodes[0].as_string(), 'frozenset((1, 2, 3))') + self.assertEqual(nodes[1].as_string(), 'frozenset({1, 2, 3})') + self.assertEqual(nodes[2].as_string(), 'frozenset([1, 2, 3])') + + self.assertNotEqual(nodes[3].as_string(), 'frozenset(None)') + self.assertNotEqual(nodes[4].as_string(), 'frozenset(1)') + + @test_utils.require_version(minver='3.0') + def test_func_signature_issue_185(self): + code = 
textwrap.dedent(''' + def test(a, b, c=42, *, x=42, **kwargs): + print(a, b, c, args) + ''') + node = parse(code) + self.assertEqual(node.as_string().strip(), code.strip()) + def test_varargs_kwargs_as_string(self): + ast = abuilder.string_build('raise_string(*args, **kwargs)').body[0] + self.assertEqual(ast.as_string(), 'raise_string(*args, **kwargs)') + + def test_module_as_string(self): + """check as_string on a whole module prepared to be returned identically + """ + module = resources.build_file('data/module.py', 'data.module') + with open(resources.find('data/module.py'), 'r') as fobj: + self.assertMultiLineEqual(module.as_string(), fobj.read()) + + def test_module2_as_string(self): + """check as_string on a whole module prepared to be returned identically + """ + module2 = resources.build_file('data/module2.py', 'data.module2') + with open(resources.find('data/module2.py'), 'r') as fobj: + self.assertMultiLineEqual(module2.as_string(), fobj.read()) + + def test_as_string(self): + """check as_string for python syntax >= 2.7""" + code = '''one_two = {1, 2} +b = {v: k for (k, v) in enumerate('string')} +cdd = {k for k in b}\n\n''' + ast = abuilder.string_build(code) + self.assertMultiLineEqual(ast.as_string(), code) + + @test_utils.require_version('3.0') + def test_3k_as_string(self): + """check as_string for python 3k syntax""" + code = '''print() + +def function(var): + nonlocal counter + try: + hello + except NameError as nexc: + (*hell, o) = b'hello' + raise AttributeError from nexc +\n''' + ast = abuilder.string_build(code) + self.assertEqual(ast.as_string(), code) + + @test_utils.require_version('3.0') + @unittest.expectedFailure + def test_3k_annotations_and_metaclass(self): + code_annotations = textwrap.dedent(''' + def function(var:int): + nonlocal counter + + class Language(metaclass=Natural): + """natural language""" + ''') + + ast = abuilder.string_build(code_annotations) + self.assertEqual(ast.as_string(), code_annotations) + + def test_ellipsis(self): + ast = abuilder.string_build('a[...]').body[0] + self.assertEqual(ast.as_string(), 'a[...]') + + def test_slices(self): + for code in ('a[0]', 'a[1:3]', 'a[:-1:step]', 'a[:,newaxis]', + 'a[newaxis,:]', 'del L[::2]', 'del A[1]', 'del Br[:]'): + ast = abuilder.string_build(code).body[0] + self.assertEqual(ast.as_string(), code) + + def test_slice_and_subscripts(self): + code = """a[:1] = bord[2:] +a[:1] = bord[2:] +del bree[3:d] +bord[2:] +del av[d::f], a[df:] +a[:1] = bord[2:] +del SRC[::1,newaxis,1:] +tous[vals] = 1010 +del thousand[key] +del a[::2], a[:-1:step] +del Fee.form[left:] +aout.vals = miles.of_stuff +del (ccok, (name.thing, foo.attrib.value)), Fee.form[left:] +if all[1] == bord[0:]: + pass\n\n""" + ast = abuilder.string_build(code) + self.assertEqual(ast.as_string(), code) + + +class _NodeTest(unittest.TestCase): + """test transformation of If Node""" + CODE = None + + @property + def astroid(self): + try: + return self.__class__.__dict__['CODE_Astroid'] + except KeyError: + astroid = builder.parse(self.CODE) + self.__class__.CODE_Astroid = astroid + return astroid + + +class IfNodeTest(_NodeTest): + """test transformation of If Node""" + CODE = """ + if 0: + print() + + if True: + print() + else: + pass + + if "": + print() + elif []: + raise + + if 1: + print() + elif True: + print() + elif func(): + pass + else: + raise + """ + + def test_if_elif_else_node(self): + """test transformation for If node""" + self.assertEqual(len(self.astroid.body), 4) + for stmt in self.astroid.body: + self.assertIsInstance(stmt, 
nodes.If) + self.assertFalse(self.astroid.body[0].orelse) # simple If + self.assertIsInstance(self.astroid.body[1].orelse[0], nodes.Pass) # If / else + self.assertIsInstance(self.astroid.body[2].orelse[0], nodes.If) # If / elif + self.assertIsInstance(self.astroid.body[3].orelse[0].orelse[0], nodes.If) + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.block_range(1), (0, 22)) + self.assertEqual(self.astroid.block_range(10), (0, 22)) # XXX (10, 22) ? + self.assertEqual(self.astroid.body[1].block_range(5), (5, 6)) + self.assertEqual(self.astroid.body[1].block_range(6), (6, 6)) + self.assertEqual(self.astroid.body[1].orelse[0].block_range(7), (7, 8)) + self.assertEqual(self.astroid.body[1].orelse[0].block_range(8), (8, 8)) + + +class TryExceptNodeTest(_NodeTest): + CODE = """ + try: + print ('pouet') + except IOError: + pass + except UnicodeError: + print() + else: + print() + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 8)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 8)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) + self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) + self.assertEqual(self.astroid.body[0].block_range(7), (7, 7)) + self.assertEqual(self.astroid.body[0].block_range(8), (8, 8)) + + +class TryFinallyNodeTest(_NodeTest): + CODE = """ + try: + print ('pouet') + finally: + print ('pouet') + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 4)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + + +class TryExceptFinallyNodeTest(_NodeTest): + CODE = """ + try: + print('pouet') + except Exception: + print ('oops') + finally: + print ('pouet') + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 6)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) + self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) + + +@unittest.skipIf(six.PY3, "Python 2 specific test.") +class TryExcept2xNodeTest(_NodeTest): + CODE = """ + try: + hello + except AttributeError, (retval, desc): + pass + """ + + + def test_tuple_attribute(self): + handler = self.astroid.body[0].handlers[0] + self.assertIsInstance(handler.name, nodes.Tuple) + + +class ImportNodeTest(resources.SysPathSetup, unittest.TestCase): + def setUp(self): + super(ImportNodeTest, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + + def test_import_self_resolve(self): + myos = next(self.module2.igetattr('myos')) + self.assertTrue(isinstance(myos, nodes.Module), myos) + self.assertEqual(myos.name, 'os') + self.assertEqual(myos.qname(), 'os') + self.assertEqual(myos.pytype(), '%s.module' % BUILTINS) + + def test_from_self_resolve(self): + namenode = next(self.module.igetattr('NameNode')) + self.assertTrue(isinstance(namenode, nodes.ClassDef), 
namenode)
+        self.assertEqual(namenode.root().name, 'astroid.node_classes')
+        self.assertEqual(namenode.qname(), 'astroid.node_classes.Name')
+        self.assertEqual(namenode.pytype(), '%s.type' % BUILTINS)
+        abspath = next(self.module2.igetattr('abspath'))
+        self.assertTrue(isinstance(abspath, nodes.FunctionDef), abspath)
+        self.assertEqual(abspath.root().name, 'os.path')
+        self.assertEqual(abspath.qname(), 'os.path.abspath')
+        self.assertEqual(abspath.pytype(), '%s.function' % BUILTINS)
+
+    def test_real_name(self):
+        from_ = self.module['NameNode']
+        self.assertEqual(from_.real_name('NameNode'), 'Name')
+        imp_ = self.module['os']
+        self.assertEqual(imp_.real_name('os'), 'os')
+        self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'os.path')
+        imp_ = self.module['NameNode']
+        self.assertEqual(imp_.real_name('NameNode'), 'Name')
+        self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'Name')
+        imp_ = self.module2['YO']
+        self.assertEqual(imp_.real_name('YO'), 'YO')
+        self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'data')
+
+    def test_as_string(self):
+        ast = self.module['modutils']
+        self.assertEqual(ast.as_string(), "from astroid import modutils")
+        ast = self.module['NameNode']
+        self.assertEqual(ast.as_string(), "from astroid.node_classes import Name as NameNode")
+        ast = self.module['os']
+        self.assertEqual(ast.as_string(), "import os.path")
+        code = """from . import here
+from .. import door
+from .store import bread
+from ..cave import wine\n\n"""
+        ast = abuilder.string_build(code)
+        self.assertMultiLineEqual(ast.as_string(), code)
+
+    def test_bad_import_inference(self):
+        # Explanation of the bug:
+        '''When we import PickleError from nonexistent, a call to the infer
+        method of this From node will be made by unpack_infer.
+        inference.infer_from will try to import this module, which will fail and
+        raise an InferenceException (by mixins.do_import_module). The infer_name
+        will catch this exception and yield YES instead.
+        '''
+
+        code = '''
+            try:
+                from pickle import PickleError
+            except ImportError:
+                from nonexistent import PickleError
+
+            try:
+                pass
+            except PickleError:
+                pass
+        '''
+        astroid = builder.parse(code)
+        handler_type = astroid.body[1].handlers[0].type
+
+        excs = list(node_classes.unpack_infer(handler_type))
+        # The number of returned objects can differ between Python 2
+        # and Python 3. In one version, an additional item will
+        # be returned, from the _pickle module, which is not
+        # present in the other version.
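+        # unpack_infer flattens every value inferred for the handler type:
+        # here that should be the real pickle.PickleError ClassDef plus YES
+        # for the import that cannot be resolved, which is why only the
+        # first and the last element are asserted below.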
+ self.assertIsInstance(excs[0], nodes.ClassDef) + self.assertEqual(excs[0].name, 'PickleError') + self.assertIs(excs[-1], util.YES) + + def test_absolute_import(self): + astroid = resources.build_file('data/absimport.py') + ctx = contextmod.InferenceContext() + # will fail if absolute import failed + ctx.lookupname = 'message' + next(astroid['message'].infer(ctx)) + ctx.lookupname = 'email' + m = next(astroid['email'].infer(ctx)) + self.assertFalse(m.source_file.startswith(os.path.join('data', 'email.py'))) + + def test_more_absolute_import(self): + astroid = resources.build_file('data/module1abs/__init__.py', 'data.module1abs') + self.assertIn('sys', astroid._locals) + + +class CmpNodeTest(unittest.TestCase): + def test_as_string(self): + ast = abuilder.string_build("a == 2").body[0] + self.assertEqual(ast.as_string(), "a == 2") + + +class ConstNodeTest(unittest.TestCase): + + def _test(self, value): + node = nodes.const_factory(value) + self.assertIsInstance(node._proxied, nodes.ClassDef) + self.assertEqual(node._proxied.name, value.__class__.__name__) + self.assertIs(node.value, value) + self.assertTrue(node._proxied.parent) + self.assertEqual(node._proxied.root().name, value.__class__.__module__) + + def test_none(self): + self._test(None) + + def test_bool(self): + self._test(True) + + def test_int(self): + self._test(1) + + def test_float(self): + self._test(1.0) + + def test_complex(self): + self._test(1.0j) + + def test_str(self): + self._test('a') + + def test_unicode(self): + self._test(u'a') + + +class NameNodeTest(unittest.TestCase): + def test_assign_to_True(self): + """test that True and False assignements don't crash""" + code = """ + True = False + def hello(False): + pass + del True + """ + if sys.version_info >= (3, 0): + with self.assertRaises(exceptions.AstroidBuildingException): + builder.parse(code) + else: + ast = builder.parse(code) + assign_true = ast['True'] + self.assertIsInstance(assign_true, nodes.AssignName) + self.assertEqual(assign_true.name, "True") + del_true = ast.body[2].targets[0] + self.assertIsInstance(del_true, nodes.DelName) + self.assertEqual(del_true.name, "True") + + +class ArgumentsNodeTC(unittest.TestCase): + def test_linenumbering(self): + ast = builder.parse(''' + def func(a, + b): pass + x = lambda x: None + ''') + self.assertEqual(ast['func'].args.fromlineno, 2) + self.assertFalse(ast['func'].args.is_statement) + xlambda = next(ast['x'].infer()) + self.assertEqual(xlambda.args.fromlineno, 4) + self.assertEqual(xlambda.args.tolineno, 4) + self.assertFalse(xlambda.args.is_statement) + if sys.version_info < (3, 0): + self.assertEqual(ast['func'].args.tolineno, 3) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + def test_builtin_fromlineno_missing(self): + cls = test_utils.extract_node(''' + class Foo(Exception): #@ + pass + ''') + new = cls.getattr('__new__')[-1] + self.assertEqual(new.args.fromlineno, 0) + + +class UnboundMethodNodeTest(unittest.TestCase): + + def test_no_super_getattr(self): + # This is a test for issue + # https://bitbucket.org/logilab/astroid/issue/91, which tests + # that UnboundMethod doesn't call super when doing .getattr. 
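+        # In other words, A.test is inferred as an unbound method, and getattr
+        # on it should resolve attributes of the method object itself (its
+        # __name__ below), rather than walking the class hierarchy via super.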
+ + ast = builder.parse(''' + class A(object): + def test(self): + pass + meth = A.test + ''') + node = next(ast['meth'].infer()) + with self.assertRaises(exceptions.NotFoundError): + node.getattr('__missssing__') + name = node.getattr('__name__')[0] + self.assertIsInstance(name, nodes.Const) + self.assertEqual(name.value, 'test') + + +class BoundMethodNodeTest(unittest.TestCase): + + def test_is_property(self): + ast = builder.parse(''' + import abc + + def cached_property(): + # Not a real decorator, but we don't care + pass + def reify(): + # Same as cached_property + pass + def lazy_property(): + pass + def lazyproperty(): + pass + def lazy(): pass + class A(object): + @property + def builtin_property(self): + return 42 + @abc.abstractproperty + def abc_property(self): + return 42 + @cached_property + def cached_property(self): return 42 + @reify + def reified(self): return 42 + @lazy_property + def lazy_prop(self): return 42 + @lazyproperty + def lazyprop(self): return 42 + def not_prop(self): pass + @lazy + def decorated_with_lazy(self): return 42 + + cls = A() + builtin_property = cls.builtin_property + abc_property = cls.abc_property + cached_p = cls.cached_property + reified = cls.reified + not_prop = cls.not_prop + lazy_prop = cls.lazy_prop + lazyprop = cls.lazyprop + decorated_with_lazy = cls.decorated_with_lazy + ''') + for prop in ('builtin_property', 'abc_property', 'cached_p', 'reified', + 'lazy_prop', 'lazyprop', 'decorated_with_lazy'): + inferred = next(ast[prop].infer()) + self.assertIsInstance(inferred, nodes.Const, prop) + self.assertEqual(inferred.value, 42, prop) + + inferred = next(ast['not_prop'].infer()) + self.assertIsInstance(inferred, bases.BoundMethod) + + +class AliasesTest(unittest.TestCase): + + def setUp(self): + self.transformer = transforms.TransformVisitor() + + def parse_transform(self, code): + module = parse(code, apply_transforms=False) + return self.transformer.visit(module) + + def test_aliases(self): + def test_from(node): + node.names = node.names + [('absolute_import', None)] + return node + + def test_class(node): + node.name = 'Bar' + return node + + def test_function(node): + node.name = 'another_test' + return node + + def test_callfunc(node): + if node.func.name == 'Foo': + node.func.name = 'Bar' + return node + + def test_assname(node): + if node.name == 'foo': + n = nodes.AssignName() + n.name = 'bar' + return n + def test_assattr(node): + if node.attrname == 'a': + node.attrname = 'b' + return node + + def test_getattr(node): + if node.attrname == 'a': + node.attrname = 'b' + return node + + def test_genexpr(node): + if node.elt.value == 1: + node.elt = nodes.Const(2) + return node + + self.transformer.register_transform(nodes.From, test_from) + self.transformer.register_transform(nodes.Class, test_class) + self.transformer.register_transform(nodes.Function, test_function) + self.transformer.register_transform(nodes.CallFunc, test_callfunc) + self.transformer.register_transform(nodes.AssName, test_assname) + self.transformer.register_transform(nodes.AssAttr, test_assattr) + self.transformer.register_transform(nodes.Getattr, test_getattr) + self.transformer.register_transform(nodes.GenExpr, test_genexpr) + + string = ''' + from __future__ import print_function + + class Foo: pass + + def test(a): return a + + foo = Foo() + foo.a = test(42) + foo.a + (1 for _ in range(0, 42)) + ''' + + module = self.parse_transform(string) + + self.assertEqual(len(module.body[0].names), 2) + self.assertIsInstance(module.body[0], nodes.ImportFrom) + 
self.assertEqual(module.body[1].name, 'Bar') + self.assertIsInstance(module.body[1], nodes.ClassDef) + self.assertEqual(module.body[2].name, 'another_test') + self.assertIsInstance(module.body[2], nodes.FunctionDef) + self.assertEqual(module.body[3].targets[0].name, 'bar') + self.assertIsInstance(module.body[3].targets[0], nodes.AssignName) + self.assertEqual(module.body[3].value.func.name, 'Bar') + self.assertIsInstance(module.body[3].value, nodes.Call) + self.assertEqual(module.body[4].targets[0].attrname, 'b') + self.assertIsInstance(module.body[4].targets[0], nodes.AssignAttr) + self.assertIsInstance(module.body[5], nodes.Expr) + self.assertEqual(module.body[5].value.attrname, 'b') + self.assertIsInstance(module.body[5].value, nodes.Attribute) + self.assertEqual(module.body[6].value.elt.value, 2) + self.assertIsInstance(module.body[6].value, nodes.GeneratorExp) + + @unittest.skipIf(six.PY3, "Python 3 doesn't have Repr nodes.") + def test_repr(self): + def test_backquote(node): + node.value.name = 'bar' + return node + + self.transformer.register_transform(nodes.Backquote, test_backquote) + + module = self.parse_transform('`foo`') + + self.assertEqual(module.body[0].value.value.name, 'bar') + self.assertIsInstance(module.body[0].value, nodes.Repr) + + +class DeprecationWarningsTest(unittest.TestCase): + def test_asstype_warnings(self): + string = ''' + class C: pass + c = C() + with warnings.catch_warnings(record=True) as w: + pass + ''' + module = parse(string) + filter_stmts_mixin = module.body[0] + assign_type_mixin = module.body[1].targets[0] + parent_assign_type_mixin = module.body[2] + + warnings.simplefilter('always') + + with warnings.catch_warnings(record=True) as w: + filter_stmts_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with warnings.catch_warnings(record=True) as w: + assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with warnings.catch_warnings(record=True) as w: + parent_assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + + def test_isinstance_warnings(self): + msg_format = ("%r is deprecated and slated for removal in astroid " + "2.0, use %r instead") + for cls in (nodes.Discard, nodes.Backquote, nodes.AssName, + nodes.AssAttr, nodes.Getattr, nodes.CallFunc, nodes.From): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + isinstance(42, cls) + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + actual_msg = msg_format % (cls.__class__.__name__, cls.__wrapped__.__name__) + self.assertEqual(str(w[0].message), actual_msg) + + +@test_utils.require_version('3.5') +class Python35AsyncTest(unittest.TestCase): + + def test_async_await_keywords(self): + async_def, async_for, async_with, await_node = test_utils.extract_node(''' + async def func(): #@ + async for i in range(10): #@ + f = __(await i) + async with test(): #@ + pass + ''') + self.assertIsInstance(async_def, nodes.AsyncFunctionDef) + self.assertIsInstance(async_for, nodes.AsyncFor) + self.assertIsInstance(async_with, nodes.AsyncWith) + self.assertIsInstance(await_node, nodes.Await) + self.assertIsInstance(await_node.value, nodes.Name) + + def _test_await_async_as_string(self, code): + ast_node = parse(code) + self.assertEqual(ast_node.as_string().strip(), code.strip()) + + def test_await_as_string(self): + code = textwrap.dedent(''' + async def function(): + await 42 + ''') + self._test_await_async_as_string(code) + + def 
test_asyncwith_as_string(self): + code = textwrap.dedent(''' + async def function(): + async with (42): + pass + ''') + self._test_await_async_as_string(code) + + def test_asyncfor_as_string(self): + code = textwrap.dedent(''' + async def function(): + async for i in range(10): + await 42 + ''') + self._test_await_async_as_string(code) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_objects.py b/pymode/libs/astroid/tests/unittest_objects.py new file mode 100644 index 00000000..62d3f4ff --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_objects.py @@ -0,0 +1,530 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +import unittest + +from astroid import bases +from astroid import exceptions +from astroid import nodes +from astroid import objects +from astroid import test_utils + + +class ObjectsTest(unittest.TestCase): + + def test_frozenset(self): + node = test_utils.extract_node(""" + frozenset({1: 2, 2: 3}) #@ + """) + infered = next(node.infer()) + self.assertIsInstance(infered, objects.FrozenSet) + + self.assertEqual(infered.pytype(), "%s.frozenset" % bases.BUILTINS) + + itered = infered.itered() + self.assertEqual(len(itered), 2) + self.assertIsInstance(itered[0], nodes.Const) + self.assertEqual([const.value for const in itered], [1, 2]) + + proxied = infered._proxied + self.assertEqual(infered.qname(), "%s.frozenset" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + +class SuperTests(unittest.TestCase): + + def test_inferring_super_outside_methods(self): + ast_nodes = test_utils.extract_node(''' + class Module(object): + pass + class StaticMethod(object): + @staticmethod + def static(): + # valid, but we don't bother with it. + return super(StaticMethod, StaticMethod) #@ + # super outside methods aren't inferred + super(Module, Module) #@ + # no argument super is not recognised outside methods as well. 
+ super() #@ + ''') + in_static = next(ast_nodes[0].value.infer()) + self.assertIsInstance(in_static, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + module_level = next(ast_nodes[1].infer()) + self.assertIsInstance(module_level, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + no_arguments = next(ast_nodes[2].infer()) + self.assertIsInstance(no_arguments, bases.Instance) + self.assertEqual(no_arguments.qname(), "%s.super" % bases.BUILTINS) + + def test_inferring_unbound_super_doesnt_work(self): + node = test_utils.extract_node(''' + class Test(object): + def __init__(self): + super(Test) #@ + ''') + unbounded = next(node.infer()) + self.assertIsInstance(unbounded, bases.Instance) + self.assertEqual(unbounded.qname(), "%s.super" % bases.BUILTINS) + + def test_use_default_inference_on_not_inferring_args(self): + ast_nodes = test_utils.extract_node(''' + class Test(object): + def __init__(self): + super(Lala, self) #@ + super(Test, lala) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + self.assertEqual(first.qname(), "%s.super" % bases.BUILTINS) + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + self.assertEqual(second.qname(), "%s.super" % bases.BUILTINS) + + @test_utils.require_version(maxver='3.0') + def test_super_on_old_style_class(self): + # super doesn't work on old style class, but leave + # that as an error for pylint. We'll infer Super objects, + # but every call will result in a failure at some point. + node = test_utils.extract_node(''' + class OldStyle: + def __init__(self): + super(OldStyle, self) #@ + ''') + old = next(node.infer()) + self.assertIsInstance(old, objects.Super) + self.assertIsInstance(old.mro_pointer, nodes.ClassDef) + self.assertEqual(old.mro_pointer.name, 'OldStyle') + with self.assertRaises(exceptions.SuperError) as cm: + old.super_mro() + self.assertEqual(str(cm.exception), + "Unable to call super on old-style classes.") + + @test_utils.require_version(minver='3.0') + def test_no_arguments_super(self): + ast_nodes = test_utils.extract_node(''' + class First(object): pass + class Second(First): + def test(self): + super() #@ + @classmethod + def test_classmethod(cls): + super() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Second') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, nodes.ClassDef) + self.assertEqual(second.type.name, 'Second') + self.assertIsInstance(second.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + def test_super_simple_cases(self): + ast_nodes = test_utils.extract_node(''' + class First(object): pass + class Second(First): pass + class Third(First): + def test(self): + super(Third, self) #@ + super(Second, self) #@ + + # mro position and the type + super(Third, Third) #@ + super(Third, Second) #@ + super(Fourth, Fourth) #@ + + class Fourth(Third): + pass + ''') + + # .type is the object which provides the mro. + # .mro_pointer is the position in the mro from where + # the lookup should be done. 
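+        # For instance, for the super(Second, self) call above, .mro_pointer
+        # is expected to be the Second ClassDef (the lookup would start after
+        # it in the MRO), while .type is the Instance of Third on which the
+        # call was made -- exactly what the assertions below check.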
+ + # super(Third, self) + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Third') + + # super(Second, self) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, bases.Instance) + self.assertEqual(second.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + # super(Third, Third) + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, objects.Super) + self.assertIsInstance(third.type, nodes.ClassDef) + self.assertEqual(third.type.name, 'Third') + self.assertIsInstance(third.mro_pointer, nodes.ClassDef) + self.assertEqual(third.mro_pointer.name, 'Third') + + # super(Third, second) + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, objects.Super) + self.assertIsInstance(fourth.type, nodes.ClassDef) + self.assertEqual(fourth.type.name, 'Second') + self.assertIsInstance(fourth.mro_pointer, nodes.ClassDef) + self.assertEqual(fourth.mro_pointer.name, 'Third') + + # Super(Fourth, Fourth) + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, objects.Super) + self.assertIsInstance(fifth.type, nodes.ClassDef) + self.assertEqual(fifth.type.name, 'Fourth') + self.assertIsInstance(fifth.mro_pointer, nodes.ClassDef) + self.assertEqual(fifth.mro_pointer.name, 'Fourth') + + def test_super_infer(self): + node = test_utils.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super) + reinferred = next(inferred.infer()) + self.assertIsInstance(reinferred, objects.Super) + self.assertIs(inferred, reinferred) + + def test_inferring_invalid_supers(self): + ast_nodes = test_utils.extract_node(''' + class Super(object): + def __init__(self): + # MRO pointer is not a type + super(1, self) #@ + # MRO type is not a subtype + super(Super, 1) #@ + # self is not a subtype of Bupper + super(Bupper, self) #@ + class Bupper(Super): + pass + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + with self.assertRaises(exceptions.SuperError) as cm: + first.super_mro() + self.assertEqual(str(cm.exception), "The first super argument must be type.") + + for node in ast_nodes[1:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super, node) + with self.assertRaises(exceptions.SuperArgumentTypeError) as cm: + inferred.super_mro() + self.assertEqual(str(cm.exception), + "super(type, obj): obj must be an instance " + "or subtype of type", node) + + def test_proxied(self): + node = test_utils.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + infered = next(node.infer()) + proxied = infered._proxied + self.assertEqual(proxied.qname(), "%s.super" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + def test_super_bound_model(self): + ast_nodes = test_utils.extract_node(''' + class First(object): + def method(self): + pass + @classmethod + def class_method(cls): + pass + class Super_Type_Type(First): + def method(self): + super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + @classmethod + def class_method(cls): + 
super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + + class Super_Type_Object(First): + def method(self): + super(Super_Type_Object, self).method #@ + super(Super_Type_Object, self).class_method #@ + ''') + # Super(type, type) is the same for both functions and classmethods. + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.FunctionDef) + self.assertEqual(first.name, 'method') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + self.assertEqual(second.type, 'classmethod') + + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, nodes.FunctionDef) + self.assertEqual(third.name, 'method') + + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, bases.BoundMethod) + self.assertEqual(fourth.bound.name, 'First') + self.assertEqual(fourth.type, 'classmethod') + + # Super(type, obj) can lead to different attribute bindings + # depending on the type of the place where super was called. + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, bases.BoundMethod) + self.assertEqual(fifth.bound.name, 'First') + self.assertEqual(fifth.type, 'method') + + sixth = next(ast_nodes[5].infer()) + self.assertIsInstance(sixth, bases.BoundMethod) + self.assertEqual(sixth.bound.name, 'First') + self.assertEqual(sixth.type, 'classmethod') + + def test_super_getattr_single_inheritance(self): + ast_nodes = test_utils.extract_node(''' + class First(object): + def test(self): pass + class Second(First): + def test2(self): pass + class Third(Second): + test3 = 42 + def __init__(self): + super(Third, self).test2 #@ + super(Third, self).test #@ + # test3 is local, no MRO lookup is done. + super(Third, self).test3 #@ + super(Third, self) #@ + + # Unbounds. 
+ super(Third, Third).test2 #@ + super(Third, Third).test #@ + + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + + with self.assertRaises(exceptions.InferenceError): + next(ast_nodes[2].infer()) + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.NotFoundError): + fourth.getattr('test3') + with self.assertRaises(exceptions.NotFoundError): + next(fourth.igetattr('test3')) + + first_unbound = next(ast_nodes[4].infer()) + self.assertIsInstance(first_unbound, nodes.FunctionDef) + self.assertEqual(first_unbound.name, 'test2') + self.assertEqual(first_unbound.parent.name, 'Second') + + second_unbound = next(ast_nodes[5].infer()) + self.assertIsInstance(second_unbound, nodes.FunctionDef) + self.assertEqual(second_unbound.name, 'test') + self.assertEqual(second_unbound.parent.name, 'First') + + def test_super_invalid_mro(self): + node = test_utils.extract_node(''' + class A(object): + test = 42 + class Super(A, A): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.NotFoundError): + next(inferred.getattr('test')) + + def test_super_complex_mro(self): + ast_nodes = test_utils.extract_node(''' + class A(object): + def spam(self): return "A" + def foo(self): return "A" + @staticmethod + def static(self): pass + class B(A): + def boo(self): return "B" + def spam(self): return "B" + class C(A): + def boo(self): return "C" + class E(C, B): + def __init__(self): + super(E, self).boo #@ + super(C, self).boo #@ + super(E, self).spam #@ + super(E, self).foo #@ + super(E, self).static #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'C') + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'B') + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, bases.BoundMethod) + self.assertEqual(third.bound.name, 'B') + fourth = next(ast_nodes[3].infer()) + self.assertEqual(fourth.bound.name, 'A') + static = next(ast_nodes[4].infer()) + self.assertIsInstance(static, nodes.FunctionDef) + self.assertEqual(static.parent.scope().name, 'A') + + def test_super_data_model(self): + ast_nodes = test_utils.extract_node(''' + class X(object): pass + class A(X): + def __init__(self): + super(A, self) #@ + super(A, A) #@ + super(X, A) #@ + ''') + first = next(ast_nodes[0].infer()) + thisclass = first.getattr('__thisclass__')[0] + self.assertIsInstance(thisclass, nodes.ClassDef) + self.assertEqual(thisclass.name, 'A') + selfclass = first.getattr('__self_class__')[0] + self.assertIsInstance(selfclass, nodes.ClassDef) + self.assertEqual(selfclass.name, 'A') + self_ = first.getattr('__self__')[0] + self.assertIsInstance(self_, bases.Instance) + self.assertEqual(self_.name, 'A') + cls = first.getattr('__class__')[0] + self.assertEqual(cls, first._proxied) + + second = next(ast_nodes[1].infer()) + thisclass = second.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'A') + self_ = second.getattr('__self__')[0] + self.assertIsInstance(self_, nodes.ClassDef) + self.assertEqual(self_.name, 'A') + + third = next(ast_nodes[2].infer()) + thisclass = third.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'X') + selfclass = 
third.getattr('__self_class__')[0] + self.assertEqual(selfclass.name, 'A') + + def assertEqualMro(self, klass, expected_mro): + self.assertEqual( + [member.name for member in klass.super_mro()], + expected_mro) + + def test_super_mro(self): + ast_nodes = test_utils.extract_node(''' + class A(object): pass + class B(A): pass + class C(A): pass + class E(C, B): + def __init__(self): + super(E, self) #@ + super(C, self) #@ + super(B, self) #@ + + super(B, 1) #@ + super(1, B) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertEqualMro(first, ['C', 'B', 'A', 'object']) + second = next(ast_nodes[1].infer()) + self.assertEqualMro(second, ['B', 'A', 'object']) + third = next(ast_nodes[2].infer()) + self.assertEqualMro(third, ['A', 'object']) + + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.SuperError): + fourth.super_mro() + fifth = next(ast_nodes[4].infer()) + with self.assertRaises(exceptions.SuperError): + fifth.super_mro() + + def test_super_yes_objects(self): + ast_nodes = test_utils.extract_node(''' + from collections import Missing + class A(object): + def __init__(self): + super(Missing, self) #@ + super(A, Missing) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + + def test_super_invalid_types(self): + node = test_utils.extract_node(''' + import collections + class A(object): + def __init__(self): + super(A, collections) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.SuperError): + inferred.super_mro() + with self.assertRaises(exceptions.SuperArgumentTypeError): + inferred.super_mro() + + def test_super_pytype_display_type_name(self): + node = test_utils.extract_node(''' + class A(object): + def __init__(self): + super(A, self) #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred.pytype(), "%s.super" % bases.BUILTINS) + self.assertEqual(inferred.display_type(), 'Super of') + self.assertEqual(inferred.name, 'A') + + def test_super_properties(self): + node = test_utils.extract_node(''' + class Foo(object): + @property + def dict(self): + return 42 + + class Bar(Foo): + @property + def dict(self): + return super(Bar, self).dict + + Bar().dict + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_peephole.py b/pymode/libs/astroid/tests/unittest_peephole.py new file mode 100644 index 00000000..78349898 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_peephole.py @@ -0,0 +1,121 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
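+#
+# Roughly, the peephole optimizer exercised below pre-folds long chains of
+# str/bytes additions ("a" + "b" + ...) into a single Const node, so that
+# inference does not have to recurse over thousands of nested BinOp nodes
+# (see test_big_binop_crash).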
+ +"""Tests for the astroid AST peephole optimizer.""" + +import ast +import textwrap +import unittest + +import astroid +from astroid import astpeephole +from astroid import builder +from astroid import manager +from astroid import test_utils +from astroid.tests import resources + + +MANAGER = manager.AstroidManager() + + +class PeepholeOptimizer(unittest.TestCase): + @classmethod + def setUpClass(cls): + MANAGER.optimize_ast = True + + @classmethod + def tearDownClass(cls): + MANAGER.optimize_ast = False + + def setUp(self): + self._optimizer = astpeephole.ASTPeepholeOptimizer() + + @staticmethod + def _get_binops(code): + module = ast.parse(textwrap.dedent(code)) + return [node.value for node in module.body + if isinstance(node, ast.Expr)] + + @test_utils.require_version(maxver='3.0') + def test_optimize_binop_unicode(self): + nodes = self._get_binops(""" + u"a" + u"b" + u"c" + + u"a" + "c" + "b" + u"a" + b"c" + """) + + result = self._optimizer.optimize_binop(nodes[0]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, u"abc") + + self.assertIsNone(self._optimizer.optimize_binop(nodes[1])) + self.assertIsNone(self._optimizer.optimize_binop(nodes[2])) + + def test_optimize_binop(self): + nodes = self._get_binops(""" + "a" + "b" + "c" + "d" + b"a" + b"b" + b"c" + b"d" + "a" + "b" + + "a" + "b" + 1 + object + var = 4 + "a" + "b" + var + "c" + "a" + "b" + "c" - "4" + "a" + "b" + "c" + "d".format() + "a" - "b" + "a" + 1 + 4 + 5 + 6 + """) + + result = self._optimizer.optimize_binop(nodes[0]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, "abcd") + + result = self._optimizer.optimize_binop(nodes[1]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, b"abcd") + + for node in nodes[2:]: + self.assertIsNone(self._optimizer.optimize_binop(node)) + + def test_big_binop_crash(self): + # Test that we don't fail on a lot of joined strings + # through the addition operator. + module = resources.build_file('data/joined_strings.py') + element = next(module['x'].infer()) + self.assertIsInstance(element, astroid.Const) + self.assertEqual(len(element.value), 61660) + + def test_optimisation_disabled(self): + try: + MANAGER.optimize_ast = False + module = builder.parse(""" + '1' + '2' + '3' + """) + self.assertIsInstance(module.body[0], astroid.Expr) + self.assertIsInstance(module.body[0].value, astroid.BinOp) + self.assertIsInstance(module.body[0].value.left, astroid.BinOp) + self.assertIsInstance(module.body[0].value.left.left, + astroid.Const) + finally: + MANAGER.optimize_ast = True + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_protocols.py b/pymode/libs/astroid/tests/unittest_protocols.py new file mode 100644 index 00000000..16745129 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_protocols.py @@ -0,0 +1,176 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +import contextlib +import unittest + +import astroid +from astroid.test_utils import extract_node, require_version +from astroid import InferenceError +from astroid import nodes +from astroid import util +from astroid.node_classes import AssignName, Const, Name, Starred + + +@contextlib.contextmanager +def _add_transform(manager, node, transform, predicate=None): + manager.register_transform(node, transform, predicate) + try: + yield + finally: + manager.unregister_transform(node, transform, predicate) + + +class ProtocolTests(unittest.TestCase): + + def assertConstNodesEqual(self, nodes_list_expected, nodes_list_got): + self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) + for node in nodes_list_got: + self.assertIsInstance(node, Const) + for node, expected_value in zip(nodes_list_got, nodes_list_expected): + self.assertEqual(expected_value, node.value) + + def assertNameNodesEqual(self, nodes_list_expected, nodes_list_got): + self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) + for node in nodes_list_got: + self.assertIsInstance(node, Name) + for node, expected_name in zip(nodes_list_got, nodes_list_expected): + self.assertEqual(expected_name, node.name) + + def test_assigned_stmts_simple_for(self): + assign_stmts = extract_node(""" + for a in (1, 2, 3): #@ + pass + + for b in range(3): #@ + pass + """) + + for1_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) + assigned = list(for1_assnode.assigned_stmts()) + self.assertConstNodesEqual([1, 2, 3], assigned) + + for2_assnode = next(assign_stmts[1].nodes_of_class(AssignName)) + self.assertRaises(InferenceError, + list, for2_assnode.assigned_stmts()) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_for(self): + assign_stmts = extract_node(""" + for *a, b in ((1, 2, 3), (4, 5, 6, 7)): #@ + pass + """) + + for1_starred = next(assign_stmts.nodes_of_class(Starred)) + assigned = next(for1_starred.assigned_stmts()) + self.assertEqual(assigned, util.YES) + + def _get_starred_stmts(self, code): + assign_stmt = extract_node("{} #@".format(code)) + starred = next(assign_stmt.nodes_of_class(Starred)) + return next(starred.assigned_stmts()) + + def _helper_starred_expected_const(self, code, expected): + stmts = self._get_starred_stmts(code) + self.assertIsInstance(stmts, nodes.List) + stmts = stmts.elts + self.assertConstNodesEqual(expected, stmts) + + def _helper_starred_expected(self, code, expected): + stmts = self._get_starred_stmts(code) + self.assertEqual(expected, stmts) + + def _helper_starred_inference_error(self, code): + assign_stmt = extract_node("{} #@".format(code)) + starred = next(assign_stmt.nodes_of_class(Starred)) + self.assertRaises(InferenceError, list, starred.assigned_stmts()) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_assnames(self): + self._helper_starred_expected_const( + "a, *b = (1, 2, 3, 4) #@", [2, 3, 4]) + self._helper_starred_expected_const( + "*a, b = (1, 2, 3) #@", [1, 2]) + self._helper_starred_expected_const( + "a, *b, c = (1, 2, 3, 4, 5) #@", + [2, 3, 4]) + self._helper_starred_expected_const( + "a, *b = (1, 2) #@", [2]) + self._helper_starred_expected_const( + "*b, a = (1, 2) #@", [1]) + self._helper_starred_expected_const( + "[*b] = (1, 2) #@", [1, 2]) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_yes(self): + # Not something 
iterable and known + self._helper_starred_expected("a, *b = range(3) #@", util.YES) + # Not something inferrable + self._helper_starred_expected("a, *b = balou() #@", util.YES) + # In function, unknown. + self._helper_starred_expected(""" + def test(arg): + head, *tail = arg #@""", util.YES) + # These cases aren't worth supporting. + self._helper_starred_expected( + "a, (*b, c), d = (1, (2, 3, 4), 5) #@", util.YES) + + @require_version(minver='3.0') + def test_assign_stmts_starred_fails(self): + # Too many starred + self._helper_starred_inference_error("a, *b, *c = (1, 2, 3) #@") + # Too many lhs values + self._helper_starred_inference_error("a, *b, c = (1, 2) #@") + # This could be solved properly, but it complicates needlessly the + # code for assigned_stmts, without oferring real benefit. + self._helper_starred_inference_error( + "(*a, b), (c, *d) = (1, 2, 3), (4, 5, 6) #@") + + def test_assigned_stmts_assignments(self): + assign_stmts = extract_node(""" + c = a #@ + + d, e = b, c #@ + """) + + simple_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) + assigned = list(simple_assnode.assigned_stmts()) + self.assertNameNodesEqual(['a'], assigned) + + assnames = assign_stmts[1].nodes_of_class(AssignName) + simple_mul_assnode_1 = next(assnames) + assigned = list(simple_mul_assnode_1.assigned_stmts()) + self.assertNameNodesEqual(['b'], assigned) + simple_mul_assnode_2 = next(assnames) + assigned = list(simple_mul_assnode_2.assigned_stmts()) + self.assertNameNodesEqual(['c'], assigned) + + def test_sequence_assigned_stmts_not_accepting_empty_node(self): + def transform(node): + node.root().locals['__all__'] = [node.value] + + manager = astroid.MANAGER + with _add_transform(manager, astroid.Assign, transform): + module = astroid.parse(''' + __all__ = ['a'] + ''') + module.wildcard_import_names() + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_python3.py b/pymode/libs/astroid/tests/unittest_python3.py new file mode 100644 index 00000000..87010571 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_python3.py @@ -0,0 +1,254 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+from textwrap import dedent +import unittest + +from astroid import nodes +from astroid.node_classes import Assign, Expr, YieldFrom, Name, Const +from astroid.builder import AstroidBuilder +from astroid.scoped_nodes import ClassDef, FunctionDef +from astroid.test_utils import require_version, extract_node + + +class Python3TC(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.builder = AstroidBuilder() + + @require_version('3.0') + def test_starred_notation(self): + astroid = self.builder.string_build("*a, b = [1, 2, 3]", 'test', 'test') + + # Get the star node + node = next(next(next(astroid.get_children()).get_children()).get_children()) + + self.assertTrue(isinstance(node.assign_type(), Assign)) + + @require_version('3.3') + def test_yield_from(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + """) + astroid = self.builder.string_build(body) + func = astroid.body[0] + self.assertIsInstance(func, FunctionDef) + yieldfrom_stmt = func.body[0] + + self.assertIsInstance(yieldfrom_stmt, Expr) + self.assertIsInstance(yieldfrom_stmt.value, YieldFrom) + self.assertEqual(yieldfrom_stmt.as_string(), + 'yield from iter([1, 2])') + + @require_version('3.3') + def test_yield_from_is_generator(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + """) + astroid = self.builder.string_build(body) + func = astroid.body[0] + self.assertIsInstance(func, FunctionDef) + self.assertTrue(func.is_generator()) + + @require_version('3.3') + def test_yield_from_as_string(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + value = yield from other() + """) + astroid = self.builder.string_build(body) + func = astroid.body[0] + self.assertEqual(func.as_string().strip(), body.strip()) + + # metaclass tests + + @require_version('3.0') + def test_simple_metaclass(self): + astroid = self.builder.string_build("class Test(metaclass=type): pass") + klass = astroid.body[0] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'type') + + @require_version('3.0') + def test_metaclass_error(self): + astroid = self.builder.string_build("class Test(metaclass=typ): pass") + klass = astroid.body[0] + self.assertFalse(klass.metaclass()) + + @require_version('3.0') + def test_metaclass_imported(self): + astroid = self.builder.string_build(dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass""")) + klass = astroid.body[1] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @require_version('3.0') + def test_as_string(self): + body = dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass""") + astroid = self.builder.string_build(body) + klass = astroid.body[1] + + self.assertEqual(klass.as_string(), + '\n\nclass Test(metaclass=ABCMeta):\n pass\n') + + @require_version('3.0') + def test_old_syntax_works(self): + astroid = self.builder.string_build(dedent(""" + class Test: + __metaclass__ = type + class SubTest(Test): pass + """)) + klass = astroid['SubTest'] + metaclass = klass.metaclass() + self.assertIsNone(metaclass) + + @require_version('3.0') + def test_metaclass_yes_leak(self): + astroid = self.builder.string_build(dedent(""" + # notice `ab` instead of `abc` + from ab import ABCMeta + + class Meta(metaclass=ABCMeta): pass + """)) + klass = astroid['Meta'] + self.assertIsNone(klass.metaclass()) + + @require_version('3.0') + def test_parent_metaclass(self): + astroid = 
self.builder.string_build(dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass + class SubTest(Test): pass + """)) + klass = astroid['SubTest'] + self.assertTrue(klass.newstyle) + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @require_version('3.0') + def test_metaclass_ancestors(self): + astroid = self.builder.string_build(dedent(""" + from abc import ABCMeta + + class FirstMeta(metaclass=ABCMeta): pass + class SecondMeta(metaclass=type): + pass + + class Simple: + pass + + class FirstImpl(FirstMeta): pass + class SecondImpl(FirstImpl): pass + class ThirdImpl(Simple, SecondMeta): + pass + """)) + classes = { + 'ABCMeta': ('FirstImpl', 'SecondImpl'), + 'type': ('ThirdImpl', ) + } + for metaclass, names in classes.items(): + for name in names: + impl = astroid[name] + meta = impl.metaclass() + self.assertIsInstance(meta, ClassDef) + self.assertEqual(meta.name, metaclass) + + @require_version('3.0') + def test_annotation_support(self): + astroid = self.builder.string_build(dedent(""" + def test(a: int, b: str, c: None, d, e, + *args: float, **kwargs: int)->int: + pass + """)) + func = astroid['test'] + self.assertIsInstance(func.args.varargannotation, Name) + self.assertEqual(func.args.varargannotation.name, 'float') + self.assertIsInstance(func.args.kwargannotation, Name) + self.assertEqual(func.args.kwargannotation.name, 'int') + self.assertIsInstance(func.returns, Name) + self.assertEqual(func.returns.name, 'int') + arguments = func.args + self.assertIsInstance(arguments.annotations[0], Name) + self.assertEqual(arguments.annotations[0].name, 'int') + self.assertIsInstance(arguments.annotations[1], Name) + self.assertEqual(arguments.annotations[1].name, 'str') + self.assertIsInstance(arguments.annotations[2], Const) + self.assertIsNone(arguments.annotations[2].value) + self.assertIsNone(arguments.annotations[3]) + self.assertIsNone(arguments.annotations[4]) + + astroid = self.builder.string_build(dedent(""" + def test(a: int=1, b: str=2): + pass + """)) + func = astroid['test'] + self.assertIsInstance(func.args.annotations[0], Name) + self.assertEqual(func.args.annotations[0].name, 'int') + self.assertIsInstance(func.args.annotations[1], Name) + self.assertEqual(func.args.annotations[1].name, 'str') + self.assertIsNone(func.returns) + + @require_version('3.0') + def test_annotation_as_string(self): + code1 = dedent(''' + def test(a, b:int=4, c=2, f:'lala'=4)->2: + pass''') + code2 = dedent(''' + def test(a:typing.Generic[T], c:typing.Any=24)->typing.Iterable: + pass''') + for code in (code1, code2): + func = extract_node(code) + self.assertEqual(func.as_string(), code) + + @require_version('3.5') + def test_unpacking_in_dicts(self): + code = "{'x': 1, **{'y': 2}}" + node = extract_node(code) + self.assertEqual(node.as_string(), code) + keys = [key for (key, _) in node.items] + self.assertIsInstance(keys[0], nodes.Const) + self.assertIsInstance(keys[1], nodes.DictUnpack) + + @require_version('3.5') + def test_nested_unpacking_in_dicts(self): + code = "{'x': 1, **{'y': 2, **{'z': 3}}}" + node = extract_node(code) + self.assertEqual(node.as_string(), code) + + @require_version('3.5') + def test_unpacking_in_dict_getitem(self): + node = extract_node('{1:2, **{2:3, 3:4}, **{5: 6}}') + for key, expected in ((1, 2), (2, 3), (3, 4), (5, 6)): + value = node.getitem(key) + self.assertIsInstance(value, nodes.Const) + self.assertEqual(value.value, expected) + + +if __name__ == '__main__': + 
unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_raw_building.py b/pymode/libs/astroid/tests/unittest_raw_building.py new file mode 100644 index 00000000..2bdaac17 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_raw_building.py @@ -0,0 +1,85 @@ +import inspect +import os +import unittest + +from six.moves import builtins # pylint: disable=import-error + +from astroid.builder import AstroidBuilder +from astroid.raw_building import ( + attach_dummy_node, build_module, + build_class, build_function, build_from_import +) +from astroid import test_utils +from astroid import nodes +from astroid.bases import BUILTINS + + +class RawBuildingTC(unittest.TestCase): + + def test_attach_dummy_node(self): + node = build_module('MyModule') + attach_dummy_node(node, 'DummyNode') + self.assertEqual(1, len(list(node.get_children()))) + + def test_build_module(self): + node = build_module('MyModule') + self.assertEqual(node.name, 'MyModule') + self.assertEqual(node.pure_python, False) + self.assertEqual(node.package, False) + self.assertEqual(node.parent, None) + + def test_build_class(self): + node = build_class('MyClass') + self.assertEqual(node.name, 'MyClass') + self.assertEqual(node.doc, None) + + def test_build_function(self): + node = build_function('MyFunction') + self.assertEqual(node.name, 'MyFunction') + self.assertEqual(node.doc, None) + + def test_build_function_args(self): + args = ['myArgs1', 'myArgs2'] + node = build_function('MyFunction', args) + self.assertEqual('myArgs1', node.args.args[0].name) + self.assertEqual('myArgs2', node.args.args[1].name) + self.assertEqual(2, len(node.args.args)) + + def test_build_function_defaults(self): + defaults = ['defaults1', 'defaults2'] + node = build_function('MyFunction', None, defaults) + self.assertEqual(2, len(node.args.defaults)) + + def test_build_from_import(self): + names = ['exceptions, inference, inspector'] + node = build_from_import('astroid', names) + self.assertEqual(len(names), len(node.names)) + + @test_utils.require_version(minver='3.0') + def test_io_is__io(self): + # _io module calls itself io. This leads + # to cyclic dependencies when astroid tries to resolve + # what io.BufferedReader is. The code that handles this + # is in astroid.raw_building.imported_member, which verifies + # the true name of the module. + import _io + + builder = AstroidBuilder() + module = builder.inspect_build(_io) + buffered_reader = module.getattr('BufferedReader')[0] + self.assertEqual(buffered_reader.root().name, 'io') + + @unittest.skipUnless(os.name == 'java', 'Requires Jython') + def test_open_is_inferred_correctly(self): + # Lot of Jython builtins don't have a __module__ attribute. + for name, _ in inspect.getmembers(builtins, predicate=inspect.isbuiltin): + if name == 'print': + continue + node = test_utils.extract_node('{0} #@'.format(name)) + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.FunctionDef, name) + self.assertEqual(inferred.root().name, BUILTINS, name) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_regrtest.py b/pymode/libs/astroid/tests/unittest_regrtest.py new file mode 100644 index 00000000..158c7119 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_regrtest.py @@ -0,0 +1,364 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +import sys +import unittest +import textwrap + +import six + +from astroid import MANAGER, Instance, nodes +from astroid.bases import BUILTINS +from astroid.builder import AstroidBuilder +from astroid import exceptions +from astroid.raw_building import build_module +from astroid.manager import AstroidManager +from astroid.test_utils import require_version, extract_node +from astroid.tests import resources +from astroid import transforms + + +class NonRegressionTests(resources.AstroidCacheSetupMixin, + unittest.TestCase): + + def setUp(self): + sys.path.insert(0, resources.find('data')) + MANAGER.always_load_extensions = True + MANAGER.astroid_cache[BUILTINS] = self._builtins + + def tearDown(self): + # Since we may have created a brainless manager, leading + # to a new cache builtin module and proxy classes in the constants, + # clear out the global manager cache. + MANAGER.clear_cache(self._builtins) + MANAGER.always_load_extensions = False + sys.path.pop(0) + sys.path_importer_cache.pop(resources.find('data'), None) + + def brainless_manager(self): + manager = AstroidManager() + # avoid caching into the AstroidManager borg since we get problems + # with other tests : + manager.__dict__ = {} + manager._failed_import_hooks = [] + manager.astroid_cache = {} + manager._mod_file_cache = {} + manager._transform = transforms.TransformVisitor() + manager.clear_cache() # trigger proper bootstraping + return manager + + def test_module_path(self): + man = self.brainless_manager() + mod = man.ast_from_module_name('package.import_package_subpackage_module') + package = next(mod.igetattr('package')) + self.assertEqual(package.name, 'package') + subpackage = next(package.igetattr('subpackage')) + self.assertIsInstance(subpackage, nodes.Module) + self.assertTrue(subpackage.package) + self.assertEqual(subpackage.name, 'package.subpackage') + module = next(subpackage.igetattr('module')) + self.assertEqual(module.name, 'package.subpackage.module') + + + def test_package_sidepackage(self): + manager = self.brainless_manager() + assert 'package.sidepackage' not in MANAGER.astroid_cache + package = manager.ast_from_module_name('absimp') + self.assertIsInstance(package, nodes.Module) + self.assertTrue(package.package) + subpackage = next(package.getattr('sidepackage')[0].infer()) + self.assertIsInstance(subpackage, nodes.Module) + self.assertTrue(subpackage.package) + self.assertEqual(subpackage.name, 'absimp.sidepackage') + + + def test_living_property(self): + builder = AstroidBuilder() + builder._done = {} + builder._module = sys.modules[__name__] + builder.object_build(build_module('module_name', ''), Whatever) + + + def test_new_style_class_detection(self): + try: + import pygtk # pylint: disable=unused-variable + except ImportError: + self.skipTest('test skipped: pygtk is not available') + # XXX may fail on some pygtk version, because objects in + # gobject._gobject have __module__ set to 
gobject :( + builder = AstroidBuilder() + data = """ +import pygtk +pygtk.require("2.6") +import gobject + +class A(gobject.GObject): + pass +""" + astroid = builder.string_build(data, __name__, __file__) + a = astroid['A'] + self.assertTrue(a.newstyle) + + + def test_pylint_config_attr(self): + try: + from pylint import lint # pylint: disable=unused-variable + except ImportError: + self.skipTest('pylint not available') + mod = MANAGER.ast_from_module_name('pylint.lint') + pylinter = mod['PyLinter'] + expect = ['OptionsManagerMixIn', 'object', 'MessagesHandlerMixIn', + 'ReportsHandlerMixIn', 'BaseTokenChecker', 'BaseChecker', + 'OptionsProviderMixIn'] + self.assertListEqual([c.name for c in pylinter.ancestors()], + expect) + self.assertTrue(list(Instance(pylinter).getattr('config'))) + inferred = list(Instance(pylinter).igetattr('config')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].root().name, 'optparse') + self.assertEqual(inferred[0].name, 'Values') + + def test_numpy_crash(self): + """test don't crash on numpy""" + #a crash occured somewhere in the past, and an + # InferenceError instead of a crash was better, but now we even infer! + try: + import numpy # pylint: disable=unused-variable + except ImportError: + self.skipTest('test skipped: numpy is not available') + builder = AstroidBuilder() + data = """ +from numpy import multiply + +multiply(1, 2, 3) +""" + astroid = builder.string_build(data, __name__, __file__) + callfunc = astroid.body[1].value.func + inferred = callfunc.inferred() + self.assertEqual(len(inferred), 2) + + @require_version('3.0') + def test_nameconstant(self): + # used to fail for Python 3.4 + builder = AstroidBuilder() + astroid = builder.string_build("def test(x=True): pass") + default = astroid.body[0].args.args[0] + self.assertEqual(default.name, 'x') + self.assertEqual(next(default.infer()).value, True) + + @require_version('2.7') + def test_with_infer_assignnames(self): + builder = AstroidBuilder() + data = """ +with open('a.txt') as stream, open('b.txt'): + stream.read() +""" + astroid = builder.string_build(data, __name__, __file__) + # Used to crash due to the fact that the second + # context manager didn't use an assignment name. 
+ list(astroid.nodes_of_class(nodes.Call))[-1].inferred() + + def test_recursion_regression_issue25(self): + builder = AstroidBuilder() + data = """ +import recursion as base + +_real_Base = base.Base + +class Derived(_real_Base): + pass + +def run(): + base.Base = Derived +""" + astroid = builder.string_build(data, __name__, __file__) + # Used to crash in _is_metaclass, due to wrong + # ancestors chain + classes = astroid.nodes_of_class(nodes.ClassDef) + for klass in classes: + # triggers the _is_metaclass call + klass.type # pylint: disable=pointless-statement + + def test_decorator_callchain_issue42(self): + builder = AstroidBuilder() + data = """ + +def test(): + def factory(func): + def newfunc(): + func() + return newfunc + return factory + +@test() +def crash(): + pass +""" + astroid = builder.string_build(data, __name__, __file__) + self.assertEqual(astroid['crash'].type, 'function') + + def test_filter_stmts_scoping(self): + builder = AstroidBuilder() + data = """ +def test(): + compiler = int() + class B(compiler.__class__): + pass + compiler = B() + return compiler +""" + astroid = builder.string_build(data, __name__, __file__) + test = astroid['test'] + result = next(test.infer_call_result(astroid)) + self.assertIsInstance(result, Instance) + base = next(result._proxied.bases[0].infer()) + self.assertEqual(base.name, 'int') + + def test_ancestors_patching_class_recursion(self): + node = AstroidBuilder().string_build(textwrap.dedent(""" + import string + Template = string.Template + + class A(Template): + pass + + class B(A): + pass + + def test(x=False): + if x: + string.Template = A + else: + string.Template = B + """)) + klass = node['A'] + ancestors = list(klass.ancestors()) + self.assertEqual(ancestors[0].qname(), 'string.Template') + + def test_ancestors_yes_in_bases(self): + # Test for issue https://bitbucket.org/logilab/astroid/issue/84 + # This used to crash astroid with a TypeError, because an YES + # node was present in the bases + node = extract_node(""" + def with_metaclass(meta, *bases): + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + import lala + + class A(with_metaclass(object, lala.lala)): #@ + pass + """) + ancestors = list(node.ancestors()) + if six.PY3: + self.assertEqual(len(ancestors), 1) + self.assertEqual(ancestors[0].qname(), + "{}.object".format(BUILTINS)) + else: + self.assertEqual(len(ancestors), 0) + + def test_ancestors_missing_from_function(self): + # Test for https://www.logilab.org/ticket/122793 + node = extract_node(''' + def gen(): yield + GEN = gen() + next(GEN) + ''') + self.assertRaises(exceptions.InferenceError, next, node.infer()) + + def test_unicode_in_docstring(self): + # Crashed for astroid==1.4.1 + # Test for https://bitbucket.org/logilab/astroid/issues/273/ + + # In a regular file, "coding: utf-8" would have been used. 
+ node = extract_node(u''' + from __future__ import unicode_literals + + class MyClass(object): + def method(self): + "With unicode : %s " + + instance = MyClass() + ''' % u"\u2019") + + next(node.value.infer()).as_string() + + def test_binop_generates_nodes_with_parents(self): + node = extract_node(''' + def no_op(*args): + pass + def foo(*args): + def inner(*more_args): + args + more_args #@ + return inner + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertIsNotNone(inferred.parent) + self.assertIsInstance(inferred.parent, nodes.BinOp) + + def test_decorator_names_inference_error_leaking(self): + node = extract_node(''' + class Parent(object): + @property + def foo(self): + pass + + class Child(Parent): + @Parent.foo.getter + def foo(self): #@ + return super(Child, self).foo + ['oink'] + ''') + inferred = next(node.infer()) + self.assertEqual(inferred.decoratornames(), set()) + + def test_ssl_protocol(self): + node = extract_node(''' + import ssl + ssl.PROTOCOL_TLSv1 + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + + def test_uninferable_string_argument_of_namedtuple(self): + node = extract_node(''' + import collections + collections.namedtuple('{}'.format("a"), '')() + ''') + next(node.infer()) + + @require_version(maxver='3.0') + def test_reassignment_in_except_handler(self): + node = extract_node(''' + import exceptions + try: + {}["a"] + except KeyError, exceptions.IndexError: + pass + + IndexError #@ + ''') + self.assertEqual(len(node.inferred()), 1) + + +class Whatever(object): + a = property(lambda x: x, lambda x: x) + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_scoped_nodes.py b/pymode/libs/astroid/tests/unittest_scoped_nodes.py new file mode 100644 index 00000000..a15c923a --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_scoped_nodes.py @@ -0,0 +1,1583 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for specific behaviour of astroid scoped nodes (i.e. 
module, class and +function) +""" +import os +import sys +from functools import partial +import unittest +import warnings + +from astroid import builder +from astroid import nodes +from astroid import scoped_nodes +from astroid import util +from astroid.exceptions import ( + InferenceError, NotFoundError, + NoDefault, ResolveError, MroError, + InconsistentMroError, DuplicateBasesError, +) +from astroid.bases import ( + BUILTINS, Instance, + BoundMethod, UnboundMethod, Generator +) +from astroid import __pkginfo__ +from astroid import test_utils +from astroid.tests import resources + + +def _test_dict_interface(self, node, test_attr): + self.assertIs(node[test_attr], node[test_attr]) + self.assertIn(test_attr, node) + node.keys() + node.values() + node.items() + iter(node) + + +class ModuleLoader(resources.SysPathSetup): + def setUp(self): + super(ModuleLoader, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') + self.pack = resources.build_file('data/__init__.py', 'data') + + +class ModuleNodeTest(ModuleLoader, unittest.TestCase): + + def test_special_attributes(self): + self.assertEqual(len(self.module.getattr('__name__')), 1) + self.assertIsInstance(self.module.getattr('__name__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__name__')[0].value, 'data.module') + self.assertEqual(len(self.module.getattr('__doc__')), 1) + self.assertIsInstance(self.module.getattr('__doc__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__doc__')[0].value, 'test module for astroid\n') + self.assertEqual(len(self.module.getattr('__file__')), 1) + self.assertIsInstance(self.module.getattr('__file__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__file__')[0].value, + os.path.abspath(resources.find('data/module.py'))) + self.assertEqual(len(self.module.getattr('__dict__')), 1) + self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) + self.assertRaises(NotFoundError, self.module.getattr, '__path__') + self.assertEqual(len(self.pack.getattr('__path__')), 1) + self.assertIsInstance(self.pack.getattr('__path__')[0], nodes.List) + + def test_dict_interface(self): + _test_dict_interface(self, self.module, 'YO') + + def test_getattr(self): + yo = self.module.getattr('YO')[0] + self.assertIsInstance(yo, nodes.ClassDef) + self.assertEqual(yo.name, 'YO') + red = next(self.module.igetattr('redirect')) + self.assertIsInstance(red, nodes.FunctionDef) + self.assertEqual(red.name, 'four_args') + namenode = next(self.module.igetattr('NameNode')) + self.assertIsInstance(namenode, nodes.ClassDef) + self.assertEqual(namenode.name, 'Name') + # resolve packageredirection + mod = resources.build_file('data/appl/myConnection.py', + 'data.appl.myConnection') + ssl = next(mod.igetattr('SSL1')) + cnx = next(ssl.igetattr('Connection')) + self.assertEqual(cnx.__class__, nodes.ClassDef) + self.assertEqual(cnx.name, 'Connection') + self.assertEqual(cnx.root().name, 'data.SSL1.Connection1') + self.assertEqual(len(self.nonregr.getattr('enumerate')), 2) + # raise ResolveError + self.assertRaises(InferenceError, self.nonregr.igetattr, 'YOAA') + + def test_wildcard_import_names(self): + m = resources.build_file('data/all.py', 'all') + self.assertEqual(m.wildcard_import_names(), ['Aaa', '_bla', 'name']) + m = resources.build_file('data/notall.py', 'notall') + res = sorted(m.wildcard_import_names()) + 
self.assertEqual(res, ['Aaa', 'func', 'name', 'other']) + + def test_public_names(self): + m = builder.parse(''' + name = 'a' + _bla = 2 + other = 'o' + class Aaa: pass + def func(): print('yo') + __all__ = 'Aaa', '_bla', 'name' + ''') + values = sorted(['Aaa', 'name', 'other', 'func']) + self.assertEqual(sorted(m._public_names()), values) + m = builder.parse(''' + name = 'a' + _bla = 2 + other = 'o' + class Aaa: pass + + def func(): return 'yo' + ''') + res = sorted(m._public_names()) + self.assertEqual(res, values) + + m = builder.parse(''' + from missing import tzop + trop = "test" + __all__ = (trop, "test1", tzop, 42) + ''') + res = sorted(m._public_names()) + self.assertEqual(res, ["trop", "tzop"]) + + m = builder.parse(''' + test = tzop = 42 + __all__ = ('test', ) + ('tzop', ) + ''') + res = sorted(m._public_names()) + self.assertEqual(res, ['test', 'tzop']) + + def test_module_getattr(self): + data = ''' + appli = application + appli += 2 + del appli + ''' + astroid = builder.parse(data, __name__) + # test del statement not returned by getattr + self.assertEqual(len(astroid.getattr('appli')), 2, + astroid.getattr('appli')) + + def test_relative_to_absolute_name(self): + # package + mod = nodes.Module('very.multi.package', 'doc') + mod.package = True + modname = mod.relative_to_absolute_name('utils', 1) + self.assertEqual(modname, 'very.multi.package.utils') + modname = mod.relative_to_absolute_name('utils', 2) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 0) + self.assertEqual(modname, 'very.multi.package.utils') + modname = mod.relative_to_absolute_name('', 1) + self.assertEqual(modname, 'very.multi.package') + # non package + mod = nodes.Module('very.multi.module', 'doc') + mod.package = False + modname = mod.relative_to_absolute_name('utils', 0) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 1) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 2) + self.assertEqual(modname, 'very.utils') + modname = mod.relative_to_absolute_name('', 1) + self.assertEqual(modname, 'very.multi') + + def test_import_1(self): + data = '''from . import subpackage''' + sys.path.insert(0, resources.find('data')) + astroid = builder.parse(data, 'package', 'data/package/__init__.py') + try: + m = astroid.import_module('', level=1) + self.assertEqual(m.name, 'package') + inferred = list(astroid.igetattr('subpackage')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].name, 'package.subpackage') + finally: + del sys.path[0] + + + def test_import_2(self): + data = '''from . 
import subpackage as pouet''' + astroid = builder.parse(data, 'package', 'data/package/__init__.py') + sys.path.insert(0, resources.find('data')) + try: + m = astroid.import_module('', level=1) + self.assertEqual(m.name, 'package') + inferred = list(astroid.igetattr('pouet')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].name, 'package.subpackage') + finally: + del sys.path[0] + + + def test_file_stream_in_memory(self): + data = '''irrelevant_variable is irrelevant''' + astroid = builder.parse(data, 'in_memory') + with warnings.catch_warnings(record=True): + self.assertEqual(astroid.file_stream.read().decode(), data) + + def test_file_stream_physical(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + with open(path, 'rb') as file_io: + with warnings.catch_warnings(record=True): + self.assertEqual(astroid.file_stream.read(), file_io.read()) + + def test_file_stream_api(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + if __pkginfo__.numversion >= (1, 6): + # file_stream is slated for removal in astroid 1.6. + with self.assertRaises(AttributeError): + # pylint: disable=pointless-statement + astroid.file_stream + else: + # Until astroid 1.6, Module.file_stream will emit + # PendingDeprecationWarning in 1.4, DeprecationWarning + # in 1.5 and finally it will be removed in 1.6, leaving + # only Module.stream as the recommended way to retrieve + # its file stream. + with warnings.catch_warnings(record=True) as cm: + warnings.simplefilter("always") + self.assertIsNot(astroid.file_stream, astroid.file_stream) + self.assertGreater(len(cm), 1) + self.assertEqual(cm[0].category, PendingDeprecationWarning) + + def test_stream_api(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + stream = astroid.stream() + self.assertTrue(hasattr(stream, 'close')) + with stream: + with open(path, 'rb') as file_io: + self.assertEqual(stream.read(), file_io.read()) + + +class FunctionNodeTest(ModuleLoader, unittest.TestCase): + + def test_special_attributes(self): + func = self.module2['make_class'] + self.assertEqual(len(func.getattr('__name__')), 1) + self.assertIsInstance(func.getattr('__name__')[0], nodes.Const) + self.assertEqual(func.getattr('__name__')[0].value, 'make_class') + self.assertEqual(len(func.getattr('__doc__')), 1) + self.assertIsInstance(func.getattr('__doc__')[0], nodes.Const) + self.assertEqual(func.getattr('__doc__')[0].value, 'check base is correctly resolved to Concrete0') + self.assertEqual(len(self.module.getattr('__dict__')), 1) + self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) + + def test_dict_interface(self): + _test_dict_interface(self, self.module['global_access'], 'local') + + def test_default_value(self): + func = self.module2['make_class'] + self.assertIsInstance(func.args.default_value('base'), nodes.Attribute) + self.assertRaises(NoDefault, func.args.default_value, 'args') + self.assertRaises(NoDefault, func.args.default_value, 'kwargs') + self.assertRaises(NoDefault, func.args.default_value, 'any') + #self.assertIsInstance(func.mularg_class('args'), nodes.Tuple) + #self.assertIsInstance(func.mularg_class('kwargs'), nodes.Dict) + #self.assertIsNone(func.mularg_class('base')) + + def test_navigation(self): + function = self.module['global_access'] + self.assertEqual(function.statement(), function) + l_sibling = function.previous_sibling() + # check taking parent if 
child is not a stmt + self.assertIsInstance(l_sibling, nodes.Assign) + child = function.args.args[0] + self.assertIs(l_sibling, child.previous_sibling()) + r_sibling = function.next_sibling() + self.assertIsInstance(r_sibling, nodes.ClassDef) + self.assertEqual(r_sibling.name, 'YO') + self.assertIs(r_sibling, child.next_sibling()) + last = r_sibling.next_sibling().next_sibling().next_sibling() + self.assertIsInstance(last, nodes.Assign) + self.assertIsNone(last.next_sibling()) + first = l_sibling.root().body[0] + self.assertIsNone(first.previous_sibling()) + + def test_nested_args(self): + if sys.version_info >= (3, 0): + self.skipTest("nested args has been removed in py3.x") + code = ''' + def nested_args(a, (b, c, d)): + "nested arguments test" + ''' + tree = builder.parse(code) + func = tree['nested_args'] + self.assertEqual(sorted(func._locals), ['a', 'b', 'c', 'd']) + self.assertEqual(func.args.format_args(), 'a, (b, c, d)') + + def test_four_args(self): + func = self.module['four_args'] + #self.assertEqual(func.args.args, ['a', ('b', 'c', 'd')]) + local = sorted(func.keys()) + self.assertEqual(local, ['a', 'b', 'c', 'd']) + self.assertEqual(func.type, 'function') + + def test_format_args(self): + func = self.module2['make_class'] + self.assertEqual(func.args.format_args(), + 'any, base=data.module.YO, *args, **kwargs') + func = self.module['four_args'] + self.assertEqual(func.args.format_args(), 'a, b, c, d') + + def test_is_generator(self): + self.assertTrue(self.module2['generator'].is_generator()) + self.assertFalse(self.module2['not_a_generator'].is_generator()) + self.assertFalse(self.module2['make_class'].is_generator()) + + def test_is_abstract(self): + method = self.module2['AbstractClass']['to_override'] + self.assertTrue(method.is_abstract(pass_is_abstract=False)) + self.assertEqual(method.qname(), 'data.module2.AbstractClass.to_override') + self.assertEqual(method.pytype(), '%s.instancemethod' % BUILTINS) + method = self.module2['AbstractClass']['return_something'] + self.assertFalse(method.is_abstract(pass_is_abstract=False)) + # non regression : test raise "string" doesn't cause an exception in is_abstract + func = self.module2['raise_string'] + self.assertFalse(func.is_abstract(pass_is_abstract=False)) + + def test_is_abstract_decorated(self): + methods = test_utils.extract_node(""" + import abc + + class Klass(object): + @abc.abstractproperty + def prop(self): #@ + pass + + @abc.abstractmethod + def method1(self): #@ + pass + + some_other_decorator = lambda x: x + @some_other_decorator + def method2(self): #@ + pass + """) + self.assertTrue(methods[0].is_abstract(pass_is_abstract=False)) + self.assertTrue(methods[1].is_abstract(pass_is_abstract=False)) + self.assertFalse(methods[2].is_abstract(pass_is_abstract=False)) + +## def test_raises(self): +## method = self.module2['AbstractClass']['to_override'] +## self.assertEqual([str(term) for term in method.raises()], +## ["Call(Name('NotImplementedError'), [], None, None)"] ) + +## def test_returns(self): +## method = self.module2['AbstractClass']['return_something'] +## # use string comp since Node doesn't handle __cmp__ +## self.assertEqual([str(term) for term in method.returns()], +## ["Const('toto')", "Const(None)"]) + + def test_lambda_pytype(self): + data = ''' + def f(): + g = lambda: None + ''' + astroid = builder.parse(data) + g = list(astroid['f'].ilookup('g'))[0] + self.assertEqual(g.pytype(), '%s.function' % BUILTINS) + + def test_lambda_qname(self): + astroid = builder.parse('lmbd = lambda: None', __name__) + 
self.assertEqual('%s.' % __name__, astroid['lmbd'].parent.value.qname()) + + def test_is_method(self): + data = ''' + class A: + def meth1(self): + return 1 + @classmethod + def meth2(cls): + return 2 + @staticmethod + def meth3(): + return 3 + + def function(): + return 0 + + @staticmethod + def sfunction(): + return -1 + ''' + astroid = builder.parse(data) + self.assertTrue(astroid['A']['meth1'].is_method()) + self.assertTrue(astroid['A']['meth2'].is_method()) + self.assertTrue(astroid['A']['meth3'].is_method()) + self.assertFalse(astroid['function'].is_method()) + self.assertFalse(astroid['sfunction'].is_method()) + + def test_argnames(self): + if sys.version_info < (3, 0): + code = 'def f(a, (b, c), *args, **kwargs): pass' + else: + code = 'def f(a, b, c, *args, **kwargs): pass' + astroid = builder.parse(code, __name__) + self.assertEqual(astroid['f'].argnames(), ['a', 'b', 'c', 'args', 'kwargs']) + + def test_return_nothing(self): + """test inferred value on a function with empty return""" + data = ''' + def func(): + return + + a = func() + ''' + astroid = builder.parse(data) + call = astroid.body[1].value + func_vals = call.inferred() + self.assertEqual(len(func_vals), 1) + self.assertIsInstance(func_vals[0], nodes.Const) + self.assertIsNone(func_vals[0].value) + + def test_func_instance_attr(self): + """test instance attributes for functions""" + data = """ + def test(): + print(test.bar) + + test.bar = 1 + test() + """ + astroid = builder.parse(data, 'mod') + func = astroid.body[2].value.func.inferred()[0] + self.assertIsInstance(func, nodes.FunctionDef) + self.assertEqual(func.name, 'test') + one = func.getattr('bar')[0].inferred()[0] + self.assertIsInstance(one, nodes.Const) + self.assertEqual(one.value, 1) + + def test_type_builtin_descriptor_subclasses(self): + astroid = builder.parse(""" + class classonlymethod(classmethod): + pass + class staticonlymethod(staticmethod): + pass + + class Node: + @classonlymethod + def clsmethod_subclass(cls): + pass + @classmethod + def clsmethod(cls): + pass + @staticonlymethod + def staticmethod_subclass(cls): + pass + @staticmethod + def stcmethod(cls): + pass + """) + node = astroid._locals['Node'][0] + self.assertEqual(node._locals['clsmethod_subclass'][0].type, + 'classmethod') + self.assertEqual(node._locals['clsmethod'][0].type, + 'classmethod') + self.assertEqual(node._locals['staticmethod_subclass'][0].type, + 'staticmethod') + self.assertEqual(node._locals['stcmethod'][0].type, + 'staticmethod') + + def test_decorator_builtin_descriptors(self): + astroid = builder.parse(""" + def static_decorator(platform=None, order=50): + def wrapper(f): + f.cgm_module = True + f.cgm_module_order = order + f.cgm_module_platform = platform + return staticmethod(f) + return wrapper + + def long_classmethod_decorator(platform=None, order=50): + def wrapper(f): + def wrapper2(f): + def wrapper3(f): + f.cgm_module = True + f.cgm_module_order = order + f.cgm_module_platform = platform + return classmethod(f) + return wrapper3(f) + return wrapper2(f) + return wrapper + + def classmethod_decorator(platform=None): + def wrapper(f): + f.platform = platform + return classmethod(f) + return wrapper + + def classmethod_wrapper(fn): + def wrapper(cls, *args, **kwargs): + result = fn(cls, *args, **kwargs) + return result + + return classmethod(wrapper) + + def staticmethod_wrapper(fn): + def wrapper(*args, **kwargs): + return fn(*args, **kwargs) + return staticmethod(wrapper) + + class SomeClass(object): + @static_decorator() + def static(node, cfg): + pass + 
@classmethod_decorator() + def classmethod(cls): + pass + @static_decorator + def not_so_static(node): + pass + @classmethod_decorator + def not_so_classmethod(node): + pass + @classmethod_wrapper + def classmethod_wrapped(cls): + pass + @staticmethod_wrapper + def staticmethod_wrapped(): + pass + @long_classmethod_decorator() + def long_classmethod(cls): + pass + """) + node = astroid._locals['SomeClass'][0] + self.assertEqual(node._locals['static'][0].type, + 'staticmethod') + self.assertEqual(node._locals['classmethod'][0].type, + 'classmethod') + self.assertEqual(node._locals['not_so_static'][0].type, + 'method') + self.assertEqual(node._locals['not_so_classmethod'][0].type, + 'method') + self.assertEqual(node._locals['classmethod_wrapped'][0].type, + 'classmethod') + self.assertEqual(node._locals['staticmethod_wrapped'][0].type, + 'staticmethod') + self.assertEqual(node._locals['long_classmethod'][0].type, + 'classmethod') + + def test_igetattr(self): + func = test_utils.extract_node(''' + def test(): + pass + ''') + func._instance_attrs['value'] = [nodes.Const(42)] + value = func.getattr('value') + self.assertEqual(len(value), 1) + self.assertIsInstance(value[0], nodes.Const) + self.assertEqual(value[0].value, 42) + inferred = next(func.igetattr('value')) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + @test_utils.require_version(minver='3.0') + def test_return_annotation_is_not_the_last(self): + func = builder.parse(''' + def test() -> bytes: + pass + pass + return + ''').body[0] + last_child = func.last_child() + self.assertIsInstance(last_child, nodes.Return) + self.assertEqual(func.tolineno, 5) + + +class ClassNodeTest(ModuleLoader, unittest.TestCase): + + def test_dict_interface(self): + _test_dict_interface(self, self.module['YOUPI'], 'method') + + def test_cls_special_attributes_1(self): + cls = self.module['YO'] + self.assertEqual(len(cls.getattr('__bases__')), 1) + self.assertEqual(len(cls.getattr('__name__')), 1) + self.assertIsInstance(cls.getattr('__name__')[0], nodes.Const) + self.assertEqual(cls.getattr('__name__')[0].value, 'YO') + self.assertEqual(len(cls.getattr('__doc__')), 1) + self.assertIsInstance(cls.getattr('__doc__')[0], nodes.Const) + self.assertEqual(cls.getattr('__doc__')[0].value, 'hehe') + self.assertEqual(len(cls.getattr('__module__')), 1) + self.assertIsInstance(cls.getattr('__module__')[0], nodes.Const) + self.assertEqual(cls.getattr('__module__')[0].value, 'data.module') + self.assertEqual(len(cls.getattr('__dict__')), 1) + if not cls.newstyle: + self.assertRaises(NotFoundError, cls.getattr, '__mro__') + for cls in (nodes.List._proxied, nodes.Const(1)._proxied): + self.assertEqual(len(cls.getattr('__bases__')), 1) + self.assertEqual(len(cls.getattr('__name__')), 1) + self.assertEqual(len(cls.getattr('__doc__')), 1, (cls, cls.getattr('__doc__'))) + self.assertEqual(cls.getattr('__doc__')[0].value, cls.doc) + self.assertEqual(len(cls.getattr('__module__')), 1) + self.assertEqual(len(cls.getattr('__dict__')), 1) + self.assertEqual(len(cls.getattr('__mro__')), 1) + + def test__mro__attribute(self): + node = test_utils.extract_node(''' + class A(object): pass + class B(object): pass + class C(A, B): pass + ''') + mro = node.getattr('__mro__')[0] + self.assertIsInstance(mro, nodes.Tuple) + self.assertEqual(mro.elts, node.mro()) + + def test__bases__attribute(self): + node = test_utils.extract_node(''' + class A(object): pass + class B(object): pass + class C(A, B): pass + class D(C): pass + ''') + bases = 
node.getattr('__bases__')[0] + self.assertIsInstance(bases, nodes.Tuple) + self.assertEqual(len(bases.elts), 1) + self.assertIsInstance(bases.elts[0], nodes.ClassDef) + self.assertEqual(bases.elts[0].name, 'C') + + def test_cls_special_attributes_2(self): + astroid = builder.parse(''' + class A: pass + class B: pass + + A.__bases__ += (B,) + ''', __name__) + self.assertEqual(len(astroid['A'].getattr('__bases__')), 2) + self.assertIsInstance(astroid['A'].getattr('__bases__')[0], nodes.Tuple) + self.assertIsInstance(astroid['A'].getattr('__bases__')[1], nodes.AssignAttr) + + def test_instance_special_attributes(self): + for inst in (Instance(self.module['YO']), nodes.List(), nodes.Const(1)): + self.assertRaises(NotFoundError, inst.getattr, '__mro__') + self.assertRaises(NotFoundError, inst.getattr, '__bases__') + self.assertRaises(NotFoundError, inst.getattr, '__name__') + self.assertEqual(len(inst.getattr('__dict__')), 1) + self.assertEqual(len(inst.getattr('__doc__')), 1) + + def test_navigation(self): + klass = self.module['YO'] + self.assertEqual(klass.statement(), klass) + l_sibling = klass.previous_sibling() + self.assertTrue(isinstance(l_sibling, nodes.FunctionDef), l_sibling) + self.assertEqual(l_sibling.name, 'global_access') + r_sibling = klass.next_sibling() + self.assertIsInstance(r_sibling, nodes.ClassDef) + self.assertEqual(r_sibling.name, 'YOUPI') + + def test_local_attr_ancestors(self): + module = builder.parse(''' + class A(): + def __init__(self): pass + class B(A): pass + class C(B): pass + class D(object): pass + class F(): pass + class E(F, D): pass + ''') + # Test old-style (Python 2) / new-style (Python 3+) ancestors lookups + klass2 = module['C'] + it = klass2.local_attr_ancestors('__init__') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'A') + if sys.version_info[0] == 2: + self.assertRaises(StopIteration, partial(next, it)) + else: + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'object') + self.assertRaises(StopIteration, partial(next, it)) + + it = klass2.local_attr_ancestors('method') + self.assertRaises(StopIteration, partial(next, it)) + + # Test mixed-style ancestor lookups + klass2 = module['E'] + it = klass2.local_attr_ancestors('__init__') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'object') + self.assertRaises(StopIteration, partial(next, it)) + + def test_local_attr_mro(self): + module = builder.parse(''' + class A(object): + def __init__(self): pass + class B(A): + def __init__(self, arg, arg2): pass + class C(A): pass + class D(C, B): pass + ''') + dclass = module['D'] + init = dclass.local_attr('__init__')[0] + self.assertIsInstance(init, nodes.FunctionDef) + self.assertEqual(init.parent.name, 'B') + + cclass = module['C'] + init = cclass.local_attr('__init__')[0] + self.assertIsInstance(init, nodes.FunctionDef) + self.assertEqual(init.parent.name, 'A') + + ancestors = list(dclass.local_attr_ancestors('__init__')) + self.assertEqual([node.name for node in ancestors], ['B', 'A', 'object']) + + def test_instance_attr_ancestors(self): + klass2 = self.module['YOUPI'] + it = klass2.instance_attr_ancestors('yo') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'YO') + self.assertRaises(StopIteration, partial(next, it)) + klass2 = self.module['YOUPI'] + it = klass2.instance_attr_ancestors('member') + 
self.assertRaises(StopIteration, partial(next, it)) + + def test_methods(self): + expected_methods = {'__init__', 'class_method', 'method', 'static_method'} + klass2 = self.module['YOUPI'] + methods = {m.name for m in klass2.methods()} + self.assertTrue( + methods.issuperset(expected_methods)) + methods = {m.name for m in klass2.mymethods()} + self.assertSetEqual(expected_methods, methods) + klass2 = self.module2['Specialization'] + methods = {m.name for m in klass2.mymethods()} + self.assertSetEqual(set([]), methods) + method_locals = klass2.local_attr('method') + self.assertEqual(len(method_locals), 1) + self.assertEqual(method_locals[0].name, 'method') + self.assertRaises(NotFoundError, klass2.local_attr, 'nonexistant') + methods = {m.name for m in klass2.methods()} + self.assertTrue(methods.issuperset(expected_methods)) + + #def test_rhs(self): + # my_dict = self.module['MY_DICT'] + # self.assertIsInstance(my_dict.rhs(), nodes.Dict) + # a = self.module['YO']['a'] + # value = a.rhs() + # self.assertIsInstance(value, nodes.Const) + # self.assertEqual(value.value, 1) + + @unittest.skipIf(sys.version_info[0] >= 3, "Python 2 class semantics required.") + def test_ancestors(self): + klass = self.module['YOUPI'] + self.assertEqual(['YO'], [a.name for a in klass.ancestors()]) + klass = self.module2['Specialization'] + self.assertEqual(['YOUPI', 'YO'], [a.name for a in klass.ancestors()]) + + @unittest.skipIf(sys.version_info[0] < 3, "Python 3 class semantics required.") + def test_ancestors_py3(self): + klass = self.module['YOUPI'] + self.assertEqual(['YO', 'object'], [a.name for a in klass.ancestors()]) + klass = self.module2['Specialization'] + self.assertEqual(['YOUPI', 'YO', 'object'], [a.name for a in klass.ancestors()]) + + def test_type(self): + klass = self.module['YOUPI'] + self.assertEqual(klass.type, 'class') + klass = self.module2['Metaclass'] + self.assertEqual(klass.type, 'metaclass') + klass = self.module2['MyException'] + self.assertEqual(klass.type, 'exception') + klass = self.module2['MyError'] + self.assertEqual(klass.type, 'exception') + # the following class used to be detected as a metaclass + # after the fix which used instance._proxied in .ancestors(), + # when in fact it is a normal class + klass = self.module2['NotMetaclass'] + self.assertEqual(klass.type, 'class') + + def test_inner_classes(self): + eee = self.nonregr['Ccc']['Eee'] + self.assertEqual([n.name for n in eee.ancestors()], ['Ddd', 'Aaa', 'object']) + + + def test_classmethod_attributes(self): + data = ''' + class WebAppObject(object): + def registered(cls, application): + cls.appli = application + cls.schema = application.schema + cls.config = application.config + return cls + registered = classmethod(registered) + ''' + astroid = builder.parse(data, __name__) + cls = astroid['WebAppObject'] + self.assertEqual(sorted(cls._locals.keys()), + ['appli', 'config', 'registered', 'schema']) + + def test_class_getattr(self): + data = ''' + class WebAppObject(object): + appli = application + appli += 2 + del self.appli + ''' + astroid = builder.parse(data, __name__) + cls = astroid['WebAppObject'] + # test del statement not returned by getattr + self.assertEqual(len(cls.getattr('appli')), 2) + + + def test_instance_getattr(self): + data = ''' + class WebAppObject(object): + def __init__(self, application): + self.appli = application + self.appli += 2 + del self.appli + ''' + astroid = builder.parse(data) + inst = Instance(astroid['WebAppObject']) + # test del statement not returned by getattr + 
self.assertEqual(len(inst.getattr('appli')), 2) + + + def test_instance_getattr_with_class_attr(self): + data = ''' + class Parent: + aa = 1 + cc = 1 + + class Klass(Parent): + aa = 0 + bb = 0 + + def incr(self, val): + self.cc = self.aa + if val > self.aa: + val = self.aa + if val < self.bb: + val = self.bb + self.aa += val + ''' + astroid = builder.parse(data) + inst = Instance(astroid['Klass']) + self.assertEqual(len(inst.getattr('aa')), 3, inst.getattr('aa')) + self.assertEqual(len(inst.getattr('bb')), 1, inst.getattr('bb')) + self.assertEqual(len(inst.getattr('cc')), 2, inst.getattr('cc')) + + + def test_getattr_method_transform(self): + data = ''' + class Clazz(object): + + def m1(self, value): + self.value = value + m2 = m1 + + def func(arg1, arg2): + "function that will be used as a method" + return arg1.value + arg2 + + Clazz.m3 = func + inst = Clazz() + inst.m4 = func + ''' + astroid = builder.parse(data) + cls = astroid['Clazz'] + # test del statement not returned by getattr + for method in ('m1', 'm2', 'm3'): + inferred = list(cls.igetattr(method)) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], UnboundMethod) + inferred = list(Instance(cls).igetattr(method)) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], BoundMethod) + inferred = list(Instance(cls).igetattr('m4')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + + def test_getattr_from_grandpa(self): + data = ''' + class Future: + attr = 1 + + class Present(Future): + pass + + class Past(Present): + pass + ''' + astroid = builder.parse(data) + past = astroid['Past'] + attr = past.getattr('attr') + self.assertEqual(len(attr), 1) + attr1 = attr[0] + self.assertIsInstance(attr1, nodes.AssignName) + self.assertEqual(attr1.name, 'attr') + + def test_function_with_decorator_lineno(self): + data = ''' + @f(a=2, + b=3) + def g1(x): + print(x) + + @f(a=2, + b=3) + def g2(): + pass + ''' + astroid = builder.parse(data) + self.assertEqual(astroid['g1'].fromlineno, 4) + self.assertEqual(astroid['g1'].tolineno, 5) + self.assertEqual(astroid['g2'].fromlineno, 9) + self.assertEqual(astroid['g2'].tolineno, 10) + + @test_utils.require_version(maxver='3.0') + def test_simple_metaclass(self): + astroid = builder.parse(""" + class Test(object): + __metaclass__ = type + """) + klass = astroid['Test'] + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'type') + + def test_metaclass_error(self): + astroid = builder.parse(""" + class Test(object): + __metaclass__ = typ + """) + klass = astroid['Test'] + self.assertFalse(klass.metaclass()) + + @test_utils.require_version(maxver='3.0') + def test_metaclass_imported(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test(object): + __metaclass__ = ABCMeta + """) + klass = astroid['Test'] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + def test_metaclass_yes_leak(self): + astroid = builder.parse(""" + # notice `ab` instead of `abc` + from ab import ABCMeta + + class Meta(object): + __metaclass__ = ABCMeta + """) + klass = astroid['Meta'] + self.assertIsNone(klass.metaclass()) + + @test_utils.require_version(maxver='3.0') + def test_newstyle_and_metaclass_good(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test: + __metaclass__ = ABCMeta + """) + klass = astroid['Test'] + 
self.assertTrue(klass.newstyle) + self.assertEqual(klass.metaclass().name, 'ABCMeta') + astroid = builder.parse(""" + from abc import ABCMeta + __metaclass__ = ABCMeta + class Test: + pass + """) + klass = astroid['Test'] + self.assertTrue(klass.newstyle) + self.assertEqual(klass.metaclass().name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_nested_metaclass(self): + astroid = builder.parse(""" + from abc import ABCMeta + class A(object): + __metaclass__ = ABCMeta + class B: pass + + __metaclass__ = ABCMeta + class C: + __metaclass__ = type + class D: pass + """) + a = astroid['A'] + b = a._locals['B'][0] + c = astroid['C'] + d = c._locals['D'][0] + self.assertEqual(a.metaclass().name, 'ABCMeta') + self.assertFalse(b.newstyle) + self.assertIsNone(b.metaclass()) + self.assertEqual(c.metaclass().name, 'type') + self.assertEqual(d.metaclass().name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_parent_metaclass(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test: + __metaclass__ = ABCMeta + class SubTest(Test): pass + """) + klass = astroid['SubTest'] + self.assertTrue(klass.newstyle) + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_metaclass_ancestors(self): + astroid = builder.parse(""" + from abc import ABCMeta + + class FirstMeta(object): + __metaclass__ = ABCMeta + + class SecondMeta(object): + __metaclass__ = type + + class Simple(object): + pass + + class FirstImpl(FirstMeta): pass + class SecondImpl(FirstImpl): pass + class ThirdImpl(Simple, SecondMeta): + pass + """) + classes = { + 'ABCMeta': ('FirstImpl', 'SecondImpl'), + 'type': ('ThirdImpl', ) + } + for metaclass, names in classes.items(): + for name in names: + impl = astroid[name] + meta = impl.metaclass() + self.assertIsInstance(meta, nodes.ClassDef) + self.assertEqual(meta.name, metaclass) + + def test_metaclass_type(self): + klass = test_utils.extract_node(""" + def with_metaclass(meta, base=object): + return meta("NewBase", (base, ), {}) + + class ClassWithMeta(with_metaclass(type)): #@ + pass + """) + self.assertEqual( + ['NewBase', 'object'], + [base.name for base in klass.ancestors()]) + + def test_no_infinite_metaclass_loop(self): + klass = test_utils.extract_node(""" + class SSS(object): + + class JJJ(object): + pass + + @classmethod + def Init(cls): + cls.JJJ = type('JJJ', (cls.JJJ,), {}) + + class AAA(SSS): + pass + + class BBB(AAA.JJJ): + pass + """) + self.assertFalse(scoped_nodes._is_metaclass(klass)) + ancestors = [base.name for base in klass.ancestors()] + self.assertIn('object', ancestors) + self.assertIn('JJJ', ancestors) + + def test_no_infinite_metaclass_loop_with_redefine(self): + nodes = test_utils.extract_node(""" + import datetime + + class A(datetime.date): #@ + @classmethod + def now(cls): + return cls() + + class B(datetime.date): #@ + pass + + datetime.date = A + datetime.date = B + """) + for klass in nodes: + self.assertEqual(None, klass.metaclass()) + + def test_metaclass_generator_hack(self): + klass = test_utils.extract_node(""" + import six + + class WithMeta(six.with_metaclass(type, object)): #@ + pass + """) + self.assertEqual( + ['object'], + [base.name for base in klass.ancestors()]) + self.assertEqual( + 'type', klass.metaclass().name) + + def test_using_six_add_metaclass(self): + klass = test_utils.extract_node(''' + import six + import abc + + @six.add_metaclass(abc.ABCMeta) + 
class WithMeta(object): + pass + ''') + inferred = next(klass.infer()) + metaclass = inferred.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.qname(), 'abc.ABCMeta') + + def test_using_invalid_six_add_metaclass_call(self): + klass = test_utils.extract_node(''' + import six + @six.add_metaclass() + class Invalid(object): + pass + ''') + inferred = next(klass.infer()) + self.assertIsNone(inferred.metaclass()) + + def test_nonregr_infer_callresult(self): + astroid = builder.parse(""" + class Delegate(object): + def __get__(self, obj, cls): + return getattr(obj._subject, self.attribute) + + class CompositeBuilder(object): + __call__ = Delegate() + + builder = CompositeBuilder(result, composite) + tgts = builder() + """) + instance = astroid['tgts'] + # used to raise "'_Yes' object is not iterable", see + # https://bitbucket.org/logilab/astroid/issue/17 + self.assertEqual(list(instance.infer()), [util.YES]) + + def test_slots(self): + astroid = builder.parse(""" + from collections import deque + from textwrap import dedent + + class First(object): #@ + __slots__ = ("a", "b", 1) + class Second(object): #@ + __slots__ = "a" + class Third(object): #@ + __slots__ = deque(["a", "b", "c"]) + class Fourth(object): #@ + __slots__ = {"a": "a", "b": "b"} + class Fifth(object): #@ + __slots__ = list + class Sixth(object): #@ + __slots__ = "" + class Seventh(object): #@ + __slots__ = dedent.__name__ + class Eight(object): #@ + __slots__ = ("parens") + class Ninth(object): #@ + pass + class Ten(object): #@ + __slots__ = dict({"a": "b", "c": "d"}) + """) + expected = [ + ('First', ('a', 'b')), + ('Second', ('a', )), + ('Third', None), + ('Fourth', ('a', 'b')), + ('Fifth', None), + ('Sixth', None), + ('Seventh', ('dedent', )), + ('Eight', ('parens', )), + ('Ninth', None), + ('Ten', ('a', 'c')), + ] + for cls, expected_value in expected: + slots = astroid[cls].slots() + if expected_value is None: + self.assertIsNone(slots) + else: + self.assertEqual(list(expected_value), + [node.value for node in slots]) + + @test_utils.require_version(maxver='3.0') + def test_slots_py2(self): + module = builder.parse(""" + class UnicodeSlots(object): + __slots__ = (u"a", u"b", "c") + """) + slots = module['UnicodeSlots'].slots() + self.assertEqual(len(slots), 3) + self.assertEqual(slots[0].value, "a") + self.assertEqual(slots[1].value, "b") + self.assertEqual(slots[2].value, "c") + + @test_utils.require_version(maxver='3.0') + def test_slots_py2_not_implemented(self): + module = builder.parse(""" + class OldStyle: + __slots__ = ("a", "b") + """) + msg = "The concept of slots is undefined for old-style classes." 
+ with self.assertRaises(NotImplementedError) as cm: + module['OldStyle'].slots() + self.assertEqual(str(cm.exception), msg) + + def test_slots_empty_list_of_slots(self): + module = builder.parse(""" + class Klass(object): + __slots__ = () + """) + cls = module['Klass'] + self.assertEqual(cls.slots(), []) + + def test_slots_taken_from_parents(self): + module = builder.parse(''' + class FirstParent(object): + __slots__ = ('a', 'b', 'c') + class SecondParent(FirstParent): + __slots__ = ('d', 'e') + class Third(SecondParent): + __slots__ = ('d', ) + ''') + cls = module['Third'] + slots = cls.slots() + self.assertEqual(sorted(set(slot.value for slot in slots)), + ['a', 'b', 'c', 'd', 'e']) + + def test_all_ancestors_need_slots(self): + module = builder.parse(''' + class A(object): + __slots__ = ('a', ) + class B(A): pass + class C(B): + __slots__ = ('a', ) + ''') + cls = module['C'] + self.assertIsNone(cls.slots()) + cls = module['B'] + self.assertIsNone(cls.slots()) + + def assertEqualMro(self, klass, expected_mro): + self.assertEqual( + [member.name for member in klass.mro()], + expected_mro) + + @test_utils.require_version(maxver='3.0') + def test_no_mro_for_old_style(self): + node = test_utils.extract_node(""" + class Old: pass""") + with self.assertRaises(NotImplementedError) as cm: + node.mro() + self.assertEqual(str(cm.exception), "Could not obtain mro for " + "old-style classes.") + + @test_utils.require_version(maxver='3.0') + def test_combined_newstyle_oldstyle_in_mro(self): + node = test_utils.extract_node(''' + class Old: + pass + class New(object): + pass + class New1(object): + pass + class New2(New, New1): + pass + class NewOld(New2, Old): #@ + pass + ''') + self.assertEqualMro(node, ['NewOld', 'New2', 'New', 'New1', 'object', 'Old']) + self.assertTrue(node.newstyle) + + def test_with_metaclass_mro(self): + astroid = builder.parse(""" + import six + + class C(object): + pass + class B(C): + pass + class A(six.with_metaclass(type, B)): + pass + """) + self.assertEqualMro(astroid['A'], ['A', 'B', 'C', 'object']) + + def test_mro(self): + astroid = builder.parse(""" + class C(object): pass + class D(dict, C): pass + + class A1(object): pass + class B1(A1): pass + class C1(A1): pass + class D1(B1, C1): pass + class E1(C1, B1): pass + class F1(D1, E1): pass + class G1(E1, D1): pass + + class Boat(object): pass + class DayBoat(Boat): pass + class WheelBoat(Boat): pass + class EngineLess(DayBoat): pass + class SmallMultihull(DayBoat): pass + class PedalWheelBoat(EngineLess, WheelBoat): pass + class SmallCatamaran(SmallMultihull): pass + class Pedalo(PedalWheelBoat, SmallCatamaran): pass + + class OuterA(object): + class Inner(object): + pass + class OuterB(OuterA): + class Inner(OuterA.Inner): + pass + class OuterC(OuterA): + class Inner(OuterA.Inner): + pass + class OuterD(OuterC): + class Inner(OuterC.Inner, OuterB.Inner): + pass + class Duplicates(str, str): pass + + """) + self.assertEqualMro(astroid['D'], ['D', 'dict', 'C', 'object']) + self.assertEqualMro(astroid['D1'], ['D1', 'B1', 'C1', 'A1', 'object']) + self.assertEqualMro(astroid['E1'], ['E1', 'C1', 'B1', 'A1', 'object']) + with self.assertRaises(InconsistentMroError) as cm: + astroid['F1'].mro() + self.assertEqual(str(cm.exception), + "Cannot create a consistent method resolution order " + "for bases (B1, C1, A1, object), " + "(C1, B1, A1, object)") + + with self.assertRaises(InconsistentMroError) as cm: + astroid['G1'].mro() + self.assertEqual(str(cm.exception), + "Cannot create a consistent method resolution order " + 
"for bases (C1, B1, A1, object), " + "(B1, C1, A1, object)") + + self.assertEqualMro( + astroid['PedalWheelBoat'], + ["PedalWheelBoat", "EngineLess", + "DayBoat", "WheelBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid["SmallCatamaran"], + ["SmallCatamaran", "SmallMultihull", "DayBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid["Pedalo"], + ["Pedalo", "PedalWheelBoat", "EngineLess", "SmallCatamaran", + "SmallMultihull", "DayBoat", "WheelBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid['OuterD']['Inner'], + ['Inner', 'Inner', 'Inner', 'Inner', 'object']) + + with self.assertRaises(DuplicateBasesError) as cm: + astroid['Duplicates'].mro() + self.assertEqual(str(cm.exception), "Duplicates found in the mro.") + self.assertTrue(issubclass(cm.exception.__class__, MroError)) + self.assertTrue(issubclass(cm.exception.__class__, ResolveError)) + + def test_generator_from_infer_call_result_parent(self): + func = test_utils.extract_node(""" + import contextlib + + @contextlib.contextmanager + def test(): #@ + yield + """) + result = next(func.infer_call_result(func)) + self.assertIsInstance(result, Generator) + self.assertEqual(result.parent, func) + + def test_type_three_arguments(self): + classes = test_utils.extract_node(""" + type('A', (object, ), {"a": 1, "b": 2, missing: 3}) #@ + """) + first = next(classes.infer()) + self.assertIsInstance(first, nodes.ClassDef) + self.assertEqual(first.name, "A") + self.assertEqual(first.basenames, ["object"]) + self.assertIsInstance(first["a"], nodes.Const) + self.assertEqual(first["a"].value, 1) + self.assertIsInstance(first["b"], nodes.Const) + self.assertEqual(first["b"].value, 2) + with self.assertRaises(NotFoundError): + first.getattr("missing") + + def test_implicit_metaclass(self): + cls = test_utils.extract_node(""" + class A(object): + pass + """) + type_cls = scoped_nodes.builtin_lookup("type")[1][0] + self.assertEqual(cls.implicit_metaclass(), type_cls) + + @test_utils.require_version(maxver='3.0') + def test_implicit_metaclass_is_none(self): + cls = test_utils.extract_node(""" + class A: pass + """) + self.assertIsNone(cls.implicit_metaclass()) + + def test_local_attr_invalid_mro(self): + cls = test_utils.extract_node(""" + # A has an invalid MRO, local_attr should fallback + # to using .ancestors. + class A(object, object): + test = 42 + class B(A): #@ + pass + """) + local = cls.local_attr('test')[0] + inferred = next(local.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_has_dynamic_getattr(self): + module = builder.parse(""" + class Getattr(object): + def __getattr__(self, attrname): + pass + + class Getattribute(object): + def __getattribute__(self, attrname): + pass + + class ParentGetattr(Getattr): + pass + """) + self.assertTrue(module['Getattr'].has_dynamic_getattr()) + self.assertTrue(module['Getattribute'].has_dynamic_getattr()) + self.assertTrue(module['ParentGetattr'].has_dynamic_getattr()) + + # Test that objects analyzed through the live introspection + # aren't considered to have dynamic getattr implemented. 
+ import datetime + astroid_builder = builder.AstroidBuilder() + module = astroid_builder.module_build(datetime) + self.assertFalse(module['timedelta'].has_dynamic_getattr()) + + def test_duplicate_bases_namedtuple(self): + module = builder.parse(""" + import collections + _A = collections.namedtuple('A', 'a') + + class A(_A): pass + + class B(A): pass + """) + self.assertRaises(DuplicateBasesError, module['B'].mro) + + def test_instance_bound_method_lambdas(self): + ast_nodes = test_utils.extract_node(''' + class Test(object): #@ + lam = lambda self: self + not_method = lambda xargs: xargs + Test() #@ + ''') + cls = next(ast_nodes[0].infer()) + self.assertIsInstance(next(cls.igetattr('lam')), scoped_nodes.Lambda) + self.assertIsInstance(next(cls.igetattr('not_method')), scoped_nodes.Lambda) + + instance = next(ast_nodes[1].infer()) + lam = next(instance.igetattr('lam')) + self.assertIsInstance(lam, BoundMethod) + not_method = next(instance.igetattr('not_method')) + self.assertIsInstance(not_method, scoped_nodes.Lambda) + + def test_class_extra_decorators_frame_is_not_class(self): + ast_node = test_utils.extract_node(''' + def ala(): + def bala(): #@ + func = 42 + ''') + self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_callfunc_are_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + func = 42 + ''') + self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_assignment_names_are_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + def __init__(self): + self.func = staticmethod(func) + + ''') + self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_same_name_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + bala = staticmethod(func) + ''') + self.assertEqual(ast_node.extra_decorators, []) + self.assertEqual(ast_node.type, 'method') + + def test_class_extra_decorators(self): + static_method, clsmethod = test_utils.extract_node(''' + class Ala(object): + def static(self): #@ + pass + def class_method(self): #@ + pass + class_method = classmethod(class_method) + static = staticmethod(static) + ''') + self.assertEqual(len(clsmethod.extra_decorators), 1) + self.assertEqual(clsmethod.type, 'classmethod') + self.assertEqual(len(static_method.extra_decorators), 1) + self.assertEqual(static_method.type, 'staticmethod') + + def test_extra_decorators_only_class_level_assignments(self): + node = test_utils.extract_node(''' + def _bind(arg): + return arg.bind + + class A(object): + @property + def bind(self): + return 42 + def irelevant(self): + # This is important, because it used to trigger + # a maximum recursion error. + bind = _bind(self) + return bind + A() #@ + ''') + inferred = next(node.infer()) + bind = next(inferred.igetattr('bind')) + self.assertIsInstance(bind, nodes.Const) + self.assertEqual(bind.value, 42) + parent = bind.scope() + self.assertEqual(len(parent.extra_decorators), 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_transforms.py b/pymode/libs/astroid/tests/unittest_transforms.py new file mode 100644 index 00000000..1553bfc4 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_transforms.py @@ -0,0 +1,245 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +from __future__ import print_function + +import contextlib +import time +import unittest + +from astroid import builder +from astroid import nodes +from astroid import parse +from astroid import transforms + + +@contextlib.contextmanager +def add_transform(manager, node, transform, predicate=None): + manager.register_transform(node, transform, predicate) + try: + yield + finally: + manager.unregister_transform(node, transform, predicate) + + +class TestTransforms(unittest.TestCase): + + def setUp(self): + self.transformer = transforms.TransformVisitor() + + def parse_transform(self, code): + module = parse(code, apply_transforms=False) + return self.transformer.visit(module) + + def test_function_inlining_transform(self): + def transform_call(node): + # Let's do some function inlining + inferred = next(node.infer()) + return inferred + + self.transformer.register_transform(nodes.Call, + transform_call) + + module = self.parse_transform(''' + def test(): return 42 + test() #@ + ''') + + self.assertIsInstance(module.body[1], nodes.Expr) + self.assertIsInstance(module.body[1].value, nodes.Const) + self.assertEqual(module.body[1].value.value, 42) + + def test_recursive_transforms_into_astroid_fields(self): + # Test that the transformer walks properly the tree + # by going recursively into the _astroid_fields per each node. + def transform_compare(node): + # Let's check the values of the ops + _, right = node.ops[0] + # Assume they are Consts and they were transformed before + # us. 
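+            # child nodes are visited (and transformed) before their parent,
+            # so both operands have already been turned into Consts here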
+ return nodes.const_factory(node.left.value < right.value) + + def transform_name(node): + # Should be Consts + return next(node.infer()) + + self.transformer.register_transform(nodes.Compare, transform_compare) + self.transformer.register_transform(nodes.Name, transform_name) + + module = self.parse_transform(''' + a = 42 + b = 24 + a < b + ''') + + self.assertIsInstance(module.body[2], nodes.Expr) + self.assertIsInstance(module.body[2].value, nodes.Const) + self.assertFalse(module.body[2].value.value) + + def test_transform_patches_locals(self): + def transform_function(node): + assign = nodes.Assign() + name = nodes.AssignName() + name.name = 'value' + assign.targets = [name] + assign.value = nodes.const_factory(42) + node.body.append(assign) + + self.transformer.register_transform(nodes.FunctionDef, + transform_function) + + module = self.parse_transform(''' + def test(): + pass + ''') + + func = module.body[0] + self.assertEqual(len(func.body), 2) + self.assertIsInstance(func.body[1], nodes.Assign) + self.assertEqual(func.body[1].as_string(), 'value = 42') + + def test_predicates(self): + def transform_call(node): + inferred = next(node.infer()) + return inferred + + def should_inline(node): + return node.func.name.startswith('inlineme') + + self.transformer.register_transform(nodes.Call, + transform_call, + should_inline) + + module = self.parse_transform(''' + def inlineme_1(): + return 24 + def dont_inline_me(): + return 42 + def inlineme_2(): + return 2 + inlineme_1() + dont_inline_me() + inlineme_2() + ''') + values = module.body[-3:] + self.assertIsInstance(values[0], nodes.Expr) + self.assertIsInstance(values[0].value, nodes.Const) + self.assertEqual(values[0].value.value, 24) + self.assertIsInstance(values[1], nodes.Expr) + self.assertIsInstance(values[1].value, nodes.Call) + self.assertIsInstance(values[2], nodes.Expr) + self.assertIsInstance(values[2].value, nodes.Const) + self.assertEqual(values[2].value.value, 2) + + def test_transforms_are_separated(self): + # Test that the transforming is done at a separate + # step, which means that we are not doing inference + # on a partially constructred tree anymore, which was the + # source of crashes in the past when certain inference rules + # were used in a transform. + def transform_function(node): + if node.decorators: + for decorator in node.decorators.nodes: + inferred = next(decorator.infer()) + if inferred.qname() == 'abc.abstractmethod': + return next(node.infer_call_result(node)) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + module = builder.parse(''' + import abc + from abc import abstractmethod + + class A(object): + @abc.abstractmethod + def ala(self): + return 24 + + @abstractmethod + def bala(self): + return 42 + ''') + + cls = module['A'] + ala = cls.body[0] + bala = cls.body[1] + self.assertIsInstance(ala, nodes.Const) + self.assertEqual(ala.value, 24) + self.assertIsInstance(bala, nodes.Const) + self.assertEqual(bala.value, 42) + + def test_transforms_are_called_for_builtin_modules(self): + # Test that transforms are called for builtin modules. 
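+        # the transform below rewrites each matching function's argument list
+        # to a single 'value' parameter; the predicate restricts it to
+        # functions coming from the 'time' module (e.g. time.asctime)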
+ def transform_function(node): + name = nodes.AssignName() + name.name = 'value' + node.args.args = [name] + return node + + manager = builder.MANAGER + predicate = lambda node: node.root().name == 'time' + with add_transform(manager, nodes.FunctionDef, + transform_function, predicate): + builder_instance = builder.AstroidBuilder() + module = builder_instance.module_build(time) + + asctime = module['asctime'] + self.assertEqual(len(asctime.args.args), 1) + self.assertIsInstance(asctime.args.args[0], nodes.AssignName) + self.assertEqual(asctime.args.args[0].name, 'value') + + def test_builder_apply_transforms(self): + def transform_function(node): + return nodes.const_factory(42) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + astroid_builder = builder.AstroidBuilder(apply_transforms=False) + module = astroid_builder.string_build('''def test(): pass''') + + # The transform wasn't applied. + self.assertIsInstance(module.body[0], nodes.FunctionDef) + + def test_transform_crashes_on_is_subtype_of(self): + # Test that we don't crash when having is_subtype_of + # in a transform, as per issue #188. This happened + # before, when the transforms weren't in their own step. + def transform_class(cls): + if cls.is_subtype_of('django.db.models.base.Model'): + return cls + return cls + + self.transformer.register_transform(nodes.ClassDef, + transform_class) + + self.parse_transform(''' + # Change environ to automatically call putenv() if it exists + import os + putenv = os.putenv + try: + # This will fail if there's no putenv + putenv + except NameError: + pass + else: + import UserDict + ''') + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_utils.py b/pymode/libs/astroid/tests/unittest_utils.py new file mode 100644 index 00000000..ef832252 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_utils.py @@ -0,0 +1,124 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
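+"""Tests for node_classes helpers such as are_exclusive and unpack_infer."""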
+import unittest + +from astroid import builder +from astroid import InferenceError +from astroid import nodes +from astroid import node_classes +from astroid import test_utils +from astroid import util as astroid_util + + +class InferenceUtil(unittest.TestCase): + + def test_not_exclusive(self): + module = builder.parse(""" + x = 10 + for x in range(5): + print (x) + + if x > 0: + print ('#' * x) + """, __name__, __file__) + xass1 = module.locals['x'][0] + assert xass1.lineno == 2 + xnames = [n for n in module.nodes_of_class(nodes.Name) if n.name == 'x'] + assert len(xnames) == 3 + assert xnames[1].lineno == 6 + self.assertEqual(node_classes.are_exclusive(xass1, xnames[1]), False) + self.assertEqual(node_classes.are_exclusive(xass1, xnames[2]), False) + + def test_if(self): + module = builder.parse(''' + if 1: + a = 1 + a = 2 + elif 2: + a = 12 + a = 13 + else: + a = 3 + a = 4 + ''') + a1 = module.locals['a'][0] + a2 = module.locals['a'][1] + a3 = module.locals['a'][2] + a4 = module.locals['a'][3] + a5 = module.locals['a'][4] + a6 = module.locals['a'][5] + self.assertEqual(node_classes.are_exclusive(a1, a2), False) + self.assertEqual(node_classes.are_exclusive(a1, a3), True) + self.assertEqual(node_classes.are_exclusive(a1, a5), True) + self.assertEqual(node_classes.are_exclusive(a3, a5), True) + self.assertEqual(node_classes.are_exclusive(a3, a4), False) + self.assertEqual(node_classes.are_exclusive(a5, a6), False) + + def test_try_except(self): + module = builder.parse(''' + try: + def exclusive_func2(): + "docstring" + except TypeError: + def exclusive_func2(): + "docstring" + except: + def exclusive_func2(): + "docstring" + else: + def exclusive_func2(): + "this one redefine the one defined line 42" + ''') + f1 = module.locals['exclusive_func2'][0] + f2 = module.locals['exclusive_func2'][1] + f3 = module.locals['exclusive_func2'][2] + f4 = module.locals['exclusive_func2'][3] + self.assertEqual(node_classes.are_exclusive(f1, f2), True) + self.assertEqual(node_classes.are_exclusive(f1, f3), True) + self.assertEqual(node_classes.are_exclusive(f1, f4), False) + self.assertEqual(node_classes.are_exclusive(f2, f4), True) + self.assertEqual(node_classes.are_exclusive(f3, f4), True) + self.assertEqual(node_classes.are_exclusive(f3, f2), True) + + self.assertEqual(node_classes.are_exclusive(f2, f1), True) + self.assertEqual(node_classes.are_exclusive(f4, f1), False) + self.assertEqual(node_classes.are_exclusive(f4, f2), True) + + def test_unpack_infer_uninferable_nodes(self): + node = test_utils.extract_node(''' + x = [A] * 1 + f = [x, [A] * 2] + f + ''') + inferred = next(node.infer()) + unpacked = list(node_classes.unpack_infer(inferred)) + self.assertEqual(len(unpacked), 3) + self.assertTrue(all(elt is astroid_util.YES + for elt in unpacked)) + + def test_unpack_infer_empty_tuple(self): + node = test_utils.extract_node(''' + () + ''') + inferred = next(node.infer()) + with self.assertRaises(InferenceError): + list(node_classes.unpack_infer(inferred)) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/transforms.py b/pymode/libs/astroid/transforms.py new file mode 100644 index 00000000..5d8fc91b --- /dev/null +++ b/pymode/libs/astroid/transforms.py @@ -0,0 +1,96 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +import collections +import warnings + + +class TransformVisitor(object): + """A visitor for handling transforms. + + The standard approach of using it is to call + :meth:`~visit` with an *astroid* module and the class + will take care of the rest, walking the tree and running the + transforms for each encountered node. + """ + + def __init__(self): + self.transforms = collections.defaultdict(list) + + def _transform(self, node): + """Call matching transforms for the given node if any and return the + transformed node. + """ + cls = node.__class__ + if cls not in self.transforms: + # no transform registered for this class of node + return node + + transforms = self.transforms[cls] + orig_node = node # copy the reference + for transform_func, predicate in transforms: + if predicate is None or predicate(node): + ret = transform_func(node) + # if the transformation function returns something, it's + # expected to be a replacement for the node + if ret is not None: + if node is not orig_node: + # node has already be modified by some previous + # transformation, warn about it + warnings.warn('node %s substituted multiple times' % node) + node = ret + return node + + def _visit(self, node): + if hasattr(node, '_astroid_fields'): + for field in node._astroid_fields: + value = getattr(node, field) + visited = self._visit_generic(value) + setattr(node, field, visited) + return self._transform(node) + + def _visit_generic(self, node): + if isinstance(node, list): + return [self._visit_generic(child) for child in node] + elif isinstance(node, tuple): + return tuple(self._visit_generic(child) for child in node) + else: + return self._visit(node) + + def register_transform(self, node_class, transform, predicate=None): + """Register `transform(node)` function to be applied on the given + astroid's `node_class` if `predicate` is None or returns true + when called with the node as argument. + + The transform function may return a value which is then used to + substitute the original node in the tree. + """ + self.transforms[node_class].append((transform, predicate)) + + def unregister_transform(self, node_class, transform, predicate=None): + """Unregister the given transform.""" + self.transforms[node_class].remove((transform, predicate)) + + def visit(self, module): + """Walk the given astroid *tree* and transform each encountered node + + Only the nodes which have transforms registered will actually + be replaced or changed. + """ + module.body = [self._visit(child) for child in module.body] + return self._transform(module) diff --git a/pymode/libs/astroid/util.py b/pymode/libs/astroid/util.py new file mode 100644 index 00000000..44e2039d --- /dev/null +++ b/pymode/libs/astroid/util.py @@ -0,0 +1,89 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +# +# The code in this file was originally part of logilab-common, licensed under +# the same license. +import warnings + +from astroid import exceptions + + +def generate_warning(message, warning): + return lambda *args: warnings.warn(message % args, warning, stacklevel=3) + +rename_warning = generate_warning( + "%r is deprecated and will be removed in astroid %.1f, use %r instead", + PendingDeprecationWarning) + +attribute_to_method_warning = generate_warning( + "%s is deprecated and will be removed in astroid %.1f, use the " + "method '%s()' instead.", PendingDeprecationWarning) + +attribute_to_function_warning = generate_warning( + "%s is deprecated and will be removed in astroid %.1f, use the " + "function '%s()' instead.", PendingDeprecationWarning) + +method_to_function_warning = generate_warning( + "%s() is deprecated and will be removed in astroid %.1f, use the " + "function '%s()' instead.", PendingDeprecationWarning) + + +class _Yes(object): + """Special inference object, which is returned when inference fails.""" + def __repr__(self): + return 'YES' + + __str__ = __repr__ + + def __getattribute__(self, name): + if name == 'next': + raise AttributeError('next method should not be called') + if name.startswith('__') and name.endswith('__'): + return super(_Yes, self).__getattribute__(name) + if name == 'accept': + return super(_Yes, self).__getattribute__(name) + return self + + def __call__(self, *args, **kwargs): + return self + + def accept(self, visitor): + func = getattr(visitor, "visit_yes") + return func(self) + + +YES = _Yes() + +def safe_infer(node, context=None): + """Return the inferred value for the given node. + + Return None if inference failed or if there is some ambiguity (more than + one node has been inferred). + """ + try: + inferit = node.infer(context=context) + value = next(inferit) + except exceptions.InferenceError: + return + try: + next(inferit) + return # None if there is ambiguity on the inferred node + except exceptions.InferenceError: + return # there is some kind of ambiguity + except StopIteration: + return value diff --git a/pymode/libs/astroid/utils.py b/pymode/libs/astroid/utils.py deleted file mode 100644 index ae72a92c..00000000 --- a/pymode/libs/astroid/utils.py +++ /dev/null @@ -1,239 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . -"""this module contains some utilities to navigate in the tree or to -extract information from it -""" -from __future__ import print_function - -__docformat__ = "restructuredtext en" - -from astroid.exceptions import AstroidBuildingException -from astroid.builder import parse - - -class ASTWalker(object): - """a walker visiting a tree in preorder, calling on the handler: - - * visit_ on entering a node, where class name is the class of - the node in lower case - - * leave_ on leaving a node, where class name is the class of - the node in lower case - """ - - def __init__(self, handler): - self.handler = handler - self._cache = {} - - def walk(self, node, _done=None): - """walk on the tree from , getting callbacks from handler""" - if _done is None: - _done = set() - if node in _done: - raise AssertionError((id(node), node, node.parent)) - _done.add(node) - self.visit(node) - for child_node in node.get_children(): - self.handler.set_context(node, child_node) - assert child_node is not node - self.walk(child_node, _done) - self.leave(node) - assert node.parent is not node - - def get_callbacks(self, node): - """get callbacks from handler for the visited node""" - klass = node.__class__ - methods = self._cache.get(klass) - if methods is None: - handler = self.handler - kid = klass.__name__.lower() - e_method = getattr(handler, 'visit_%s' % kid, - getattr(handler, 'visit_default', None)) - l_method = getattr(handler, 'leave_%s' % kid, - getattr(handler, 'leave_default', None)) - self._cache[klass] = (e_method, l_method) - else: - e_method, l_method = methods - return e_method, l_method - - def visit(self, node): - """walk on the tree from , getting callbacks from handler""" - method = self.get_callbacks(node)[0] - if method is not None: - method(node) - - def leave(self, node): - """walk on the tree from , getting callbacks from handler""" - method = self.get_callbacks(node)[1] - if method is not None: - method(node) - - -class LocalsVisitor(ASTWalker): - """visit a project by traversing the locals dictionary""" - def __init__(self): - ASTWalker.__init__(self, self) - self._visited = {} - - def visit(self, node): - """launch the visit starting from the given node""" - if node in self._visited: - return - self._visited[node] = 1 # FIXME: use set ? 
- methods = self.get_callbacks(node) - if methods[0] is not None: - methods[0](node) - if 'locals' in node.__dict__: # skip Instance and other proxy - for local_node in node.values(): - self.visit(local_node) - if methods[1] is not None: - return methods[1](node) - - -def _check_children(node): - """a helper function to check children - parent relations""" - for child in node.get_children(): - ok = False - if child is None: - print("Hm, child of %s is None" % node) - continue - if not hasattr(child, 'parent'): - print(" ERROR: %s has child %s %x with no parent" % ( - node, child, id(child))) - elif not child.parent: - print(" ERROR: %s has child %s %x with parent %r" % ( - node, child, id(child), child.parent)) - elif child.parent is not node: - print(" ERROR: %s %x has child %s %x with wrong parent %s" % ( - node, id(node), child, id(child), child.parent)) - else: - ok = True - if not ok: - print("lines;", node.lineno, child.lineno) - print("of module", node.root(), node.root().name) - raise AstroidBuildingException - _check_children(child) - - -class TreeTester(object): - '''A helper class to see _ast tree and compare with astroid tree - - indent: string for tree indent representation - lineno: bool to tell if we should print the line numbers - - >>> tester = TreeTester('print') - >>> print tester.native_tree_repr() - - - . body = [ - . - . . nl = True - . ] - >>> print tester.astroid_tree_repr() - Module() - body = [ - Print() - dest = - values = [ - ] - ] - ''' - - indent = '. ' - lineno = False - - def __init__(self, sourcecode): - self._string = '' - self.sourcecode = sourcecode - self._ast_node = None - self.build_ast() - - def build_ast(self): - """build the _ast tree from the source code""" - self._ast_node = parse(self.sourcecode) - - def native_tree_repr(self, node=None, indent=''): - """get a nice representation of the _ast tree""" - self._string = '' - if node is None: - node = self._ast_node - self._native_repr_tree(node, indent) - return self._string - - - def _native_repr_tree(self, node, indent, _done=None): - """recursive method for the native tree representation""" - from _ast import Load as _Load, Store as _Store, Del as _Del - from _ast import AST as Node - if _done is None: - _done = set() - if node in _done: - self._string += '\nloop in tree: %r (%s)' % ( - node, getattr(node, 'lineno', None)) - return - _done.add(node) - self._string += '\n' + indent + '<%s>' % node.__class__.__name__ - indent += self.indent - if not hasattr(node, '__dict__'): - self._string += '\n' + self.indent + " ** node has no __dict__ " + str(node) - return - node_dict = node.__dict__ - if hasattr(node, '_attributes'): - for a in node._attributes: - attr = node_dict[a] - if attr is None: - continue - if a in ("lineno", "col_offset") and not self.lineno: - continue - self._string += '\n' + indent + a + " = " + repr(attr) - for field in node._fields or (): - attr = node_dict[field] - if attr is None: - continue - if isinstance(attr, list): - if not attr: - continue - self._string += '\n' + indent + field + ' = [' - for elt in attr: - self._native_repr_tree(elt, indent, _done) - self._string += '\n' + indent + ']' - continue - if isinstance(attr, (_Load, _Store, _Del)): - continue - if isinstance(attr, Node): - self._string += '\n' + indent + field + " = " - self._native_repr_tree(attr, indent, _done) - else: - self._string += '\n' + indent + field + " = " + repr(attr) - - - def build_astroid_tree(self): - """build astroid tree from the _ast tree - """ - from astroid.builder import AstroidBuilder 
- tree = AstroidBuilder().string_build(self.sourcecode) - return tree - - def astroid_tree_repr(self, ids=False): - """build the astroid tree and return a nice tree representation""" - mod = self.build_astroid_tree() - return mod.repr_tree(ids) - - -__all__ = ('LocalsVisitor', 'ASTWalker',) - diff --git a/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth b/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth new file mode 100644 index 00000000..0b1f79dd --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('backports',));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('backports', types.ModuleType('backports'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/DESCRIPTION.rst b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..ab650b3c --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/DESCRIPTION.rst @@ -0,0 +1,28 @@ +.. image:: https://img.shields.io/pypi/v/backports.functools_lru_cache.svg + :target: https://pypi.org/project/backports.functools_lru_cache + +.. image:: https://img.shields.io/pypi/pyversions/backports.functools_lru_cache.svg + +.. image:: https://img.shields.io/pypi/dm/backports.functools_lru_cache.svg + +.. image:: https://img.shields.io/travis/jaraco/backports.functools_lru_cache/master.svg + :target: http://travis-ci.org/jaraco/backports.functools_lru_cache + +License is indicated in the project metadata (typically one or more +of the Trove classifiers). For more details, see `this explanation +`_. + +Backport of functools.lru_cache from Python 3.3 as published at `ActiveState +`_. + +Usage +----- + +Consider using this technique for importing the 'lru_cache' function:: + + try: + from functools import lru_cache + except ImportError: + from backports.functools_lru_cache import lru_cache + + diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/INSTALLER b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/METADATA b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/METADATA new file mode 100644 index 00000000..758d5304 --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/METADATA @@ -0,0 +1,44 @@ +Metadata-Version: 2.0 +Name: backports.functools-lru-cache +Version: 1.3 +Summary: backports.functools_lru_cache +Home-page: https://github.com/jaraco/backports.functools_lru_cache +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 + +.. image:: https://img.shields.io/pypi/v/backports.functools_lru_cache.svg + :target: https://pypi.org/project/backports.functools_lru_cache + +.. image:: https://img.shields.io/pypi/pyversions/backports.functools_lru_cache.svg + +.. 
image:: https://img.shields.io/pypi/dm/backports.functools_lru_cache.svg + +.. image:: https://img.shields.io/travis/jaraco/backports.functools_lru_cache/master.svg + :target: http://travis-ci.org/jaraco/backports.functools_lru_cache + +License is indicated in the project metadata (typically one or more +of the Trove classifiers). For more details, see `this explanation +`_. + +Backport of functools.lru_cache from Python 3.3 as published at `ActiveState +`_. + +Usage +----- + +Consider using this technique for importing the 'lru_cache' function:: + + try: + from functools import lru_cache + except ImportError: + from backports.functools_lru_cache import lru_cache + + diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/RECORD b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/RECORD new file mode 100644 index 00000000..124767c8 --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/RECORD @@ -0,0 +1,11 @@ +backports.functools_lru_cache-1.3-py3.5-nspkg.pth,sha256=JHydVi7598aklfRVCkCh9f5qqKRWgzZ9rAwKAApSu4w,314 +backports/functools_lru_cache.py,sha256=Rz9eeee4g7nSELRnqQCYwf7_i19lGXCZlLzfqa_Zjsk,7317 +backports.functools_lru_cache-1.3.dist-info/DESCRIPTION.rst,sha256=iYa72BvLZDki4qjxP2qLBxLIDop4i1c1SQS-mwkabsU,997 +backports.functools_lru_cache-1.3.dist-info/METADATA,sha256=M3kggZIt4_i_Q0rkatLxYI-9oQTI_vLqVJSGHXgv674,1565 +backports.functools_lru_cache-1.3.dist-info/RECORD,, +backports.functools_lru_cache-1.3.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +backports.functools_lru_cache-1.3.dist-info/metadata.json,sha256=kJgZvBIl8AQCESL4_9mDLCbaEBEW7-Xe0jHuHte_iVU,695 +backports.functools_lru_cache-1.3.dist-info/namespace_packages.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 +backports.functools_lru_cache-1.3.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 +backports.functools_lru_cache-1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +backports/functools_lru_cache.pyc,, diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/WHEEL b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/WHEEL new file mode 100644 index 00000000..8b6dd1b5 --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/metadata.json b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/metadata.json new file mode 100644 index 00000000..e7912716 --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "jaraco@jaraco.com", "name": "Jason R. 
Coombs", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/jaraco/backports.functools_lru_cache"}}}, "generator": "bdist_wheel (0.29.0)", "metadata_version": "2.0", "name": "backports.functools-lru-cache", "summary": "backports.functools_lru_cache", "version": "1.3"} \ No newline at end of file diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/namespace_packages.txt b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/namespace_packages.txt new file mode 100644 index 00000000..99d2be5b --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +backports diff --git a/pymode/libs/backports.functools_lru_cache-1.3.dist-info/top_level.txt b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/top_level.txt new file mode 100644 index 00000000..99d2be5b --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +backports diff --git a/pymode/libs/backports/configparser/__init__.py b/pymode/libs/backports/configparser/__init__.py new file mode 100644 index 00000000..06d7a085 --- /dev/null +++ b/pymode/libs/backports/configparser/__init__.py @@ -0,0 +1,1390 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Configuration file parser. + +A configuration file consists of sections, lead by a "[section]" header, +and followed by "name: value" entries, with continuations and such in +the style of RFC 822. + +Intrinsic defaults can be specified by passing them into the +ConfigParser constructor as a dictionary. + +class: + +ConfigParser -- responsible for parsing a list of + configuration files, and managing the parsed database. + + methods: + + __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, + delimiters=('=', ':'), comment_prefixes=('#', ';'), + inline_comment_prefixes=None, strict=True, + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=, converters=): + Create the parser. When `defaults' is given, it is initialized into the + dictionary or intrinsic defaults. The keys must be strings, the values + must be appropriate for %()s string interpolation. + + When `dict_type' is given, it will be used to create the dictionary + objects for the list of sections, for the options within a section, and + for the default values. + + When `delimiters' is given, it will be used as the set of substrings + that divide keys from values. + + When `comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in empty lines. Comments can be + indented. + + When `inline_comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in non-empty lines. + + When `strict` is True, the parser won't allow for any section or option + duplicates while reading from a single source (file, string or + dictionary). Default is True. + + When `empty_lines_in_values' is False (default: True), each empty line + marks the end of an option. Otherwise, internal empty lines of + a multiline option are kept as part of the value. + + When `allow_no_value' is True (default: False), options without + values are accepted; the value presented for these is None. + + sections() + Return all the configuration section names, sans DEFAULT. + + has_section(section) + Return whether the given section exists. + + has_option(section, option) + Return whether the given option exists in the given section. 
+ + options(section) + Return list of configuration options for the named section. + + read(filenames, encoding=None) + Read and parse the list of named configuration files, given by + name. A single filename is also allowed. Non-existing files + are ignored. Return list of successfully read files. + + read_file(f, filename=None) + Read and parse one configuration file, given as a file object. + The filename defaults to f.name; it is only used in error + messages (if f has no `name' attribute, the string `' is used). + + read_string(string) + Read configuration from a given string. + + read_dict(dictionary) + Read configuration from a dictionary. Keys are section names, + values are dictionaries with keys and values that should be present + in the section. If the used dictionary type preserves order, sections + and their keys will be added in order. Values are automatically + converted to strings. + + get(section, option, raw=False, vars=None, fallback=_UNSET) + Return a string value for the named option. All % interpolations are + expanded in the return values, based on the defaults passed into the + constructor and the DEFAULT section. Additional substitutions may be + provided using the `vars' argument, which must be a dictionary whose + contents override any pre-existing defaults. If `option' is a key in + `vars', the value from `vars' is used. + + getint(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to an integer. + + getfloat(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a float. + + getboolean(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a boolean (currently case + insensitively defined as 0, false, no, off for False, and 1, true, + yes, on for True). Returns False or True. + + items(section=_UNSET, raw=False, vars=None) + If section is given, return a list of tuples with (name, value) for + each option in the section. Otherwise, return a list of tuples with + (section_name, section_proxy) for each section, including DEFAULTSECT. + + remove_section(section) + Remove the given file section and all its options. + + remove_option(section, option) + Remove the given option from the given section. + + set(section, option, value) + Set the given option. + + write(fp, space_around_delimiters=True) + Write the configuration state in .ini format. If + `space_around_delimiters' is True (the default), delimiters + between keys and values are surrounded by spaces. 
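+
+Usage sketch (illustrative only; the file name and option names below are
+examples, not part of this module):
+
+    parser = ConfigParser()
+    parser.read('example.ini')
+    host = parser.get('server', 'host', fallback='localhost')
+    port = parser.getint('server', 'port', fallback=8080)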
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from collections import MutableMapping +import functools +import io +import itertools +import re +import sys +import warnings + +from backports.configparser.helpers import OrderedDict as _default_dict +from backports.configparser.helpers import ChainMap as _ChainMap +from backports.configparser.helpers import from_none, open, str, PY2 + +__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", + "NoOptionError", "InterpolationError", "InterpolationDepthError", + "InterpolationMissingOptionError", "InterpolationSyntaxError", + "ParsingError", "MissingSectionHeaderError", + "ConfigParser", "SafeConfigParser", "RawConfigParser", + "Interpolation", "BasicInterpolation", "ExtendedInterpolation", + "LegacyInterpolation", "SectionProxy", "ConverterMapping", + "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] + +DEFAULTSECT = "DEFAULT" + +MAX_INTERPOLATION_DEPTH = 10 + + +# exception classes +class Error(Exception): + """Base class for ConfigParser exceptions.""" + + def __init__(self, msg=''): + self.message = msg + Exception.__init__(self, msg) + + def __repr__(self): + return self.message + + __str__ = __repr__ + + +class NoSectionError(Error): + """Raised when no section matches a requested option.""" + + def __init__(self, section): + Error.__init__(self, 'No section: %r' % (section,)) + self.section = section + self.args = (section, ) + + +class DuplicateSectionError(Error): + """Raised when a section is repeated in an input source. + + Possible repetitions that raise this exception are: multiple creation + using the API or in strict parsers when a section is found more than once + in a single input file, string or dictionary. + """ + + def __init__(self, section, source=None, lineno=None): + msg = [repr(section), " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": section ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Section ") + Error.__init__(self, "".join(msg)) + self.section = section + self.source = source + self.lineno = lineno + self.args = (section, source, lineno) + + +class DuplicateOptionError(Error): + """Raised by strict parsers when an option is repeated in an input source. + + Current implementation raises this exception only when an option is found + more than once in a single file, string or dictionary. 
+ """ + + def __init__(self, section, option, source=None, lineno=None): + msg = [repr(option), " in section ", repr(section), + " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": option ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Option ") + Error.__init__(self, "".join(msg)) + self.section = section + self.option = option + self.source = source + self.lineno = lineno + self.args = (section, option, source, lineno) + + +class NoOptionError(Error): + """A requested option was not found.""" + + def __init__(self, option, section): + Error.__init__(self, "No option %r in section: %r" % + (option, section)) + self.option = option + self.section = section + self.args = (option, section) + + +class InterpolationError(Error): + """Base class for interpolation-related exceptions.""" + + def __init__(self, option, section, msg): + Error.__init__(self, msg) + self.option = option + self.section = section + self.args = (option, section, msg) + + +class InterpolationMissingOptionError(InterpolationError): + """A string substitution required a setting which was not available.""" + + def __init__(self, option, section, rawval, reference): + msg = ("Bad value substitution: option {0!r} in section {1!r} contains " + "an interpolation key {2!r} which is not a valid option name. " + "Raw value: {3!r}".format(option, section, reference, rawval)) + InterpolationError.__init__(self, option, section, msg) + self.reference = reference + self.args = (option, section, rawval, reference) + + +class InterpolationSyntaxError(InterpolationError): + """Raised when the source text contains invalid syntax. + + Current implementation raises this exception when the source text into + which substitutions are made does not conform to the required syntax. + """ + + +class InterpolationDepthError(InterpolationError): + """Raised when substitutions are nested too deeply.""" + + def __init__(self, option, section, rawval): + msg = ("Recursion limit exceeded in value substitution: option {0!r} " + "in section {1!r} contains an interpolation key which " + "cannot be substituted in {2} steps. Raw value: {3!r}" + "".format(option, section, MAX_INTERPOLATION_DEPTH, + rawval)) + InterpolationError.__init__(self, option, section, msg) + self.args = (option, section, rawval) + + +class ParsingError(Error): + """Raised when a configuration file does not follow legal syntax.""" + + def __init__(self, source=None, filename=None): + # Exactly one of `source'/`filename' arguments has to be given. + # `filename' kept for compatibility. + if filename and source: + raise ValueError("Cannot specify both `filename' and `source'. " + "Use `source'.") + elif not filename and not source: + raise ValueError("Required argument `source' not given.") + elif filename: + source = filename + Error.__init__(self, 'Source contains parsing errors: %r' % source) + self.source = source + self.errors = [] + self.args = (source, ) + + @property + def filename(self): + """Deprecated, use `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. " + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + return self.source + + @filename.setter + def filename(self, value): + """Deprecated, user `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. 
" + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + self.source = value + + def append(self, lineno, line): + self.errors.append((lineno, line)) + self.message += '\n\t[line %2d]: %s' % (lineno, line) + + +class MissingSectionHeaderError(ParsingError): + """Raised when a key-value pair is found before any section header.""" + + def __init__(self, filename, lineno, line): + Error.__init__( + self, + 'File contains no section headers.\nfile: %r, line: %d\n%r' % + (filename, lineno, line)) + self.source = filename + self.lineno = lineno + self.line = line + self.args = (filename, lineno, line) + + +# Used in parser getters to indicate the default behaviour when a specific +# option is not found it to raise an exception. Created to enable `None' as +# a valid fallback value. +_UNSET = object() + + +class Interpolation(object): + """Dummy interpolation that passes the value through with no changes.""" + + def before_get(self, parser, section, option, value, defaults): + return value + + def before_set(self, parser, section, option, value): + return value + + def before_read(self, parser, section, option, value): + return value + + def before_write(self, parser, section, option, value): + return value + + +class BasicInterpolation(Interpolation): + """Interpolation as implemented in the classic ConfigParser. + + The option values can contain format strings which refer to other values in + the same section, or values in the special default section. + + For example: + + something: %(dir)s/whatever + + would resolve the "%(dir)s" to the value of dir. All reference + expansions are done late, on demand. If a user needs to use a bare % in + a configuration file, she can escape it by writing %%. Other % usage + is considered a user error and raises `InterpolationSyntaxError'.""" + + _KEYCRE = re.compile(r"%\(([^)]+)\)s") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('%%', '') # escaped percent signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '%' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('%'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("%") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "%": + accum.append("%") + rest = rest[2:] + elif c == "(": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + var = parser.optionxform(m.group(1)) + rest = rest[m.end():] + try: + v = map[var] + except KeyError: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, var)) + if "%" in v: + self._interpolate_some(parser, option, accum, v, + section, map, depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'%%' must be followed by '%%' or '(', " + "found: %r" % (rest,)) + + +class ExtendedInterpolation(Interpolation): + """Advanced variant of interpolation, supports the syntax used by + 
`zc.buildout'. Enables interpolation between sections.""" + + _KEYCRE = re.compile(r"\$\{([^}]+)\}") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('$$', '') # escaped dollar signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '$' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('$'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("$") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "$": + accum.append("$") + rest = rest[2:] + elif c == "{": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + path = m.group(1).split(':') + rest = rest[m.end():] + sect = section + opt = option + try: + if len(path) == 1: + opt = parser.optionxform(path[0]) + v = map[opt] + elif len(path) == 2: + sect = path[0] + opt = parser.optionxform(path[1]) + v = parser.get(sect, opt, raw=True) + else: + raise InterpolationSyntaxError( + option, section, + "More than one ':' found: %r" % (rest,)) + except (KeyError, NoSectionError, NoOptionError): + raise from_none(InterpolationMissingOptionError( + option, section, rawval, ":".join(path))) + if "$" in v: + self._interpolate_some(parser, opt, accum, v, sect, + dict(parser.items(sect, raw=True)), + depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'$' must be followed by '$' or '{', " + "found: %r" % (rest,)) + + +class LegacyInterpolation(Interpolation): + """Deprecated interpolation used in old versions of ConfigParser. + Use BasicInterpolation or ExtendedInterpolation instead.""" + + _KEYCRE = re.compile(r"%\(([^)]*)\)s|.") + + def before_get(self, parser, section, option, value, vars): + rawval = value + depth = MAX_INTERPOLATION_DEPTH + while depth: # Loop through this until it's done + depth -= 1 + if value and "%(" in value: + replace = functools.partial(self._interpolation_replace, + parser=parser) + value = self._KEYCRE.sub(replace, value) + try: + value = value % vars + except KeyError as e: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, e.args[0])) + else: + break + if value and "%(" in value: + raise InterpolationDepthError(option, section, rawval) + return value + + def before_set(self, parser, section, option, value): + return value + + @staticmethod + def _interpolation_replace(match, parser): + s = match.group(1) + if s is None: + return match.group() + else: + return "%%(%s)s" % parser.optionxform(s) + + +class RawConfigParser(MutableMapping): + """ConfigParser that does not do interpolation.""" + + # Regular expressions for parsing section headers and options + _SECT_TMPL = r""" + \[ # [ + (?P
<header>[^]]+)                  # very permissive!
+        \]                                 # ]
+        """
+    _OPT_TMPL = r"""
+        (?P<option>