diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 00000000..3b087e6f --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,11 @@ +[bumpversion] +commit = True +current_version = 0.9.2 +files = plugin/pymode.vim +tag = True +tag_name = {new_version} + +[bumpversion:file:doc/pymode.txt] +search = Version: {current_version} +replace = Version: {new_version} + diff --git a/.gitignore b/.gitignore index 02e750e1..f5674a78 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,17 @@ -*.pyc +*.py[cod] +*.sw? +*~ +.DS_Store +.bundle +.ropeproject +.vim-flavor +.vimrc +Gemfile.lock +VimFlavor.lock +_ +build tags +test.py todo.txt -.ropeproject +vendor +vim.py diff --git a/.ruby-gemset b/.ruby-gemset new file mode 100644 index 00000000..5ded393e --- /dev/null +++ b/.ruby-gemset @@ -0,0 +1 @@ +vim-flavor diff --git a/.ruby-version b/.ruby-version new file mode 100644 index 00000000..67b8bc0d --- /dev/null +++ b/.ruby-version @@ -0,0 +1 @@ +ruby-1.9.3 diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..e10ed9f1 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,6 @@ +language: ruby +python: "2.7" +rvm: + - 1.9.3 +script: + - make travis diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..cc3de277 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,64 @@ +Maintainers: + +* Kirill Klenov +* Bryce Guinta (https://github.com/brycepg) + + +Contributors: + +* Alvin Francis (http://github.com/alvinfrancis); +* Andriy Kohut (https://github.com/andriykohut) +* Anler Hp (http://github.com/ikame); +* Anton Parkhomenko (http://github.com/chuwy); +* Ashley Hewson (http://github.com/ashleyh); +* Benjamin Ruston (http://github.com/bruston); +* Boris Filippov (http://github.com/frenzykryger); +* Brad Mease (http://github.com/bmease) +* Brendan Maguire (https://github.com/brendanmaguire) +* Daniel Hahler (http://github.com/blueyed) +* David Vogt (http://github.com/winged); +* Denis Kasak (http://github.com/dkasak); +* Dimitrios Semitsoglou-Tsiapos 
(https://github.com/dset0x); +* Dirk Wallenstein (http://github.com/dirkwallenstein); +* Florent Xicluna (http://github.com/florentx); +* Fredrik Henrysson (http://github.com/fhenrysson); +* Igor Guerrero (http://github.com/igorgue); +* Jacob Niehus (https://github.com/wilywampa) +* Jason Harvey (http://github.com/alienth) +* Jay Rainey (https://github.com/jawrainey) +* Jonathan McCall (http://github.com/Jonnymcc); +* Kevin Deldycke (http://github.com/kdeldycke); +* Kurtis Rader (https://github.com/krader1961); +* Lawrence Akka (https://github.com/lawrenceakka); +* Lowe Thiderman (http://github.com/thiderman); +* Martin Brochhaus (http://github.com/mbrochh); +* Matt Dodge (https://github.com/mattdodge); +* Matthew Moses (http://github.com/mlmoses); +* Maxim (https://github.com/mpyatishev); +* Mel Boyce (http://github.com/syngin); +* Mohammed (http://github.com/mbadran); +* Naoya Inada (http://github.com/naoina); +* Nate Zhang (https://github.com/natezhang93); +* Paweł Korzeniewski (https://github.com/korzeniewskipl); +* Pedro Algarvio (http://github.com/s0undt3ch); +* Phillip Cloud (http://github.com/cpcloud); +* Piet Delport (http://github.com/pjdelport); +* Robert David Grant (http://github.com/bgrant); +* Robin Schneider (https://github.com/ypid); +* Ronald Andreu Kaiser (http://github.com/cathoderay);; +* Samir Benmendil (https://github.com/Ram-Z); +* Sorin Ionescu (sorin-ionescu); +* Steve Losh (http://github.com/sjl); +* Tommy Allen (https://github.com/tweekmonster); +* Tony Narlock (https://github.com/tony); +* Tyler Fenby (https://github.com/TFenby); +* Vincent Driessen (https://github.com/nvie); +* Wang Feng (https://github.com/mapler); +* Wayne Ye (https://github.com/WayneYe); +* Wes Turner (https://github.com/westurner); +* bendavis78 (https://github.com/bendavis78); +* fwuzju (https://github.com/fwuzju); +* lee (https://github.com/loyalpartner); +* nixon (https://github.com/nixon); +* sphaugh (https://github.com/sphaugh); +* tramchamploo 
(https://github.com/tramchamploo); diff --git a/COPYING b/COPYING new file mode 100644 index 00000000..65c5ca88 --- /dev/null +++ b/COPYING @@ -0,0 +1,165 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. 
Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. 
+ + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. 
(If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. 
+ + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/Changelog.rst b/Changelog.rst index 95642d5b..e396eb69 100644 --- a/Changelog.rst +++ b/Changelog.rst @@ -1,6 +1,274 @@ Changelog ========= +* Pylama updated to version 5.0.5 +* Rope libs updated +* Add wdb to debugger list in breakpoint cmd +* Add 'pymode_options_max_line_length' option +* Add ability to set related checker options `:help pymode-lint-options` + Options added: 'pymode_lint_options_pep8', 'pymode_lint_options_pep257', + 'pymode_lint_options_mccabe', 'pymode_lint_options_pyflakes', + 'pymode_lint_options_pylint' +* Highlight comments inside class/function arg lists +* Don't fold single line def +* Don't skip a line when the first docstring contains text +* Add Python documentation vertical display option +* Rope: correct refactoring function calls + + +## 2014-06-11 0.8.1 +------------------- +* Pylama updated to version 3.3.2 +* Get fold's expression symbol from &fillchars; +* Fixed error when setting g:pymode_breakpoint_cmd (expobrain); +* Fixed code running; +* Ability to override rope project root and .ropeproject folder +* Added path argument to `PymodeRopeNewProject` which skips prompt +* Disable `pymode_rope_lookup_project` by default +* Options added: + 'pymode_rope_project_root', 'pymode_rope_ropefolder' + + +## 2013-12-04 0.7.8b +-------------------- + * Update indentation support; + * Python3 support; + * Removed pymode modeline support; + * Disabled async code checking support; + * Options changes: + 'pymode_doc_key' -> 'pymode_doc_bind' + 'pymode_run_key' -> 'pymode_run_bind' + 'pymode_breakpoint_key' -> 'pymode_breakpoint_bind' + 'pymode_breakpoint_template' -> 'pymode_breakpoint_cmd' + 'pymode_lint_write' -> 'pymode_lint_on_write' + 
'pymode_lint_onfly' -> 'pymode_lint_on_fly' + 'pymode_lint_checker' -> 'pymode_lint_checkers' + 'pymode_lint_minheight' -> 'pymode_quickfix_minheight' + 'pymode_lint_maxheight' -> 'pymode_quickfix_maxheight' + 'pymode_rope_autocomplete_map' -> 'pymode_rope_completion_bind' + 'pymode_rope_enable_autoimport' -> 'pymode_rope_autoimport' + + * Options removed: + + 'pymode_lint_hold', 'pymode_lint_config', 'pymode_lint_jump', + 'pymode_lint_signs_always_visible', 'pymode_rope_extended_complete', + 'pymode_rope_auto_project', 'pymode_rope_autoimport_generate', + 'pymode_rope_autoimport_underlines', 'pymode_rope_codeassist_maxfixes', + 'pymode_rope_sorted_completions', 'pymode_rope_extended_complete', + 'pymode_rope_confirm_saving', 'pymode_rope_global_prefix', + 'pymode_rope_local_prefix', 'pymode_rope_vim_completion', + 'pymode_rope_guess_project', 'pymode_rope_goto_def_newwin', + 'pymode_rope_always_show_complete_menu' + + * Options added: + 'pymode_rope_regenerate_on_write', 'pymode_rope_completion', + 'pymode_rope_complete_on_dot', 'pymode_lint_sort', + 'pymode_rope_lookup_project', 'pymode_lint_unmodified' + + * Commands added: + 'PymodeVirtualenv' + + * Commands changed: + 'PyDoc' -> 'PymodeDoc' + 'Pyrun' -> 'PymodeRun' + 'PyLintToggle' -> 'PymodeLintToggle' + 'PyLint' -> 'PymodeLint' + 'PyLintAuto' -> 'PymodeLintAuto' + 'RopeOpenProject' -> 'PymodeRopeNewProject' + 'RopeUndo' -> 'PymodeRopeUndo' + 'RopeRedo' -> 'PymodeRopeRedo' + 'RopeRenameCurrentModule' -> 'PymodeRopeRenameModule' + 'RopeModuleToPackage' -> 'PymodeRopeModuleToPackage' + 'RopeGenerateAutoimportCache' -> 'PymodeRopeRegenerate' + 'RopeOrgamizeImports' -> 'PymodeRopeAutoImport' + + * Commands removed: + 'PyLintCheckerToggle', 'RopeCloseProject', 'RopeProjectConfig', + 'RopeRename', 'RopeCreate<...>', 'RopeWriteProject', 'RopeRename', + 'RopeExtractVariable', 'RopeExtractMethod', 'RopeInline', 'RopeMove', + 'RopeRestructure', 'RopeUseFunction', 'RopeIntroduceFactory', + 'RopeChangeSignature', 
'RopeMoveCurrentModule', + 'RopeGenerate<...>', 'RopeAnalizeModule', 'RopeAutoImport', + + +## 2013-10-29 0.6.19 +-------------------- +* Added `g:pymode_rope_autocomplete_map` option; +* Removed `g:pymode_rope_map_space` option; +* Added PEP257 checker; +* Support 'pudb' in breakpoints; +* Pyrun can now operate on a range of lines, and does not need to save (c) lawrenceakka +* Update pylama to version 1.5.0 +* Add a set of `g:pymode_lint_*_symbol` options (c) kdeldycke; +* Support virtualenv for python3 (c) mlmoses; + +## 2013-05-15 0.6.18 +-------------------- +* Fixed autopep8 (`PyLintAuto`) command; +* Fix error on non-ascii characters in docstrings; +* Update python syntax; + +## 2013-05-03 0.6.17 +-------------------- +* Update `Pylint` to version 0.28.0; +* Update `pyflakes` to version 0.7.3; +* Fixed `lint_ignore` options bug; +* Fixed encoding problems when code running; + +## 2013-04-26 0.6.16 +-------------------- +* Improvement folding (thanks @alvinfrancis); + +## 2013-04-01 0.6.15 +-------------------- +* Bugfix release + +## 2013-03-16 0.6.14 +-------------------- +* Update `PEP8` to version 1.4.5; +* Update `Pylint` to version 0.27.0; +* Update `pyflakes` to version 0.6.1; +* Update `autopep8` to version 0.8.7; +* Fix breakpoint definition; +* Update python syntax; +* Fixed run-time error when output non-ascii in multibyte locale; +* Move initialization into ftplugin as it is python specific; +* Pyrex (Cython) files support; +* Support `raw_input` in run python code; + +## 2012-09-07 0.6.10 +-------------------- +* Dont raise an exception when Logger has no message handler (c) nixon +* Improve performance of white space removal (c) Dave Smith +* Improve ropemode support (c) s0undt3ch +* Add `g:pymode_updatetime` option +* Update autopep8 to version 0.8.1 + +## 2012-09-07 0.6.9 +------------------- +* Update autopep8 +* Improve pymode#troubleshooting#Test() + +## 2012-09-06 0.6.8 +------------------- +* Add PEP8 indentation ":help 'pymode_indent'" + 
+## 2012-08-15 0.6.7 +------------------- +* Fix documentation. Thanks (c) bgrant; +* Fix pymode "async queue" support. + +## 2012-08-02 0.6.6 +------------------- +* Updated Pep8 to version 1.3.3 +* Updated Pylint to version 0.25.2 +* Fixed virtualenv support for windows users +* Added pymode modeline ':help PythonModeModeline' +* Added diagnostic tool ':call pymode#troubleshooting#Test()' +* Added `PyLintAuto` command ':help PyLintAuto' +* Code checking is async operation now +* More, more fast the pymode folding +* Repaired execution of python code + +## 2012-05-24 0.6.4 +------------------- +* Add 'pymode_paths' option +* Rope updated to version 0.9.4 + +## 2012-04-18 0.6.3 +------------------- +* Fix pydocs integration + +## 2012-04-10 0.6.2 +------------------- +* Fix pymode_run for "unnamed" clipboard +* Add 'pymode_lint_mccabe_complexity' option +* Update Pep8 to version 1.0.1 +* Warning! Change 'pymode_rope_goto_def_newwin' option + for open "goto definition" in new window, set it to 'new' or 'vnew' + for horizontally or vertically split + If you use default behaviour (in the same buffer), not changes needed. + +## 2012-03-13 0.6.0 +------------------- +* Add 'pymode_lint_hold' option +* Improve pymode loading speed +* Add pep8, mccabe lint checkers +* Now g:pymode_lint_checker can have many values + Ex. 
"pep8,pyflakes,mccabe" +* Add 'pymode_lint_ignore' and 'pymode_lint_select' options +* Fix rope keys +* Fix python motion in visual mode +* Add folding 'pymode_folding' +* Warning: 'pymode_lint_checker' now set to 'pyflakes,pep8,mccabe' by default + +## 2012-02-12 0.5.8 +------------------- +* Fix pylint for Windows users +* Python documentation search running from Vim (delete g:pydoc option) +* Python code execution running from Vim (delete g:python option) + +## 2012-02-11 0.5.7 +------------------- +* Fix 'g:pymode_lint_message' mode error +* Fix breakpoints +* Fix python paths and virtualenv detection + +## 2012-02-06 0.5.6 +------------------- +* Fix 'g:pymode_syntax' option +* Show error message in bottom part of screen + see 'g:pymode_lint_message' +* Fix pylint for windows users +* Fix breakpoint command (Use pdb when idpb not installed) + +## 2012-01-17 0.5.5 +------------------- +* Add a sign for info messages from pylint. + (c) Fredrik Henrysson +* Change motion keys: vic - viC, dam - daM and etc +* Add 'g:pymode_lint_onfly' option + +## 2012-01-09 0.5.3 +------------------- +* Prevent the configuration from breaking python-mode + (c) Dirk Wallenstein + +## 2012-01-08 0.5.2 +------------------- +* Fix ropeomnicompletion +* Add preview documentation + +## 2012-01-06 0.5.1 +------------------- +* Happy new year! 
+* Objects and motion fixes + +## 2011-11-30 0.5.0 +------------------- +* Add python objects and motions (beta) + :h pymode_motion + +## 2011-11-27 0.4.8 +------------------- +* Add `PyLintWindowToggle` command +* Fix some bugs + +## 2011-11-23 0.4.6 +------------------- +* Enable all syntax highlighting + For old settings set in your vimrc: + let g:pymode_syntax_builtin_objs = 0 + let g:pymode_syntax_builtin_funcs = 0 + +* Change namespace of syntax variables + See README + +## 2011-11-18 0.4.5 +------------------- +* Add 'g:pymode_syntax' option +* Highlight 'self' keyword + ## 2011-11-16 0.4.4 ------------------- * Minor fixes @@ -34,7 +302,6 @@ Changelog * Add 'g:pymode_rope_always_show_complete_menu' option * Some pylint fixes - ## 2011-10-25 0.3.0 ------------------- * Add g:pymode_lint_minheight and g:pymode_lint_maxheight diff --git a/Gemfile b/Gemfile new file mode 100644 index 00000000..a87f4e1a --- /dev/null +++ b/Gemfile @@ -0,0 +1,3 @@ +source 'https://rubygems.org' + +gem 'vim-flavor', '~> 1.1' diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..dac3a287 --- /dev/null +++ b/Makefile @@ -0,0 +1,95 @@ +PYMODE = $(CURDIR)/pymode +LIBS = $(PYMODE)/libs +PYLAMA = $(LIBS)/pylama + +.PHONY: clean +clean: + find $(CURDIR) -name "*.pyc" -delete + rm -rf $(CURDIR)/build + rm -rf *.deb + +VERSION?=minor +# target: release - Bump version +release: + git fetch origin + git checkout master + git rebase + git merge develop + bumpversion $(VERSION) + git checkout develop + git rebase + git merge master + git push origin develop master + git push --tags + +.PHONY: minor +minor: release + +.PHONY: patch +patch: + make release VERSION=patch + +.PHONY: major +major: + make release VERSION=major + +# Temporary disable rope tests on Travis +.PHONY: travis +travis: + rake test + +.PHONY: test t +test: + bundle install + rm -rf $(CURDIR)/.ropeproject + rake test +t: test + +.PHONY: pylama +pylama: + rm -rf $(PYLAMA) + make $(PYLAMA) + make 
$(PYLAMA)/lint/pylama_pylint + @pip install --upgrade --force-reinstall --target=$(LIBS) pydocstyle + @pip install --upgrade --force-reinstall --target=$(LIBS) pycodestyle + @pip install --upgrade --force-reinstall --target=$(LIBS) pyflakes + @pip install --upgrade --force-reinstall --target=$(LIBS) mccabe + @find $(LIBS)/*.dist-info | xargs rm -rf + +.PHONY: rope +rope: + @git clone https://github.com/python-rope/rope.git $(CURDIR)/_/rope + @rm -rf $(CURDIR)/pymode/libs/rope + @cp -r $(CURDIR)/_/rope/rope $(CURDIR)/pymode/libs/. + +$(PYLAMA): + cp -r $$PRJDIR/pylama/pylama $(PYLAMA) + +$(PYLAMA)/lint/pylama_pylint: + cp -r $$PRJDIR/pylama/plugins/pylama_pylint/pylama_pylint/ $(PYLAMA)/lint/pylama_pylint + +$(CURDIR)/build: + mkdir -p $(CURDIR)/build/usr/share/vim/addons + mkdir -p $(CURDIR)/build/usr/share/vim/registry + cp -r after autoload doc ftplugin plugin pymode syntax $(CURDIR)/build/usr/share/vim/addons/. + cp -r python-mode.yaml $(CURDIR)/build/usr/share/vim/registry/. + +PACKAGE_VERSION?=$(shell git describe --tags `git rev-list master --tags --max-count=1`) +PACKAGE_NAME="vim-python-mode" +PACKAGE_MAINTAINER="Kirill Klenov " +PACKAGE_URL=http://github.com/klen/python-mode +deb: clean $(CURDIR)/build + @fpm -s dir -t deb -a all \ + -n $(PACKAGE_NAME) \ + -v $(PACKAGE_VERSION) \ + -m $(PACKAGE_MAINTAINER) \ + --url $(PACKAGE_URL) \ + --license "GNU lesser general public license" \ + --description "Vim-Swissknife for python" \ + --deb-user root \ + --deb-group root \ + -C $(CURDIR)/build \ + -d "python2.7" \ + -d "vim-addon-manager" \ + usr + @mv *.deb ~/Dropbox/projects/deb/load diff --git a/README.rst b/README.rst index ee0136e3..cf3be659 100644 --- a/README.rst +++ b/README.rst @@ -1,24 +1,74 @@ -Python-mode, Python in VIM -########################## - -Python-mode is a vim plugin that allows you to use the pylint_, rope_, pydoc_, pyflakes_ libraries in vim to provide -features like python code looking for bugs, refactoring and some other useful things. 
- -This plugin allow you create python code in vim very easily. -There is no need to install the pylint_, rope_ or any used python library on your system. - -- Highlight syntax errors -- Highlight and auto fix unused imports -- Strong code completion -- Code refactoring -- Python documentation -- Run python code -- Go to definition -- Powerful customization -- Virtualenv support -- And more... +|logo| Python-mode, Python in VIM +################################# + +.. image:: https://travis-ci.org/python-mode/python-mode.png?branch=develop + :target: https://travis-ci.org/python-mode/python-mode + + +------------------------------------------------------------------------------- + +

+ +

+ +***Important***: From 2017-11-19 onwards python-mode uses submodules instead of +hard coding 3rd party libraries into its codebase. Please issue the command: +`git submodule update --init --recursive` +inside your python-mode folder. + +If you are a new user please clone the repos using the recursive flag: +`git clone --recursive https://github.com/python-mode/python-mode` + +For clean and straightforward install instructions, please visit: +https://github.com/python-mode/python-mode#how-to-install + +------------------------------------------------------------------------------- + +*The project needs contributors* + +----- + +| +| Src: https://github.com/python-mode/python-mode +| Homepage: https://klen.github.io/python-mode/ +| Docs: https://github.com/python-mode/python-mode/blob/develop/doc/pymode.txt +| -See screencast here: http://t.co/3b0bzeXA (sorry for quality, this is my first screencast) +Python-mode is a vim plugin that helps you to create python code very quickly +by utilizing libraries including +`pylint`_, `rope`_, pydoc_, `pyflakes`_, `pep8`_, `autopep8`_, +`pep257`_ and `mccabe`_ +for features like static analysis, refactoring, folding, completion, +documentation, and more. + +The plugin contains all you need to develop python applications in Vim. + +There is no need to install `pylint`_, `rope`_ +or any other `Python Libraries`_ on your system. + +- Support Python version 2.6+ and 3.2+ +- Syntax highlighting +- Virtualenv support +- Run python code (``r``) +- Add/remove breakpoints (``b``) +- Improved Python indentation +- Python folding +- Python motions and operators (``]]``, ``3[[``, ``]]M``, ``vaC``, ``viM``, + ``daC``, ``ciM``, ...) +- Code checking (pylint_, pyflakes_, pylama_, ...) 
that can be run + simultaneously (``:PymodeLint``) +- Autofix PEP8 errors (``:PymodeLintAuto``) +- Search in python documentation (``K``) +- Code refactoring (rope_) +- Strong code completion (rope_) +- Go to definition (``g`` for `:RopeGotoDefinition`) +- And more, more ... + +See (very old) screencast here: http://www.youtube.com/watch?v=67OZNp9Z0CQ +(sorry for quality, this is my first screencast) Another old presentation here: +http://www.youtube.com/watch?v=YhqsjUUHj6g + +**To read python-mode documentation in Vim, see** ``:help pymode`` .. contents:: @@ -27,24 +77,23 @@ See screencast here: http://t.co/3b0bzeXA (sorry for quality, this is my first s Requirements ============ -- VIM >= 7.0 with python support - (also ``--with-features=big`` if you want use g:pymode_lint_signs) - +- VIM >= 7.3 (mostly features needed `+python` or `+python3` support) + (also ``--with-features=big`` if you want ``g:pymode_lint_signs``) How to install ============== - -Using pathogen_ (recomended) +Using pathogen (recommended) ---------------------------- :: % cd ~/.vim % mkdir -p bundle && cd bundle - % git clone git://github.com/klen/python-mode.git + % git clone https://github.com/python-mode/python-mode.git -- Enable pathogen_ in your ``~/.vimrc``: :: +- Enable `pathogen `_ + in your ``~/.vimrc``: :: " Pathogen load filetype off @@ -60,8 +109,8 @@ Manually -------- :: - % git clone git://github.com/klen/python-mode.git - % cd python-mode.vim + % git clone https://github.com/python-mode/python-mode.git + % cd python-mode % cp -R * ~/.vim Then rebuild **helptags** in vim:: @@ -69,277 +118,266 @@ Then rebuild **helptags** in vim:: :helptags ~/.vim/doc/ -.. note:: **filetype-plugin** (``:help filetype-plugin-on``) and **filetype-indent** (``:help filetype-indent-on``) - must be enabled for use python-mode. - - -Settings -======== - -.. note:: Also you can see vim help. ``:help PythonModeOptions`` - -To change this settings, edit your ``~/.vimrc``: :: +.. 
note:: **filetype-plugin** (``:help filetype-plugin-on``) and + **filetype-indent** (``:help filetype-indent-on``) + must be enabled to use python-mode. - " Disable pylint checking every save - let g:pymode_lint_write = 0 - " Set key 'R' for run python code - let g:pymode_run_key = 'R' - - -Show documentation ------------------- - -Default values: :: - - " Load show documentation plugin - let g:pymode_doc = 1 - - " Key for show python documentation - let g:pymode_doc_key = 'K' - - " Executable command for documentation search - let g:pydoc = 'pydoc' - - -Run python code +Debian packages --------------- +|Repository URL: https://klen.github.io/python-mode/deb/ -Default values: :: - - " Load run code plugin - let g:pymode_run = 1 - - " Key for run python code - let g:pymode_run_key = 'r' - +Install with commands: -Code checking -------------- - -Default values: :: - - " Load pylint code plugin - let g:pymode_lint = 1 - - " Switch pylint or pyflakes code checker - " values (pylint, pyflakes) - let g:pymode_lint_checker = "pylint" - - " Pylint configuration file - " If file not found use 'pylintrc' from python-mode plugin directory - let g:pymode_lint_config = "$HOME/.pylintrc" - - " Check code every save - let g:pymode_lint_write = 1 - - " Auto open cwindow if errors be finded - let g:pymode_lint_cwindow = 1 - - " Auto jump on first error - let g:pymode_lint_jump = 0 - - " Place error signs - let g:pymode_lint_signs = 1 - - " Minimal height of pylint error window - let g:pymode_lint_minheight = 3 - - " Maximal height of pylint error window - let g:pymode_lint_maxheight = 6 +:: + add-apt-repository https://klen.github.io/python-mode/deb main + apt-get update + apt-get install vim-python-mode -.. note:: - Pylint options (ex. 
disable messages) may be defined in ``$HOME/pylint.rc`` - See pylint documentation: http://pylint-messages.wikidot.com/all-codes +If you are getting the message: "The following signatures couldn't be verified because the public key is not available": :: + apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B5DF65307000E266 -Rope refactoring library ------------------------- +`vim-python-mode` using `vim-addons`, so after installation just enable +`python-mode` with command: :: -Default values: :: + vim-addons install python-mode - " Load rope plugin - let g:pymode_rope = 1 - " Auto create and open ropeproject - let g:pymode_rope_auto_project = 1 +Troubleshooting +=============== - " Enable autoimport - let g:pymode_rope_enable_autoimport = 1 +If your python-mode doesn't work: - " Auto generate global cache - let g:pymode_rope_autoimport_generate = 1 +1. Load Vim with only python-mode enabled (use `debug.vim` from pymode): :: - let g:pymode_rope_autoimport_underlineds = 0 + vim -u /debug.vim - let g:pymode_rope_codeassist_maxfixes = 10 +And try to repeat your case. If no error occurs, seems like problem isn't in the +plugin. - let g:pymode_rope_sorted_completions = 1 +2. Type `:PymodeTroubleshooting` - let g:pymode_rope_extended_complete = 1 +And fix any warnings or copy the output and send it to me. (For example, by +creating a `new github issue `_ +if one does not already exist for the problem). 
- let g:pymode_rope_autoimport_modules = ["os","shutil","datetime"]) - let g:pymode_rope_confirm_saving = 1 +Customization +============= - let g:pymode_rope_global_prefix = "p" +You can override the default key bindings by redefining them in your `.vimrc`, for example: :: - let g:pymode_rope_local_prefix = "r" + " Override go-to.definition key shortcut to Ctrl-] + let g:pymode_rope_goto_definition_bind = "" - let g:pymode_rope_vim_completion = 1 + " Override run current python file key shortcut to Ctrl-Shift-e + let g:pymode_run_bind = "" - let g:pymode_rope_guess_project = 1 + " Override view python doc key shortcut to Ctrl-Shift-d + let g:pymode_doc_bind = "" - let g:pymode_rope_goto_def_newwin = 0 - let g:pymode_rope_always_show_complete_menu = 0 +Frequent Problems +================= +Read this section before opening an issue on the tracker. -Other stuff ------------ +Python 3 Syntax +--------------- -Default values: :: +By default python-mode uses python 2 syntax checking. To enable python 3 +syntax checking (e.g. 
for async) add:: - " Load breakpoints plugin - let g:pymode_breakpoint = 1 + let g:pymode_python = 'python3' - " Key for set/unset breakpoint - let g:pymode_breakpoint_key = 'b' +To your vimrc or exrc file - " Autoremove unused whitespaces - let g:pymode_utils_whitespaces = 1 - " Auto fix vim python paths if virtualenv enabled - let g:pymode_virtualenv = 1 +Documentation +============= - " Set default pymode python indent options - let g:pymode_options_indent = 1 +Documentation is available in your vim ``:help pymode`` - " Set default pymode python fold options - let g:pymode_options_fold = 1 - " Set default pymode python other options - let g:pymode_options_other = 1 +Bugtracker +=========== - " Enable pymode's custom syntax highlighting - let g:pymode_syntax = 1 +If you have any suggestions, bug reports or +annoyances please report them to the issue tracker +at https://github.com/python-mode/python-mode/issues -Default keys +Contributing ============ -.. note:: Also you can see vim help ``:help PythonModeKeys`` - -============== ============= -Keys Command -============== ============= -**K** Show python docs --------------- ------------- -**** Rope autocomplete --------------- ------------- -**r** Run python --------------- ------------- -**b** Set, unset breakpoint -============== ============= - -.. note:: See also ``:help ropevim.txt`` - - -Commands -======== - -.. note:: Also you can see vim help ``:help PythonModeCommands`` - -==================== ============= -Command Description -==================== ============= -:Pydoc Show python documentation --------------------- ------------- -PyLintToggle Enable, disable pylint --------------------- ------------- -PyLintCheckerToggle Toggle code checker (pylint, pyflakes) --------------------- ------------- -PyLint Check current buffer --------------------- ------------- -Pyrun Run current buffer in python -==================== ============= - -.. 
note:: See also ``:help ropevim.txt`` +* Kirill Klenov (horneds@gmail.com) +* Bryce Guinta (https://github.com/brycepg) + +Also see the `AUTHORS` file. + +Development of python-mode happens at github: +https://github.com/python-mode/python-mode + +Please make a pull request to `development` branch and add yourself to +`AUTHORS`. + +Source Links +=================== +- `doc/pymode.txt + `__ + -- ``:help pymode`` +- `plugin/pymode.vim + `__ + -- python-mode VIM plugin +- `syntax/python.vim + `__ + -- python-mode ``python.vim`` VIM syntax +- `syntax/pyrex.vim + `__ + -- ``pyrex.vim`` VIM syntax (pyrex, Cython) +- `t/ + `__ + -- ``*.vim`` more python-mode VIM configuration +- `pymode/ + `__ + -- ``*.py`` -- python-mode Python module +- `pymode/libs/ + `__ + -- ``*.py`` -- `Python Libraries <#python-libraries>`__ + + +Python Libraries +------------------ +Vendored Python modules are located +mostly in +`pymode/libs/ `__. -F.A.Q. ====== +rope +====== +| PyPI: https://pypi.python.org/pypi/rope +| Src: https://github.com/python-rope/rope +| Docs: https://github.com/python-rope/rope/blob/master/docs/overview.rst +| Docs: https://github.com/python-rope/rope/blob/master/docs/library.rst -Rope completion is very slow ----------------------------- - -To work rope_ creates a service directory: ``.ropeproject``. -If ``g:pymode_rope_guess_project`` set (by default) and ``.ropeproject`` in current dir not found, rope scan ``.ropeproject`` on every dir in parent path. -If rope finded ``.ropeproject`` in parent dirs, rope set project for all child dir and scan may be slow for many dirs and files. - -Solutions: +======================== +ropemode +======================== +| PyPI: https://pypi.python.org/pypi/ropemode +| Src: https://github.com/python-rope/ropemode -- Disable ``g:pymode_rope_guess_project`` to make rope always create ``.ropeproject`` in current dir. -- Delete ``.ropeproject`` from dip parent dir to make rope create ``.ropeproject`` in current dir. 
-- Press ``po`` or ``:RopeOpenProject`` to make force rope create ``.ropeproject`` in current dir. +========= +ropevim +========= +| PyPI: https://pypi.python.org/pypi/ropevim +| Src: https://github.com/python-rope/ropevim +| Docs: https://github.com/python-rope/ropevim/blob/master/doc/ropevim.txt +======= +pylama +======= +| PyPI: https://pypi.python.org/pypi/pylama +| Src: https://github.com/klen/pylama +======== +pylint +======== +| PyPI: https://pypi.python.org/pypi/pylint +| Src: https://bitbucket.org/logilab/pylint +| Homepage: http://www.pylint.org/ +| Docs: http://docs.pylint.org/ +| Docs: http://docs.pylint.org/message-control.html +| Docs: http://docs.pylint.org/faq.html#message-control +| ErrCodes: http://pylint-messages.wikidot.com/all-codes +| ErrCodes: http://pylint-messages.wikidot.com/all-messages + +========== +pyflakes +========== +| PyPI: https://pypi.python.org/pypi/pyflakes +| Src: https://github.com/pyflakes/pyflakes +| ErrCodes: https://flake8.readthedocs.org/en/latest/warnings.html -Pylint check is very slow -------------------------- +====== +pep8 +====== +| PyPI: https://pypi.python.org/pypi/pep8 +| Src: http://github.com/jcrocholl/pep8 +| PEP 8: http://www.python.org/dev/peps/pep-0008/ +| PEP 8: http://legacy.python.org/dev/peps/pep-0008/ +| Docs: https://pep8.readthedocs.org/en/latest/ +| Docs: https://pep8.readthedocs.org/en/latest/intro.html#configuration +| ErrCodes: https://pep8.readthedocs.org/en/latest/intro.html#error-codes -In some projects pylint_ may check slowly, because it also scan imported modules if posible. -Try use pyflakes_, see ``:h 'pymode_lint_checker'``. +========= +autopep8 +========= +| PyPI: https://pypi.python.org/pypi/autopep8 +| Src: https://github.com/hhatto/autopep8 -.. note:: You may ``set exrc`` and ``set secure`` in your ``vimrc`` for auto set custom settings from ``.vimrc`` from your projects directories. 
- Example: On Flask projects I automaticly set ``g:pymode_lint_checker = "pyflakes"``, on django ``g:pymode_lint_cheker = "pylint"`` +======= +pep257 +======= +| PyPI: https://pypi.python.org/pypi/pep257 +| Src: http://github.com/GreenSteam/pep257 +| Docs: https://pep257.readthedocs.org/en/latest/ +| PEP 257: http://www.python.org/dev/peps/pep-0257/ +| ErrCodes: https://pep257.readthedocs.org/en/latest/error_codes.html +======= +mccabe +======= +| PyPI: https://pypi.python.org/pypi/mccabe +| Src: https://github.com/flintwork/mccabe +| Docs: https://en.wikipedia.org/wiki/Cyclomatic_complexity -Bugtracker -=========== +Vim Libraries +--------------- +Vendored Vim modules are located mostly in ``t/``. -If you have any suggestions, bug reports or -annoyances please report them to the issue tracker -at https://github.com/klen/python-mode/issues +====================== +Python syntax for vim +====================== +| Src: http://www.hlabs.spb.ru/vim/python.vim -Contributing -============ +===================== +PEP8 VIM indentation +===================== +| Src: http://github.com/hynek/vim-python-pep8-indent -Development of pylint-mode happens at github: https://github.com/klen/python-mode Copyright ========= -Copyright (C) 2011 Kirill Klenov (klen_) - - **Rope** - Copyright (C) 2006-2010 Ali Gholami Rudi - - Copyright (C) 2009-2010 Anton Gritsay - - **Pylint** - Copyright (C) 2003-2011 LOGILAB S.A. (Paris, FRANCE). - http://www.logilab.fr/ - - **Pyflakes**: - Copyright (c) 2005 Divmod, Inc. - http://www.divmod.com/ - +Copyright © 2013-2015 Kirill Klenov (klen_) License ======= Licensed under a `GNU lesser general public license`_. +If you like this plugin, I would very appreciated if you kindly send me a postcard :) +My address is here: "Russia, 143500, MO, Istra, pos. Severny 8-3" to "Kirill Klenov". +**Thanks for support!** .. _GNU lesser general public license: http://www.gnu.org/copyleft/lesser.html -.. _klen: http://klen.github.com/ -.. 
_pylint: http://www.logilab.org/857 -.. _pyflakes: http://pypi.python.org/pypi/pyflakes -.. _rope: http://rope.sourceforge.net/ +.. _klen: https://klen.github.com/ .. _pydoc: http://docs.python.org/library/pydoc.html .. _pathogen: https://github.com/tpope/vim-pathogen +.. _rope_: https://pypi.python.org/pypi/rope +.. _pylama_: https://github.com/klen/pylama +.. _pylint_: https://bitbucket.org/logilab/pylint +.. _pyflakes_: https://pypi.python.org/pypi/pyflakes +.. _autopep8_: https://github.com/hhatto/autopep8 +.. _pep257_: http://github.com/GreenSteam/pep257 +.. _mccabe_: https://github.com/flintwork/mccabe +.. _pythonvim: http://www.hlabs.spb.ru/vim/python.vim +.. _pep8_: http://github.com/jcrocholl/pep8 +.. _pep8indent: http://github.com/hynek/vim-python-pep8-indent +.. |logo| image:: https://raw.github.com/python-mode/python-mode/develop/logo.png diff --git a/Rakefile b/Rakefile new file mode 100644 index 00000000..63a3a361 --- /dev/null +++ b/Rakefile @@ -0,0 +1,11 @@ +#!/usr/bin/env rake + +task :ci => [:dump, :test] + +task :dump do + sh 'vim --version' +end + +task :test do + sh 'bundle exec vim-flavor test' +end diff --git a/after/ftplugin/pyrex.vim b/after/ftplugin/pyrex.vim new file mode 100644 index 00000000..61d43637 --- /dev/null +++ b/after/ftplugin/pyrex.vim @@ -0,0 +1 @@ +runtime after/ftplugin/python.vim diff --git a/after/ftplugin/python.vim b/after/ftplugin/python.vim index e8cea841..0dec7542 100644 --- a/after/ftplugin/python.vim +++ b/after/ftplugin/python.vim @@ -1,4 +1,58 @@ -" Fix omnifunc -if g:pymode && g:pymode_rope && g:pymode_rope_vim_completion - setlocal omnifunc=RopeOmni +if !g:pymode + finish endif + +if g:pymode_motion + + if !&magic + if g:pymode_warning + call pymode#error("Pymode motion requires `&magic` option. 
Enable them or disable g:pymode_motion") + endif + finish + endif + + nnoremap ]] :call pymode#motion#move('v^(classdef)s', '') + nnoremap [[ :call pymode#motion#move('v^(classdef)s', 'b') + nnoremap ]C :call pymode#motion#move('v^(classdef)s', '') + nnoremap [C :call pymode#motion#move('v^(classdef)s', 'b') + nnoremap ]M :call pymode#motion#move('^s*defs', '') + nnoremap [M :call pymode#motion#move('^s*defs', 'b') + + onoremap ]] :call pymode#motion#move('v^(classdef)s', '') + onoremap [[ :call pymode#motion#move('v^(classdef)s', 'b') + onoremap ]C :call pymode#motion#move('v^(classdef)s', '') + onoremap [C :call pymode#motion#move('v^(classdef)s', 'b') + onoremap ]M :call pymode#motion#move('^s*defs', '') + onoremap [M :call pymode#motion#move('^s*defs', 'b') + + vnoremap ]] :call pymode#motion#vmove('v^(classdef)s', '') + vnoremap [[ :call pymode#motion#vmove('v^(classdef)s', 'b') + vnoremap ]M :call pymode#motion#vmove('^s*defs', '') + vnoremap [M :call pymode#motion#vmove('^s*defs', 'b') + + onoremap C :call pymode#motion#select('^s*classs', 0) + onoremap aC :call pymode#motion#select('^s*classs', 0) + onoremap iC :call pymode#motion#select('^s*classs', 1) + vnoremap aC :call pymode#motion#select('^s*classs', 0) + vnoremap iC :call pymode#motion#select('^s*classs', 1) + + onoremap M :call pymode#motion#select('^s*defs', 0) + onoremap aM :call pymode#motion#select('^s*defs', 0) + onoremap iM :call pymode#motion#select('^s*defs', 1) + vnoremap aM :call pymode#motion#select('^s*defs', 0) + vnoremap iM :call pymode#motion#select('^s*defs', 1) + +endif + +if g:pymode_rope && g:pymode_rope_completion + + setlocal omnifunc=pymode#rope#completions + + if g:pymode_rope_completion_bind != "" + exe "inoremap " . g:pymode_rope_completion_bind . 
" =pymode#rope#complete(0)" + if tolower(g:pymode_rope_completion_bind) == '' + exe "inoremap =pymode#rope#complete(0)" + endif + end + +end diff --git a/after/indent/pyrex.vim b/after/indent/pyrex.vim new file mode 100644 index 00000000..ab2e54dd --- /dev/null +++ b/after/indent/pyrex.vim @@ -0,0 +1 @@ +runtime after/indent/python.vim diff --git a/after/indent/python.vim b/after/indent/python.vim new file mode 100644 index 00000000..98399b40 --- /dev/null +++ b/after/indent/python.vim @@ -0,0 +1,13 @@ +if !g:pymode || !g:pymode_indent + finish +endif + +setlocal nolisp +setlocal tabstop=4 +setlocal softtabstop=4 +setlocal shiftwidth=4 +setlocal shiftround +setlocal expandtab +setlocal autoindent +setlocal indentexpr=pymode#indent#get_indent(v:lnum) +setlocal indentkeys=!^F,o,O,<:>,0),0],0},=elif,=except diff --git a/autoload/pymode.vim b/autoload/pymode.vim index f9979820..723af9b5 100644 --- a/autoload/pymode.vim +++ b/autoload/pymode.vim @@ -1,4 +1,7 @@ -fun! pymode#Default(name, default) "{{{ +" Pymode core functions + +" DESC: Check variable and set default value if it not exists +fun! pymode#default(name, default) "{{{ if !exists(a:name) let {a:name} = a:default return 0 @@ -6,15 +9,43 @@ fun! pymode#Default(name, default) "{{{ return 1 endfunction "}}} -fun! pymode#QuickfixOpen(onlyRecognized, holdCursor, maxHeight, minHeight, jumpError) "{{{ +" DESC: Import python libs +fun! pymode#init(plugin_root, paths) "{{{ + + PymodePython import sys, vim + PymodePython sys.path.insert(0, vim.eval('a:plugin_root')) + PymodePython sys.path = vim.eval('a:paths') + sys.path + +endfunction "}}} + +" DESC: Show wide message +fun! pymode#wide_message(msg) "{{{ + let x=&ruler | let y=&showcmd + set noruler noshowcmd + redraw + echohl Debug | echo strpart("[Pymode] " . a:msg, 0, &columns-1) | echohl none + let &ruler=x | let &showcmd=y +endfunction "}}} + +" DESC: Show error +fun! pymode#error(msg) "{{{ + execute "normal \" + echohl ErrorMsg + echomsg "[Pymode]: error: " . 
a:msg + echohl None +endfunction "}}} + +" DESC: Open quickfix window +fun! pymode#quickfix_open(onlyRecognized, maxHeight, minHeight, jumpError) "{{{ let numErrors = len(filter(getqflist(), 'v:val.valid')) let numOthers = len(getqflist()) - numErrors if numErrors > 0 || (!a:onlyRecognized && numOthers > 0) + let num = winnr() botright copen exe max([min([line("$"), a:maxHeight]), a:minHeight]) . "wincmd _" if a:jumpError cc - elseif a:holdCursor + elseif num != winnr() wincmd p endif else @@ -22,54 +53,81 @@ fun! pymode#QuickfixOpen(onlyRecognized, holdCursor, maxHeight, minHeight, jumpE endif redraw if numOthers > 0 - echo printf('Quickfix: %d(+%d)', numErrors, numOthers) - else - echo printf('Quickfix: %d', numErrors) + call pymode#wide_message(printf('Quickfix: %d(+%d)', numErrors, numOthers)) + elseif numErrors > 0 + call pymode#wide_message(printf('Quickfix: %d', numErrors)) endif endfunction "}}} -fun! pymode#PlaceSigns() "{{{ - sign unplace * - for item in filter(getqflist(), 'v:val.bufnr != ""') - execute printf('sign place 1 line=%d name=%s buffer=%d', item.lnum, item.type, item.bufnr) - endfor +" DESC: Open temp buffer. +fun! pymode#tempbuffer_open(name) "{{{ + pclose + exe "botright 8new " . a:name + setlocal buftype=nofile bufhidden=delete noswapfile nowrap previewwindow + redraw endfunction "}}} -fun! pymode#CheckProgram(name, append) "{{{ - let name = 'g:' . a:name - if pymode#Default(name, a:name) - return 1 +" DESC: Remove unused whitespaces +fun! pymode#trim_whitespaces() "{{{ + if g:pymode_trim_whitespaces + let cursor_pos = getpos('.') + silent! %s/\s\+$// + call setpos('.', cursor_pos) endif - if !executable(eval(l:name)) - echoerr "Can't find '".eval(name)."'. Please set ".name .", or extend $PATH, ".a:append - return 0 - endif - return 1 endfunction "}}} -fun! pymode#TempBuffer() "{{{ - pclose | botright 8new - setlocal buftype=nofile bufhidden=delete noswapfile nowrap previewwindow - redraw -endfunction "}}} -fun! 
pymode#ShowStr(str) "{{{ - call pymode#TempBuffer() - put! =a:str - redraw - normal gg - wincmd p +fun! pymode#save() "{{{ + if &modifiable && &modified + try + noautocmd write + catch /E212/ + call pymode#error("File modified and I can't save it. Please save it manually.") + return 0 + endtry + endif + return expand('%') != '' endfunction "}}} -fun! pymode#ShowCommand(cmd) "{{{ - call pymode#TempBuffer() +fun! pymode#reload_buf_by_nr(nr) "{{{ + let cur = bufnr("") try - silent exec 'r!' . a:cmd - catch /.*/ - close - echoerr 'Command fail: '.a:cmd + exe "buffer " . a:nr + catch /E86/ + return endtry - redraw - normal gg - wincmd p + exe "e!" + exe "buffer " . cur +endfunction "}}} + +fun! pymode#buffer_pre_write() "{{{ + let b:pymode_modified = &modified +endfunction "}}} + +fun! pymode#buffer_post_write() "{{{ + if g:pymode_rope + if g:pymode_rope_regenerate_on_write && b:pymode_modified + call pymode#debug('regenerate') + call pymode#rope#regenerate() + endif + endif + if g:pymode_lint + if g:pymode_lint_unmodified || (g:pymode_lint_on_write && b:pymode_modified) + call pymode#debug('check code') + call pymode#lint#check() + endif + endif +endfunction "}}} + +fun! pymode#debug(msg) "{{{ + if g:pymode_debug + let g:pymode_debug += 1 + echom string(g:pymode_debug) . ': ' . string(a:msg) + endif +endfunction "}}} + +fun! pymode#quit() "{{{ + augroup pymode + au! * + augroup END endfunction "}}} diff --git a/autoload/pymode/breakpoint.vim b/autoload/pymode/breakpoint.vim index c4e67592..c3189aad 100644 --- a/autoload/pymode/breakpoint.vim +++ b/autoload/pymode/breakpoint.vim @@ -1,11 +1,51 @@ -fun! pymode#breakpoint#Set(lnum) "{{{ +fun! 
pymode#breakpoint#init() "{{{ + + if !g:pymode_breakpoint + return + endif + + if g:pymode_breakpoint_cmd == '' + let g:pymode_breakpoint_cmd = 'import pdb; pdb.set_trace() # XXX BREAKPOINT' + + if g:pymode_python == 'disable' + return + endif + + endif + + PymodePython << EOF + +from imp import find_module + +for module in ('wdb', 'pudb', 'ipdb'): + try: + find_module(module) + vim.command('let g:pymode_breakpoint_cmd = "import %s; %s.set_trace() # XXX BREAKPOINT"' % (module, module)) + break + except ImportError: + continue + +EOF + +endfunction "}}} + +fun! pymode#breakpoint#operate(lnum) "{{{ let line = getline(a:lnum) if strridx(line, g:pymode_breakpoint_cmd) != -1 normal dd else let plnum = prevnonblank(a:lnum) - call append(line('.')-1, repeat(' ', indent(plnum)).g:pymode_breakpoint_cmd) + if &expandtab + let indents = repeat(' ', indent(plnum)) + else + let indents = repeat("\t", plnum / &shiftwidth) + endif + + call append(line('.')-1, indents.g:pymode_breakpoint_cmd) normal k endif - if &modifiable && &modified | write | endif + + " Save file without any events + call pymode#save() + endfunction "}}} diff --git a/autoload/pymode/doc.vim b/autoload/pymode/doc.vim index f1e4270b..b89eb0e7 100644 --- a/autoload/pymode/doc.vim +++ b/autoload/pymode/doc.vim @@ -1,7 +1,37 @@ -fun! pymode#doc#Show(word) "{{{ +" Python-mode search by documentation +" +PymodePython import pymode + +fun! pymode#doc#find() "{{{ + " Extract the 'word' at the cursor, expanding leftwards across identifiers + " and the . operator, and rightwards across the identifier only. + " + " For example: + " import xml.dom.minidom + " ^ ! + " + " With the cursor at ^ this returns 'xml'; at ! it returns 'xml.dom'. + let l:line = getline(".") + let l:pre = l:line[:col(".") - 1] + let l:suf = l:line[col("."):] + let word = matchstr(pre, "[A-Za-z0-9_.]*$") . matchstr(suf, "^[A-Za-z0-9_]*") + call pymode#doc#show(word) +endfunction "}}} + +fun! 
pymode#doc#show(word) "{{{ if a:word == '' - echoerr "No name/symbol under cursor!" - else - call pymode#ShowCommand(g:pydoc . " " . escape(a:word, " ")) + call pymode#error("No name/symbol under cursor!") + return 0 + endif + + call pymode#tempbuffer_open('__doc__') + PymodePython pymode.get_documentation() + setlocal nomodifiable + setlocal nomodified + setlocal filetype=rst + if g:pymode_doc_vertical + wincmd L endif + wincmd p + endfunction "}}} diff --git a/autoload/pymode/folding.vim b/autoload/pymode/folding.vim new file mode 100644 index 00000000..3b29aebb --- /dev/null +++ b/autoload/pymode/folding.vim @@ -0,0 +1,274 @@ +" Python-mode folding functions + +" Notice that folding is based on single line so complex regular expressions +" that take previous line into consideration are not fit for the job. + +" Regex definitions for correct folding +let s:def_regex = g:pymode_folding_regex +let s:blank_regex = '^\s*$' +" Spyder, a very popular IDE for python has a template which includes +" '@author:' ; thus the regex below. +let s:decorator_regex = '^\s*@\(author:\)\@!' +let s:doc_begin_regex = '^\s*[uUrR]\=\%("""\|''''''\)' +let s:doc_end_regex = '\%("""\|''''''\)\s*$' +" This one is needed for the while loop to count for opening and closing +" docstrings. +let s:doc_general_regex = '\%("""\|''''''\)' +let s:doc_line_regex = '^\s*[uUrR]\=\("""\|''''''\).\+\1\s*$' +let s:symbol = matchstr(&fillchars, 'fold:\zs.') " handles multibyte characters +if s:symbol == '' + let s:symbol = ' ' +endif +" '''''''' + + +fun! 
pymode#folding#text() " {{{ + let fs = v:foldstart + while getline(fs) !~ s:def_regex && getline(fs) !~ s:doc_begin_regex + let fs = nextnonblank(fs + 1) + endwhile + if getline(fs) =~ s:doc_end_regex && getline(fs) =~ s:doc_begin_regex + let fs = nextnonblank(fs + 1) + endif + let line = getline(fs) + + let has_numbers = &number || &relativenumber + let nucolwidth = &fdc + has_numbers * &numberwidth + let windowwidth = winwidth(0) - nucolwidth - 6 + let foldedlinecount = v:foldend - v:foldstart + + " expand tabs into spaces + let onetab = strpart(' ', 0, &tabstop) + let line = substitute(line, '\t', onetab, 'g') + + let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount)) + let line = substitute(line, '[uUrR]\=\%("""\|''''''\)', '', '') + let fillcharcount = windowwidth - len(line) - len(foldedlinecount) + 1 + return line . ' ' . repeat(s:symbol, fillcharcount) . ' ' . foldedlinecount +endfunction "}}} + +fun! pymode#folding#expr(lnum) "{{{ + + let line = getline(a:lnum) + let indent = indent(a:lnum) + let prev_line = getline(a:lnum - 1) + let next_line = getline(a:lnum + 1) + + " Decorators {{{ + if line =~ s:decorator_regex + return ">".(indent / &shiftwidth + 1) + endif "}}} + + " Definition {{{ + if line =~ s:def_regex + " If indent of this line is greater or equal than line below + " and previous non blank line does not end with : (that is, is not a + " definition) + " Keep the same indentation + if indent(a:lnum) >= indent(a:lnum+1) && getline(prevnonblank(a:lnum)) !~ ':\s*$' + return '=' + endif + " Check if last decorator is before the last def + let decorated = 0 + let lnum = a:lnum - 1 + while lnum > 0 + if getline(lnum) =~ s:def_regex + break + elseif getline(lnum) =~ s:decorator_regex + let decorated = 1 + break + endif + let lnum -= 1 + endwhile + if decorated + return '=' + else + return ">".(indent / &shiftwidth + 1) + endif + endif "}}} + + " Docstrings {{{ + + " TODO: A while loop now counts the number of open and closed folding in + " 
order to determine if it is a closing or opening folding. + " It is working but looks like it is an overkill. + + " Notice that an effect of this is that other docstring matches will not + " be one liners. + if line =~ s:doc_line_regex + return "=" + endif + + if line =~ s:doc_begin_regex + " echom 'just entering' + if s:Is_opening_folding(a:lnum) + " echom 'entering at line ' . a:lnum + return ">".(indent / &shiftwidth + 1) + endif + endif + if line =~ s:doc_end_regex + if !s:Is_opening_folding(a:lnum) + " echom 'leaving at line ' . a:lnum + return "<".(indent / &shiftwidth + 1) + endif + endif "}}} + + " Nested Definitions {{{ + " Handle nested defs but only for files shorter than + " g:pymode_folding_nest_limit lines due to performance concerns + if line('$') < g:pymode_folding_nest_limit && indent(prevnonblank(a:lnum)) + let curpos = getpos('.') + try + let last_block = s:BlockStart(a:lnum) + let last_block_indent = indent(last_block) + + " Check if last class/def is not indented and therefore can't be + " nested. + if last_block_indent + call cursor(a:lnum, 0) + let next_def = searchpos(s:def_regex, 'nW')[0] + let next_def_indent = next_def ? indent(next_def) : -1 + let last_block_end = s:BlockEnd(last_block) + + " If the next def has greater indent than the previous def, it + " is nested one level deeper and will have its own fold. If + " the class/def containing the current line is on the first + " line it can't be nested, and if this block ends on the last + " line, it contains no trailing code that should not be + " folded. Finally, if the next non-blank line after the end of + " the previous def is less indented than the previous def, it + " is not part of the same fold as that def. Otherwise, we know + " the current line is at the end of a nested def. 
+ if next_def_indent <= last_block_indent && last_block > 1 && last_block_end < line('$') + \ && indent(nextnonblank(last_block_end)) >= last_block_indent + + " Include up to one blank line in the fold + if getline(last_block_end) =~ s:blank_regex + let fold_end = min([prevnonblank(last_block_end - 1), last_block_end]) + 1 + else + let fold_end = last_block_end + endif + if a:lnum == fold_end + return 's1' + else + return '=' + endif + endif + endif + finally + call setpos('.', curpos) + endtry + endif " }}} + + " Blank Line {{{ + if line =~ s:blank_regex + if prev_line =~ s:blank_regex + if indent(a:lnum + 1) == 0 && next_line !~ s:blank_regex && next_line !~ s:doc_general_regex + if s:Is_opening_folding(a:lnum) + " echom a:lnum + return "=" + else + " echom "not " . a:lnum + return 0 + endif + endif + return -1 + else + return '=' + endif + endif " }}} + + return '=' + +endfunction "}}} + +fun! s:BlockStart(lnum) "{{{ + " Note: Make sure to reset cursor position after using this function. + call cursor(a:lnum, 0) + + " In case the end of the block is indented to a higher level than the def + " statement plus one shiftwidth, we need to find the indent level at the + " bottom of that if/for/try/while/etc. block. + let last_def = searchpos(s:def_regex, 'bcnW')[0] + if last_def + let last_def_indent = indent(last_def) + call cursor(last_def, 0) + let next_stmt_at_def_indent = searchpos('\v^\s{'.last_def_indent.'}[^[:space:]#]', 'nW')[0] + else + let next_stmt_at_def_indent = -1 + endif + + " Now find the class/def one shiftwidth lower than the start of the + " aforementioned indent block. + if next_stmt_at_def_indent && next_stmt_at_def_indent < a:lnum + let max_indent = max([indent(next_stmt_at_def_indent) - &shiftwidth, 0]) + else + let max_indent = max([indent(prevnonblank(a:lnum)) - &shiftwidth, 0]) + endif + return searchpos('\v^\s{,'.max_indent.'}(def |class )\w', 'bcnW')[0] +endfunction "}}} + +fun! 
s:BlockEnd(lnum) "{{{ + " Note: Make sure to reset cursor position after using this function. + call cursor(a:lnum, 0) + return searchpos('\v^\s{,'.indent('.').'}\S', 'nW')[0] - 1 +endfunction "}}} + +function! s:Is_opening_folding(lnum) "{{{ + " Helper function to see if docstring is opening or closing + + " Cache the result so the loop runs only once per change + if get(b:, 'fold_changenr', -1) == changenr() + return b:fold_cache[a:lnum] "If odd then it is an opening + else + let b:fold_changenr = changenr() + let b:fold_cache = [] + endif + + let number_of_folding = 0 " To be analized if odd/even to inform if it is opening or closing. + let has_open_docstring = 0 " To inform is already has an open docstring. + let extra_docstrings = 0 " To help skipping ''' and """ which are not docstrings + + " The idea of this part of the function is to identify real docstrings and + " not just triple quotes (that could be a regular string). + " + " Iterater over all lines from the start until current line (inclusive) + for i in range(1, line('$')) + call add(b:fold_cache, number_of_folding % 2) + + let i_line = getline(i) + + if i_line =~ s:doc_line_regex + " echom "case 00 on line " . i + continue + endif + + if i_line =~ s:doc_begin_regex && ! has_open_docstring + " echom "case 01 on line " . i + " This causes the loop to continue if there is a triple quote which + " is not a docstring. + if extra_docstrings > 0 + let extra_docstrings = extra_docstrings - 1 + continue + else + let has_open_docstring = 1 + let number_of_folding = number_of_folding + 1 + endif + " If it is an end doc and has an open docstring. + elseif i_line =~ s:doc_end_regex && has_open_docstring + " echom "case 02 on line " . i + let has_open_docstring = 0 + let number_of_folding = number_of_folding + 1 + + elseif i_line =~ s:doc_general_regex + " echom "extra docstrings on line " . 
i + let extra_docstrings = extra_docstrings + 1 + endif + endfor + + call add(b:fold_cache, number_of_folding % 2) + + return b:fold_cache[a:lnum] +endfunction "}}} + +" vim: fdm=marker:fdl=0 diff --git a/autoload/pymode/indent.vim b/autoload/pymode/indent.vim new file mode 100644 index 00000000..efd41f29 --- /dev/null +++ b/autoload/pymode/indent.vim @@ -0,0 +1,186 @@ +" PEP8 compatible Python indent file +" Language: Python +" Maintainer: Hynek Schlawack +" Prev Maintainer: Eric Mc Sween (address invalid) +" Original Author: David Bustos (address invalid) +" Last Change: 2012-06-21 +" License: Public Domain + + +function! pymode#indent#get_indent(lnum) + + " First line has indent 0 + if a:lnum == 1 + return 0 + endif + + " If we can find an open parenthesis/bracket/brace, line up with it. + call cursor(a:lnum, 1) + let parlnum = s:SearchParensPair() + if parlnum > 0 + let parcol = col('.') + let closing_paren = match(getline(a:lnum), '^\s*[])}]') != -1 + if match(getline(parlnum), '[([{]\s*$', parcol - 1) != -1 + if closing_paren + return indent(parlnum) + else + return indent(parlnum) + &shiftwidth + endif + else + return parcol + endif + endif + + " Examine this line + let thisline = getline(a:lnum) + let thisindent = indent(a:lnum) + + " If the line starts with 'elif' or 'else', line up with 'if' or 'elif' + if thisline =~ '^\s*\(elif\|else\)\>' + let bslnum = s:BlockStarter(a:lnum, '^\s*\(if\|elif\)\>') + if bslnum > 0 + return indent(bslnum) + else + return -1 + endif + endif + + " If the line starts with 'except' or 'finally', line up with 'try' + " or 'except' + if thisline =~ '^\s*\(except\|finally\)\>' + let bslnum = s:BlockStarter(a:lnum, '^\s*\(try\|except\)\>') + if bslnum > 0 + return indent(bslnum) + else + return -1 + endif + endif + + " Examine previous line + let plnum = a:lnum - 1 + let pline = getline(plnum) + let sslnum = s:StatementStart(plnum) + + " If the previous line is blank, keep the same indentation + if pline =~ '^\s*$' + return -1 + 
endif + + " If this line is explicitly joined, find the first indentation that is a + " multiple of four and will distinguish itself from next logical line. + if pline =~ '\\$' + let maybe_indent = indent(sslnum) + &sw + let control_structure = '^\s*\(if\|while\|for\s.*\sin\|except\)\s*' + if match(getline(sslnum), control_structure) != -1 + " add extra indent to avoid E125 + return maybe_indent + &sw + else + " control structure not found + return maybe_indent + endif + endif + + " If the previous line ended with a colon and is not a comment, indent + " relative to statement start. + if pline =~ '^[^#]*:\s*\(#.*\)\?$' + return indent(sslnum) + &sw + endif + + " If the previous line was a stop-execution statement or a pass + if getline(sslnum) =~ '^\s*\(break\|continue\|raise\|return\|pass\)\>' + " See if the user has already dedented + if indent(a:lnum) > indent(sslnum) - &sw + " If not, recommend one dedent + return indent(sslnum) - &sw + endif + " Otherwise, trust the user + return -1 + endif + + " In all other cases, line up with the start of the previous statement. + return indent(sslnum) +endfunction + + +" Find backwards the closest open parenthesis/bracket/brace. +function! s:SearchParensPair() " {{{ + let line = line('.') + let col = col('.') + + " Skip strings and comments and don't look too far + let skip = "line('.') < " . (line - 50) . " ? dummy :" . + \ 'synIDattr(synID(line("."), col("."), 0), "name") =~? ' . 
+ \ '"string\\|comment\\|doctest"' + + " Search for parentheses + call cursor(line, col) + let parlnum = searchpair('(', '', ')', 'bW', skip) + let parcol = col('.') + + " Search for brackets + call cursor(line, col) + let par2lnum = searchpair('\[', '', '\]', 'bW', skip) + let par2col = col('.') + + " Search for braces + call cursor(line, col) + let par3lnum = searchpair('{', '', '}', 'bW', skip) + let par3col = col('.') + + " Get the closest match + if par2lnum > parlnum || (par2lnum == parlnum && par2col > parcol) + let parlnum = par2lnum + let parcol = par2col + endif + if par3lnum > parlnum || (par3lnum == parlnum && par3col > parcol) + let parlnum = par3lnum + let parcol = par3col + endif + + " Put the cursor on the match + if parlnum > 0 + call cursor(parlnum, parcol) + endif + return parlnum +endfunction " }}} + + +" Find the start of a multi-line statement +function! s:StatementStart(lnum) " {{{ + let lnum = a:lnum + while 1 + if getline(lnum - 1) =~ '\\$' + let lnum = lnum - 1 + else + call cursor(lnum, 1) + let maybe_lnum = s:SearchParensPair() + if maybe_lnum < 1 + return lnum + else + let lnum = maybe_lnum + endif + endif + endwhile +endfunction " }}} + + +" Find the block starter that matches the current line +function! s:BlockStarter(lnum, block_start_re) " {{{ + let lnum = a:lnum + let maxindent = 10000 " whatever + while lnum > 1 + let lnum = prevnonblank(lnum - 1) + if indent(lnum) < maxindent + if getline(lnum) =~ a:block_start_re + return lnum + else + let maxindent = indent(lnum) + " It's not worth going further if we reached the top level + if maxindent == 0 + return -1 + endif + endif + endif + endwhile + return -1 +endfunction " }}} diff --git a/autoload/pymode/lint.vim b/autoload/pymode/lint.vim index a1ca50d5..e7dba8b5 100644 --- a/autoload/pymode/lint.vim +++ b/autoload/pymode/lint.vim @@ -1,33 +1,101 @@ -function! 
pymode#lint#Check() - if g:pymode_lint == 0 | return | endif - if &modifiable && &modified - try - write - catch /E212/ - echohl Error | echo "File modified and I can't save it. PyLint cancel." | echohl None - return 0 - endtry - endif - exe "py ".g:pymode_lint_checker."()" - call setqflist(b:qf_list, 'r') - if g:pymode_lint_cwindow - call pymode#QuickfixOpen(0, 0, g:pymode_lint_maxheight, g:pymode_lint_minheight, g:pymode_lint_jump) +PymodePython from pymode.lint import code_check + +call pymode#tools#signs#init() +call pymode#tools#loclist#init() + + +fun! pymode#lint#auto() "{{{ + if !pymode#save() + return 0 endif - if g:pymode_lint_signs - call pymode#PlaceSigns() + PymodePython from pymode import auto + PymodePython auto() + cclose + call g:PymodeSigns.clear() + edit + call pymode#wide_message("AutoPep8 done.") +endfunction "}}} + + +fun! pymode#lint#show_errormessage() "{{{ + let loclist = g:PymodeLocList.current() + if loclist.is_empty() + return endif -endfunction -fun! pymode#lint#Toggle() "{{{ + let l = line('.') + if l == b:pymode_error_line + return + endif + let b:pymode_error_line = l + if has_key(loclist._messages, l) + call pymode#wide_message(loclist._messages[l]) + else + echo + endif +endfunction "}}} + + +fun! pymode#lint#toggle() "{{{ let g:pymode_lint = g:pymode_lint ? 0 : 1 if g:pymode_lint - echomsg "PyLint enabled." + call pymode#wide_message("Code checking is enabled.") + else + call pymode#wide_message("Code checking is disabled.") + end +endfunction "}}} + + +fun! pymode#lint#check() "{{{ + " DESC: Run checkers on current file. + " + let loclist = g:PymodeLocList.current() + + let b:pymode_error_line = -1 + + call loclist.clear() + + call pymode#wide_message('Code checking is running ...') + + PymodePython code_check() + + if loclist.is_empty() + call pymode#wide_message('Code checking is completed. 
No errors found.') + endif + + call g:PymodeSigns.refresh(loclist) + + call loclist.show() + + call pymode#lint#show_errormessage() + call pymode#wide_message('Found errors and warnings: ' . len(loclist._loclist)) + +endfunction " }}} + + +fun! pymode#lint#tick_queue() "{{{ + + python import time + python print time.time() + + if mode() == 'i' + if col('.') == 1 + call feedkeys("\\", "n") + else + call feedkeys("\\", "n") + endif else - echomsg "PyLint disabled." + call feedkeys("f\e", "n") endif endfunction "}}} -fun! pymode#lint#ToggleChecker() "{{{ - let g:pymode_lint_checker = g:pymode_lint_checker == "pylint" ? "pyflakes" : "pylint" - echomsg "PyLint checker: " . g:pymode_lint_checker + +fun! pymode#lint#stop() "{{{ + au! pymode CursorHold +endfunction "}}} + + +fun! pymode#lint#start() "{{{ + au! pymode CursorHold call pymode#lint#tick_queue() + call pymode#lint#tick_queue() endfunction "}}} diff --git a/autoload/pymode/motion.vim b/autoload/pymode/motion.vim new file mode 100644 index 00000000..67e99e6b --- /dev/null +++ b/autoload/pymode/motion.vim @@ -0,0 +1,97 @@ +" Python-mode motion functions + + +fun! pymode#motion#move(pattern, flags, ...) "{{{ + let cnt = v:count1 - 1 + let [line, column] = searchpos(a:pattern, a:flags . 'sW') + let indent = indent(line) + while cnt && line + let [line, column] = searchpos(a:pattern, a:flags . 'W') + if indent(line) == indent + let cnt = cnt - 1 + endif + endwhile + return [line, column] +endfunction "}}} + + +fun! pymode#motion#vmove(pattern, flags) range "{{{ + call cursor(a:lastline, 0) + let end = pymode#motion#move(a:pattern, a:flags) + call cursor(a:firstline, 0) + normal! v + call cursor(end) +endfunction "}}} + + +fun! pymode#motion#pos_le(pos1, pos2) "{{{ + return ((a:pos1[0] < a:pos2[0]) || (a:pos1[0] == a:pos2[0] && a:pos1[1] <= a:pos2[1])) +endfunction "}}} + + +fun! 
pymode#motion#select(pattern, inner) "{{{ + let cnt = v:count1 - 1 + let orig = getpos('.')[1:2] + let snum = s:BlockStart(orig[0], a:pattern) + if getline(snum) !~ a:pattern + return 0 + endif + let enum = s:BlockEnd(snum, indent(snum)) + while cnt + let lnum = search(a:pattern, 'nW') + if lnum + let enum = s:BlockEnd(lnum, indent(lnum)) + call cursor(enum, 1) + endif + let cnt = cnt - 1 + endwhile + if pymode#motion#pos_le([snum, 0], orig) && pymode#motion#pos_le(orig, [enum, 1]) + if a:inner + let snum = snum + 1 + let enum = prevnonblank(enum) + endif + + call cursor(snum, 1) + normal! v + call cursor(enum, len(getline(enum))) + endif +endfunction "}}} + + +fun! s:BlockStart(lnum, ...) "{{{ + let pattern = a:0 ? a:1 : '^\s*\(@\|class\s.*:\|def\s\)' + let lnum = a:lnum + 1 + let indent = 100 + while lnum + let lnum = prevnonblank(lnum - 1) + let test = indent(lnum) + let line = getline(lnum) + if line =~ '^\s*#' " Skip comments + continue + elseif !test " Zero-level regular line + return lnum + elseif test >= indent " Skip deeper or equal lines + continue + " Indent is strictly less at this point: check for def/class + elseif line =~ pattern && line !~ '^\s*@' + return lnum + endif + let indent = indent(lnum) + endwhile + return 0 +endfunction "}}} + + +fun! s:BlockEnd(lnum, ...) "{{{ + let indent = a:0 ? a:1 : indent(a:lnum) + let lnum = a:lnum + while lnum + let lnum = nextnonblank(lnum + 1) + if getline(lnum) =~ '^\s*#' | continue + elseif lnum && indent(lnum) <= indent + return lnum - 1 + endif + endwhile + return line('$') +endfunction "}}} +" vim: fdm=marker:fdl=0 diff --git a/autoload/pymode/rope.vim b/autoload/pymode/rope.vim new file mode 100644 index 00000000..a82a46d9 --- /dev/null +++ b/autoload/pymode/rope.vim @@ -0,0 +1,185 @@ +" Python-mode Rope support +" +PymodePython from pymode import rope + +call pymode#tools#loclist#init() + + +fun! pymode#rope#completions(findstart, base) + PymodePython rope.completions() +endfunction + +fun! 
pymode#rope#complete(dot) + if pumvisible() + return "\" + end + if a:dot + PymodePython rope.complete(True) + else + PymodePython rope.complete() + end + return pumvisible() ? "\\" : "" +endfunction + +fun! pymode#rope#complete_on_dot() "{{{ + if !exists("*synstack") + return "" + end + for group in map(synstack(line('.'), col('.') - 1), 'synIDattr(v:val, "name")') + for name in ['pythonString', 'pythonComment', 'pythonNumber', 'pythonDocstring'] + if group == name + return "" + endif + endfor + endfor + if g:pymode_rope_autoimport_import_after_complete + PymodePython rope.complete_check() + endif + return pymode#rope#complete(1) +endfunction "}}} + +fun! pymode#rope#goto_definition() + PymodePython rope.goto() +endfunction + + +fun! pymode#rope#organize_imports() + if !pymode#save() + return 0 + endif + call pymode#wide_message('Organize imports ... ') + PymodePython rope.organize_imports() +endfunction + + +fun! pymode#rope#find_it() + let loclist = g:PymodeLocList.current() + let loclist._title = "Occurrences" + call pymode#wide_message('Finding Occurrences ...') + PymodePython rope.find_it() + call loclist.show() +endfunction + + +fun! pymode#rope#show_doc() + let l:output = [] + + PymodePython rope.show_doc() + + if !empty(l:output) + call pymode#tempbuffer_open('__doc____rope__') + call append(0, l:output) + setlocal nomodifiable + setlocal nomodified + setlocal filetype=rst + wincmd p + end +endfunction + + +fun! pymode#rope#regenerate() "{{{ + call pymode#wide_message('Regenerate Rope cache ... ') + PymodePython rope.regenerate() +endfunction "}}} + + +fun! pymode#rope#new(...) "{{{ + PymodePython rope.new() +endfunction "}}} + + +fun! pymode#rope#rename() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.RenameRefactoring().run() +endfunction "}}} + +fun! pymode#rope#rename_module() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.RenameRefactoring(True).run() +endfunction "}}} + +fun! 
pymode#rope#extract_method() range "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.ExtractMethodRefactoring().run() +endfunction "}}} + +fun! pymode#rope#extract_variable() range "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.ExtractVariableRefactoring().run() +endfunction "}}} + +fun! pymode#rope#undo() "{{{ + PymodePython rope.undo() +endfunction "}}} + +fun! pymode#rope#redo() "{{{ + PymodePython rope.redo() +endfunction "}}} + +fun! pymode#rope#inline() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.InlineRefactoring().run() +endfunction "}}} + +fun! pymode#rope#move() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.MoveRefactoring().run() +endfunction "}}} + +fun! pymode#rope#signature() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.ChangeSignatureRefactoring().run() +endfunction "}}} + +fun! pymode#rope#use_function() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.UseFunctionRefactoring().run() +endfunction "}}} + +fun! pymode#rope#module_to_package() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.ModuleToPackageRefactoring().run() +endfunction "}}} + +fun! pymode#rope#autoimport(word) "{{{ + PymodePython rope.autoimport() +endfunction "}}} + +fun! pymode#rope#generate_function() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.GenerateElementRefactoring('function').run() +endfunction "}}} + +fun! pymode#rope#generate_class() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.GenerateElementRefactoring('class').run() +endfunction "}}} + +fun! 
pymode#rope#generate_package() "{{{ + if !pymode#save() + return 0 + endif + PymodePython rope.GenerateElementRefactoring('package').run() +endfunction "}}} diff --git a/autoload/pymode/run.vim b/autoload/pymode/run.vim index f06e8e7f..24c8729c 100644 --- a/autoload/pymode/run.vim +++ b/autoload/pymode/run.vim @@ -1,5 +1,99 @@ -" DESC: Save file if it modified and run python code -fun! pymode#run#Run() "{{{ - if &modifiable && &modified | write | endif - call pymode#ShowCommand(g:python . " " . expand("%:p")) +" The following lines set Vim's errorformat variable, to allow the +" quickfix window to show Python tracebacks properly. It is much +" easier to use let than set, because set requires many more +" characters to be escaped. This is much easier to read and +" maintain. % escapes are still needed however before any regex meta +" characters. Hence \S (non-whitespace) becomes %\S etc. Note that +" * becomes %#, so .* (match any character) becomes %.%# Commas must +" also be escaped, with a backslash (\,). See the Vim help on +" quickfix for details. +" +" Python errors are multi-lined. They often start with 'Traceback', so +" we want to capture that (with +G) and show it in the quickfix window +" because it explains the order of error messages. +let s:efm = '%+GTraceback%.%#,' + +" The error message itself starts with a line with 'File' in it. There +" are a couple of variations, and we need to process a line beginning +" with whitespace followed by File, the filename in "", a line number, +" and optional further text. %E here indicates the start of a multi-line +" error message. The %\C at the end means that a case-sensitive search is +" required. +let s:efm .= '%E File "%f"\, line %l\,%m%\C,' +let s:efm .= '%E File "%f"\, line %l%\C,' + +" The possible continutation lines are idenitifed to Vim by %C. We deal +" with these in order of most to least specific to ensure a proper +" match. 
A pointer (^) identifies the column in which the error occurs +" (but will not be entirely accurate due to indention of Python code). +let s:efm .= '%C%p^,' + +" Any text, indented by more than two spaces contain useful information. +" We want this to appear in the quickfix window, hence %+. +let s:efm .= '%+C %.%#,' +let s:efm .= '%+C %.%#,' + +" The last line (%Z) does not begin with any whitespace. We use a zero +" width lookahead (\&) to check this. The line contains the error +" message itself (%m) +let s:efm .= '%Z%\S%\&%m,' + +" We can ignore any other lines (%-G) +let s:efm .= '%-G%.%#' + +PymodePython from pymode.run import run_code + + +" DESC: Run python code +fun! pymode#run#code_run(line1, line2) "{{{ + + let l:output = [] + let l:traceback = [] + call setqflist([]) + + call pymode#wide_message("Code running ...") + + try + + PymodePython run_code() + + if len(l:output) + call pymode#tempbuffer_open('__run__') + call append(line('$'), l:output) + normal dd + wincmd p + else + call pymode#wide_message("No output.") + endif + + cexpr "" + + let l:_efm = &efm + + let &efm = s:efm + + cgetexpr(l:traceback) + + " If a range is run (starting other than at line 1), fix the reported error line numbers for + " the current buffer + if a:line1 > 1 + let qflist = getqflist() + for i in qflist + if i.bufnr == bufnr("") + let i.lnum = i.lnum - 1 + a:line1 + endif + endfor + call setqflist(qflist) + endif + + call pymode#quickfix_open(0, g:pymode_quickfix_maxheight, g:pymode_quickfix_maxheight, 0) + + let &efm = l:_efm + + catch /E234/ + + echohl Error | echo "Run-time error." | echohl none + + endtry + endfunction "}}} diff --git a/autoload/pymode/tools/loclist.vim b/autoload/pymode/tools/loclist.vim new file mode 100644 index 00000000..18b6d294 --- /dev/null +++ b/autoload/pymode/tools/loclist.vim @@ -0,0 +1,81 @@ +let g:PymodeLocList= {} + + +fun! pymode#tools#loclist#init() "{{{ + return +endfunction "}}} + + +fun! 
g:PymodeLocList.init(raw_list) "{{{ + let obj = copy(self) + let loc_list = filter(copy(a:raw_list), 'v:val["valid"] == 1') + call obj.clear() + let obj._title = 'CodeCheck' + return obj +endfunction "}}} + + +fun! g:PymodeLocList.current() "{{{ + if !exists("b:pymode_loclist") + let b:pymode_loclist = g:PymodeLocList.init([]) + endif + return b:pymode_loclist +endfunction "}}} + + +fun! g:PymodeLocList.is_empty() "{{{ + return empty(self._loclist) +endfunction "}}} + + +fun! g:PymodeLocList.clear() "{{{ + let self._loclist = [] + let self._messages = {} + let self._name = expand('%:t') +endfunction "}}} + + +fun! g:PymodeLocList.extend(raw_list) "{{{ + call extend(self._loclist, a:raw_list) + for issue in a:raw_list + let self._messages[issue.lnum] = issue.text + endfor + return self +endfunction "}}} + + +fun! g:PymodeLocList.filter(filters) "{{{ + let loclist = [] + for error in self._loclist + let passes_filters = 1 + for key in keys(a:filters) + if get(error, key, '') !=? a:filters[key] + let passes_filters = 0 + break + endif + endfor + + if passes_filters + call add(loclist, error) + endif + + endfor + return loclist +endfunction "}}} + + +fun! g:PymodeLocList.show() "{{{ + call setloclist(0, self._loclist) + if self.is_empty() + lclose + elseif g:pymode_lint_cwindow + let num = winnr() + lopen + setl nowrap + execute max([min([line("$"), g:pymode_quickfix_maxheight]), g:pymode_quickfix_minheight]) . "wincmd _" + if num != winnr() + call setwinvar(winnr(), 'quickfix_title', self._title . ' <' . self._name . '>') + exe num . "wincmd w" + endif + end +endfunction "}}} diff --git a/autoload/pymode/tools/signs.vim b/autoload/pymode/tools/signs.vim new file mode 100644 index 00000000..3487cf85 --- /dev/null +++ b/autoload/pymode/tools/signs.vim @@ -0,0 +1,57 @@ +let g:PymodeSigns = {} + + +fun! pymode#tools#signs#init() "{{{ + call g:PymodeSigns.setup() +endfunction "}}} + + +fun! 
g:PymodeSigns.enabled() "{{{ + return (g:pymode_lint_signs && has('signs')) +endfunction "}}} + + +fun! g:PymodeSigns.setup() "{{{ + if self.enabled() + execute 'sign define PymodeW text=' . g:pymode_lint_todo_symbol . " texthl=Todo" + execute 'sign define PymodeD text=' . g:pymode_lint_docs_symbol . " texthl=String" + execute 'sign define PymodeC text=' . g:pymode_lint_comment_symbol . " texthl=Comment" + execute 'sign define PymodeR text=' . g:pymode_lint_visual_symbol . " texthl=Visual" + execute 'sign define PymodeE text=' . g:pymode_lint_error_symbol . " texthl=Error" + execute 'sign define PymodeI text=' . g:pymode_lint_info_symbol . " texthl=Info" + execute 'sign define PymodeF text=' . g:pymode_lint_pyflakes_symbol . " texthl=Info" + endif + let self._sign_ids = [] + let self._next_id = 10000 + let self._messages = {} +endfunction "}}} + + +fun! g:PymodeSigns.refresh(loclist) "{{{ + if self.enabled() + call self.clear() + call self.place(a:loclist) + endif +endfunction "}}} + + +fun! g:PymodeSigns.clear() "{{{ + let ids = copy(self._sign_ids) + for i in ids + execute "sign unplace " . i + call remove(self._sign_ids, index(self._sign_ids, i)) + endfor +endfunction "}}} + + +fun! g:PymodeSigns.place(loclist) "{{{ + let seen = {} + for issue in a:loclist._loclist + if !has_key(seen, issue.lnum) + let seen[issue.lnum] = 1 + call add(self._sign_ids, self._next_id) + execute printf('sign place %d line=%d name=%s buffer=%d', self._next_id, issue.lnum, "Pymode".issue.type[0], issue.bufnr) + let self._next_id += 1 + endif + endfor +endfunction "}}} diff --git a/autoload/pymode/troubleshooting.vim b/autoload/pymode/troubleshooting.vim new file mode 100644 index 00000000..915a5c5e --- /dev/null +++ b/autoload/pymode/troubleshooting.vim @@ -0,0 +1,89 @@ +" DESC: Get debug information about pymode problem +fun! 
pymode#troubleshooting#test() "{{{ + new + setlocal buftype=nofile bufhidden=delete noswapfile nowrap + + let os = "Unknown" + if has('win16') || has('win32') || has('win64') + let os = "Windows" + else + let os = substitute(system('uname'), "\n", "", "") + endif + + if !pymode#default('g:pymode_init', 1) + call pymode#init(expand(':p:h'), g:pymode_paths) + call pymode#virtualenv#init() + call pymode#breakpoint#init() + endif + + call append('0', ['Pymode diagnostic', + \ '===================', + \ 'VIM:' . v:version . ', OS: ' . os .', multi_byte:' . has('multi_byte') . ', pymode: ' . g:pymode_version . ', pymode-python: ' . g:pymode_python, + \ '']) + + if !exists('#filetypeplugin') + call append('$', ['WARNING: ', 'Python-mode required :filetype plugin indent on', '']) + endif + + call append('$', ['+python: ' . has('python')]) + call append('$', ['+python3: ' . has('python3'), '']) + + if g:pymode_python == 'disable' + + if !has('python') && !has('python3') + + call append('$', ['WARNING: Python-mode required vim compiled with +python or +python3.', + \ '"lint, rope, run, doc, virtualenv" features disabled.', '']) + + else + + call append('$', ['WARNING: Python is disabled by `pymode_python` option.', + \ '"lint, rope, run, doc, virtualenv" features disabled.', '']) + + endif + + else + + call append('$', 'VIM python paths:') + call append('$', '-----------------') + PymodePython << EOF +import vim +vim.command('let l:output = %s' % repr(sys.path)) +EOF + call append('$', output) + call append('$', '') + + endif + + call append('$', 'Pymode variables:') + call append('$', '-------------------') + call append('$', 'let pymode = ' . string(g:pymode)) + call append('$', 'let pymode_breakpoint = ' . string(g:pymode_breakpoint)) + call append('$', 'let pymode_breakpoint_bind = ' . string(g:pymode_breakpoint_bind)) + call append('$', 'let pymode_doc = ' . string(g:pymode_doc)) + call append('$', 'let pymode_doc_bind = ' . 
string(g:pymode_doc_bind)) + call append('$', 'let pymode_folding = ' . string(g:pymode_folding)) + call append('$', 'let pymode_indent = ' . string(g:pymode_indent)) + call append('$', 'let pymode_lint = ' . string(g:pymode_lint)) + call append('$', 'let pymode_lint_checkers = ' . string(g:pymode_lint_checkers)) + call append('$', 'let pymode_lint_cwindow = ' . string(g:pymode_lint_cwindow)) + call append('$', 'let pymode_lint_ignore = ' . string(g:pymode_lint_ignore)) + call append('$', 'let pymode_lint_message = ' . string(g:pymode_lint_message)) + call append('$', 'let pymode_lint_on_fly = ' . string(g:pymode_lint_on_fly)) + call append('$', 'let pymode_lint_on_write = ' . string(g:pymode_lint_on_write)) + call append('$', 'let pymode_lint_select = ' . string(g:pymode_lint_select)) + call append('$', 'let pymode_lint_signs = ' . string(g:pymode_lint_signs)) + call append('$', 'let pymode_motion = ' . string(g:pymode_motion)) + call append('$', 'let pymode_options = ' . string(g:pymode_options)) + call append('$', 'let pymode_paths = ' . string(g:pymode_paths)) + call append('$', 'let pymode_quickfix_maxheight = ' . string(g:pymode_quickfix_maxheight)) + call append('$', 'let pymode_quickfix_minheight = ' . string(g:pymode_quickfix_minheight)) + call append('$', 'let pymode_rope = ' . string(g:pymode_rope)) + call append('$', 'let pymode_run = ' . string(g:pymode_run)) + call append('$', 'let pymode_run_bind = ' . string(g:pymode_run_bind)) + call append('$', 'let pymode_trim_whitespaces = ' . string(g:pymode_trim_whitespaces)) + call append('$', 'let pymode_virtualenv = ' . string(g:pymode_virtualenv)) + call append('$', 'let pymode_virtualenv_enabled = ' . string(g:pymode_virtualenv_enabled)) + call append('$', 'let pymode_virtualenv_path = ' . 
string(g:pymode_virtualenv_path)) + +endfunction "}}} diff --git a/autoload/pymode/virtualenv.vim b/autoload/pymode/virtualenv.vim index 536e64ca..7401e94b 100644 --- a/autoload/pymode/virtualenv.vim +++ b/autoload/pymode/virtualenv.vim @@ -1,18 +1,17 @@ -fun! pymode#virtualenv#Activate() "{{{ +" Support virtualenv +" +PymodePython from pymode.virtualenv import enable_virtualenv - for env in g:pymode_virtualenv_enabled - if env == $VIRTUAL_ENV - return 0 - endif - endfor +fun! pymode#virtualenv#init() "{{{ + if !g:pymode_virtualenv || g:pymode_virtualenv_path == "" + return + endif - call add(g:pymode_virtualenv_enabled, $VIRTUAL_ENV) - echomsg "Enabled virtualenv: " . $VIRTUAL_ENV + PymodePython enable_virtualenv() -python << EOF -ve_dir = os.environ['VIRTUAL_ENV'] -ve_dir in sys.path or sys.path.insert(0, ve_dir) -activate_this = os.path.join(os.path.join(ve_dir, 'bin'), 'activate_this.py') -execfile(activate_this, dict(__file__=activate_this)) -EOF +endfunction "}}} + +fun! pymode#virtualenv#activate(path) "{{{ + let g:pymode_virtualenv_path = a:path + call pymode#virtualenv#init() endfunction "}}} diff --git a/debug.vim b/debug.vim new file mode 100644 index 00000000..c7d32661 --- /dev/null +++ b/debug.vim @@ -0,0 +1,13 @@ +" Use this settings for testing the plugin. +" Run vim with command +" +" $ vim -u debug.py +" +" Only python-mode will be loaded. + + +execute('set rtp+='. expand(':p:h')) +set rtp -=$HOME/.vim +set rtp -=$HOME/.vim/after +set nocp +syntax enable diff --git a/doc/pymode.txt b/doc/pymode.txt index 4a920782..e5cc2806 100644 --- a/doc/pymode.txt +++ b/doc/pymode.txt @@ -1,362 +1,760 @@ -*pymode.txt* *python-mode.txt* Python-mode for vim! 
+*pymode.txt* *python-mode.txt* *pymode* *python-mode* + + ____ _ _ ____ _ _ _____ _ _ __ __ _____ ____ ____ ~ + ( _ \( \/ )(_ _)( )_( )( _ )( \( )___( \/ )( _ )( _ \( ___) ~ + )___/ \ / )( ) _ ( )(_)( ) ((___)) ( )(_)( )(_) ))__) ~ + (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~ + + + Version: 0.9.2 ============================================================================== -CONTENTS *Python-mode-contents* +DISCLAIMER: + python-mode is under heavy development. You are using our latest stable + release, which has some known bugs. We recommend you to use the latest + development version that can be found on the `develop` branch of our git + repository. - 1.Intro...................................|PythonMode| - 2.Options.................................|PythonModeOptions| - 2.1.Customisation details.............|PythonModeOptionsDetails| - 3.Default Keys............................|PythonModeKeys| - 4.Commands................................|PythonModeCommands| - 5.FAQ.....................................|PythonModeFAQ| - 6.Credits.................................|PythonModeCredits| - 7.License.................................|PythonModeLicense| + For clean and straightforward install instructions, please visit: + https://github.com/python-mode/python-mode#how-to-install + +============================================================================== +CONTENTS *pymode-contents* + + 1.Intro.......................................................|pymode-intro| + 2.Common functionality.......................................|pymode-common| + 2.1 Python version...............................|pymode-python-version| + 2.2 Python indentation...................................|pymode-indent| + 2.3 Python folding......................................|pymode-folding| + 2.4 Vim motion...........................................|pymode-motion| + 2.5 Show documentation............................|pymode-documentation| + 2.6 Support 
virtualenv...............................|pymode-virtualenv| + 2.7 Run code................................................|pymode-run| + 2.8 Breakpoints.....................................|pymode-breakpoints| + 3. Code checking...............................................|pymode-lint| + 3.1 Code checkers options..........................|pymode-lint-options| + 4. Rope support................................................|pymode-rope| + 4.1 Code completion..................................|pymode-completion| + 4.2 Find definition.................................|pymode-rope-findit| + 4.3 Refactoring................................|pymode-rope-refactoring| + 4.4 Undo/Redo changes.................................|pymode-rope-undo| + 5. Syntax....................................................|pymode-syntax| + 6.FAQ...........................................................|pymode-faq| + 7.Credits...................................................|pymode-credits| + 8.License...................................................|pymode-license| ============================================================================== 1. Intro ~ - *PythonMode* + *pymode-intro* + +Python-mode is a vim plugin that allows you to use the pylint, rope, and pydoc +libraries in vim to provide features like python code bug checking, +refactoring, and some other useful things. + +This plugin allows you to create python code in vim very easily. There is no +need to install the pylint or rope libraries on your system. + +Python-mode contains all you need to develop python applications in Vim. + +Features: *pymode-features* + +- Support Python version 2.6+ and 3.2+ +- Syntax highlighting +- Virtualenv support +- Run python code (``r``) +- Add/remove breakpoints (``b``) +- Improved Python indentation +- Python folding +- Python motions and operators (``]]``, ``3[[``, ``]]M``, ``vaC``, ``viM``, + ``daC``, ``ciM``, ...) +- Code checking (pylint_, pyflakes_, pylama_, ...) 
that can be run + simultaneously (``:PymodeLint``) +- Autofix PEP8 errors (``:PymodeLintAuto``) +- Search in python documentation (``K``) +- Code refactoring (rope_) +- Strong code completion (rope_) +- Go to definition (``g`` for `:RopeGotoDefinition`) +- And more, more ... -Python-mode is a vim plugin that allows you to use the pylint, rope, pydoc -library in vim to provide features like python code looking for bugs, -refactoring and some other usefull things. -This plugin allow you create python code in vim very easily. There is no need -to install the pylint or rope library on your system. +============================================================================== +2. Common functionality ~ + *pymode-common* +This script provides the following options that can customizes the behavior of +PythonMode. These options should be set in your |vimrc|. -============================================================================== -2. Options ~ - *PythonModeOptions* + Below shows the default values. - Note: - Pylint options (ex. disable messages) may be defined in '$HOME/pylint.rc' - See pylint documentation. -The script provides the following options that can customise the behaviour the -PythonMode. These options should be set in your vimrc. +Turn on the whole plugin *'g:pymode'* +> + let g:pymode = 1 -|'pymode_doc'| Turns off the documentation script +Turn off plugin's warnings *'g:pymode_warnings'* +> + let g:pymode_warnings = 1 -|'pymode_doc_key'| Key for show documentation +Add paths to `sys.path` *'g:pymode_paths'* +Value is list of path's strings. 
+> + let g:pymode_paths = [] -|'pydoc'| Command for run pydoc +Trim unused white spaces on save *'g:pymode_trim_whitespaces'* +> + let g:pymode_trim_whitespaces = 1 -|'pymode_run'| Turns off the run code script +Setup default python options *'g:pymode_options'* +> + let g:pymode_options = 1 -|'pymode_run_key'| Key for run python code +If this option is set to 1, pymode will enable the following options for +python buffers: > -|'pymode_lint'| Turns off pylint script + setlocal complete+=t + setlocal formatoptions-=t + if v:version > 702 && !&relativenumber + setlocal number + endif + setlocal nowrap + setlocal textwidth=79 + setlocal commentstring=#%s + setlocal define=^\s*\\(def\\\\|class\\) -|'pymode_lint_checker'| Switch code checkers (pylint, pyflakes) +Setup max line length *'g:pymode_options_max_line_length'* +> + let g:pymode_options_max_line_length = 79 -|'pymode_lint_config'| Filepath to pylinc configuration +Enable colorcolumn display at max_line_length *'g:pymode_options_colorcolumn'* +> + let g:pymode_options_colorcolumn = 1 -|'pymode_lint_write'| Check code every save +Setup pymode |quickfix| window -|'pymode_lint_cwindow'| Show cwindow + *'g:pymode_quickfix_maxheight'* *'g:pymode_quickfix_minheight'* +> + let g:pymode_quickfix_minheight = 3 + let g:pymode_quickfix_maxheight = 6 -|'pymode_lint_signs'| Place signs +------------------------------------------------------------------------------ +2.1. Python version ~ + *pymode-python-version* -|'pymode_lint_jump'| Auto jump on first error +By default pymode looks for current python version supported in your Vim. +You could choose prefer version, but value will be tested on loading. -|'pymode_lint_minheight'| Minimal height of pylint error window + *'g:pymode_python'* +> + let g:pymode_python = 'python' -|'pymode_lint_maxheight'| Maximal height of pylint error window +Values are `python`, `python3`, `disable`. If value set to `disable` most +python-features of **pymode** will be disabled. 
-|'pymode_rope'| Turns off rope script +Set value to `python3` if you are working with python3 projects. You could use +|exrc| -|'pymode_breakpoint'| Turns off breakpoint script +------------------------------------------------------------------------------ +2.2 Python indentation ~ + *pymode-indent* -|'pymode_breakpoint_key'| Key for breakpoint +Pymode supports PEP8-compatible python indent. +Enable pymode indentation *'g:pymode_indent'* +> + let g:pymode_indent = 1 -|'pymode_utils'| Turns off utils +------------------------------------------------------------------------------ +2.3 Python folding ~ + *pymode-folding* -|'pymode_virtualenv'| Turns off virtualenv +Fast and usual python folding in Vim. +Enable pymode folding *'g:pymode_folding'* +> + let g:pymode_folding = 1 -|'pymode_utils_whitespaces'| Remove unused whitespaces +------------------------------------------------------------------------------ +2.4 Vim motion ~ + *pymode-motion* -|'pymode_syntax'| Turns off the custom syntax highlighting +Support Vim motion (See |operator|) for python objects (such as functions, +class and methods). -|'pymode_options_indent'| Set default pymode options for - python indentation +`C` — means class +`M` — means method or function + *pymode-motion-keys* -|'pymode_options_fold'| Set default pymode options for - python folding +================ ============================ +Key Command +================ ============================ +[[ Jump to previous class or function (normal, visual, operator modes) +]] Jump to next class or function (normal, visual, operator modes) +[M Jump to previous class or method (normal, visual, operator modes) +]M Jump to next class or method (normal, visual, operator modes) +aC Select a class. Ex: vaC, daC, yaC, caC (normal, operator modes) +iC Select inner class. Ex: viC, diC, yiC, ciC (normal, operator modes) +aM Select a function or method. Ex: vaM, daM, yaM, caM (normal, operator modes) +iM Select inner function or method. 
Ex: viM, diM, yiM, ciM (normal, operator modes) +================ ============================ -|'pymode_options_other'| Set default pymode options for - python codding +Enable pymode-motion *'g:pymode_motion'* +> + let g:pymode_motion = 1 +------------------------------------------------------------------------------ +2.5 Show documentation ~ + *pymode-documentation* - Note: - Also see |ropevim.txt| +Pymode could show documentation for current word by `pydoc`. +Commands: +*:PymodeDoc* — show documentation ------------------------------------------------------------------------------- -2.1. Customisation details ~ - *PythonModeOptionsDetails* +Turns on the documentation script *'g:pymode_doc'* +> + let g:pymode_doc = 1 -To enable any of the below options you should put the given line in your -'$HOME/.vimrc'. See |vimrc-intro|. +Bind keys to show documentation for current word (selection) + *'g:pymode_doc_bind'* +> + let g:pymode_doc_bind = 'K' ------------------------------------------------------------------------------ - *'pymode_doc'* -Values: 0 or 1. -Default: 1. +2.6 Support virtualenv ~ + *pymode-virtualenv* -If this option is set to 0 then docs script is disabled. +Commands: +*:PymodeVirtualenv* -- Activate virtualenv (path can be absolute or +relative to current working directory) ------------------------------------------------------------------------------- - *'pymode_doc_key'* -Default: 'K'. +Enable automatic virtualenv detection *'g:pymode_virtualenv'* +> + let g:pymode_virtualenv = 1 -Set key for show python documentation. +Set path to virtualenv manually *'g:pymode_virtualenv_path'* +> + let g:pymode_virtualenv_path = $VIRTUAL_ENV ------------------------------------------------------------------------------ - *'pydoc'* -Default: 'pydoc'. +2.7 Run code ~ + *pymode-run* -Set command for documentation search. 
+Commands: +*:PymodeRun* -- Run current buffer or selection ------------------------------------------------------------------------------- - *'pymode_run'* -Values: 0 or 1. -Default: 1. +Turn on the run code script *'g:pymode_run'* +> + let g:pymode_run = 1 -If this option is set to 0 then run script is disabled. +Binds keys to run python code *'g:pymode_run_bind'* +> + let g:pymode_run_bind = 'r' ------------------------------------------------------------------------------ - *'pymode_run_key'* -Default: 'r'. +2.8 Breakpoints ~ + *pymode-breakpoints* -Set key for run python code. +Pymode automatically detects available debugger (like pdb, ipdb, pudb) and user +can set/unset breakpoint with one key and without code checking and etc. ------------------------------------------------------------------------------- - *'pymode_lint'* -Values: 0 or 1. -Default: 1. +Enable functionality *'g:pymode_breakpoint'* +> + let g:pymode_breakpoint = 1 -If this option is set to 0 then pylint script is disabled. +Bind keys +> + let g:pymode_breakpoint_bind = 'b' ------------------------------------------------------------------------------- - *'pymode_lint_checker'* -Values: "pylint" or "pyflakes" -Default: "pylint". +Manually set breakpoint command (leave empty for automatic detection) +> + let g:pymode_breakpoint_cmd = '' -This option set code checker. ------------------------------------------------------------------------------- - *'pymode_lint_config'* -Values: 'Path to pylint configuration file' -Default: "$HOME/.pylintrc" +============================================================================== +3. Code checking ~ + *pymode-lint* -If this option is set path to pylint configuration. If configuration not found -uses file 'pylintrc' from python-mode sources. +Pymode supports `pylint`, `pep257`, `pep8`, `pyflakes`, `mccabe` code +checkers. You could run several similar checkers. 
------------------------------------------------------------------------------- - *'pymode_lint_write'* -Values: 0 or 1. -Default: 1. + Pymode uses Pylama library for code checking. Many options like skip + files, errors and etc could be defined in `pylama.ini` file or modelines. + Check Pylama documentation for details. -If this option is set to 0 then pylint auto check every save is disabled. + Pylint options (ex. disable messages) may be defined in `$HOME/pylint.rc` + See pylint documentation. ------------------------------------------------------------------------------- - *'pymode_lint_cwindow'* -Values: 0 or 1. -Default: 1. +Commands: +*:PymodeLint* -- Check code in current buffer +*:PymodeLintToggle* -- Toggle code checking +*:PymodeLintAuto* -- Fix PEP8 errors in current buffer automatically -If this option is set to 0 then pylint not show cwindow. +Turn on code checking *'g:pymode_lint'* +> + let g:pymode_lint = 1 ------------------------------------------------------------------------------- - *'pymode_lint_signs'* -Values: 0 or 1. -Default: 1. +Check code on every save (if file has been modified) *'g:pymode_lint_on_write'* +> + let g:pymode_lint_on_write = 1 -If this option is set to 0 then pylint not place error signs. +Check code on every save (every) *'g:pymode_lint_unmodified'* +> + let g:pymode_lint_unmodified = 0 ------------------------------------------------------------------------------- - *'pymode_lint_jump'* -Values: 0 or 1. -Default: 0. +Check code when editing (on the fly) *'g:pymode_lint_on_fly'* +> + let g:pymode_lint_on_fly = 0 -If this option is set to 0 then pylint not jump on first error. +Show error message if cursor placed at the error line *'g:pymode_lint_message'* +> + let g:pymode_lint_message = 1 ------------------------------------------------------------------------------- - *'pymode_lint_minheight'* -Values: int -Default: 3. 
+Default code checkers (you could set several) *'g:pymode_lint_checkers'* +> + let g:pymode_lint_checkers = ['pyflakes', 'pep8', 'mccabe'] -Set minimal height for pylint cwindow +Values may be chosen from: `pylint`, `pep8`, `mccabe`, `pep257`, `pyflakes`. ------------------------------------------------------------------------------- - *'pymode_lint_maxheight'* -Values: int -Default: 6. +Skip errors and warnings *'g:pymode_lint_ignore'* +E.g. "E501,W002", "E2,W" (Skip all Warnings and Errors that starts with E2) and etc +> + let g:pymode_lint_ignore = "E501,W" -Set maximal height for pylint cwindow +Select some error or warnings. *'g:pymode_lint_select'* +By example you disable all warnings starting from 'W', but want to see warning +'W0011' and warning 'W430' +> + let g:pymode_lint_select = "E501,W0011,W430" ------------------------------------------------------------------------------- - *'pymode_rope'* -Values: 0 or 1. -Default: 1. +Sort errors by relevance *'g:pymode_lint_sort'* +If not empty, errors will be sort by defined relevance +E.g. let g:pymode_lint_sort = ['E', 'C', 'I'] " Errors first 'E', +after them 'C' and ... +> + let g:pymode_lint_sort = [] -If this option is set to 0 then rope script is disabled. +Auto open cwindow (quickfix) if any errors have been found + *'g:pymode_lint_cwindow'* +> + let g:pymode_lint_cwindow = 1 ------------------------------------------------------------------------------- - *'pymode_breakpoint'* -Values: 0 or 1. -Default: 1. +Place error |signs| *'g:pymode_signs'* +> + let g:pymode_lint_signs = 1 -If this option is set to 0 then breakpoint script is disabled. 
+Definitions for |signs| +> + let g:pymode_lint_todo_symbol = 'WW' + let g:pymode_lint_comment_symbol = 'CC' + let g:pymode_lint_visual_symbol = 'RR' + let g:pymode_lint_error_symbol = 'EE' + let g:pymode_lint_info_symbol = 'II' + let g:pymode_lint_pyflakes_symbol = 'FF' ------------------------------------------------------------------------------ - *'pymode_breakpoint_key'* -Default: 'b'. +3.1 Set code checkers options ~ + *pymode-lint-options* - Key for set/unset breakpoint +Pymode has the ability to set code checkers options from pymode variables: ------------------------------------------------------------------------------- - *'pymode_utils'* -Values: 0 or 1. -Default: 1. +Set PEP8 options *'g:pymode_lint_options_pep8'* +> + let g:pymode_lint_options_pep8 = + \ {'max_line_length': g:pymode_options_max_line_length}) -If this option is set to 0 then utils script is disabled. +See https://pep8.readthedocs.org/en/1.4.6/intro.html#configuration for more +info. ------------------------------------------------------------------------------- - *'pymode_virtualenv'* -Values: 0 or 1. -Default: 1. +Set Pyflakes options *'g:pymode_lint_options_pyflakes'* +> + let g:pymode_lint_options_pyflakes = { 'builtins': '_' } + +Set mccabe options *'g:pymode_lint_options_mccabe'* +> + let g:pymode_lint_options_mccabe = { 'complexity': 12 } + +Set pep257 options *'g:pymode_lint_options_pep257'* +> + let g:pymode_lint_options_pep257 = {} + +Set pylint options *'g:pymode_lint_options_pylint'* +> + let g:pymode_lint_options_pylint = + \ {'max-line-length': g:pymode_options_max_line_length}) + +See http://docs.pylint.org/features.html#options for more info. -If this option is set to 0 then virtualenv support is disabled. ------------------------------------------------------------------------------- - *'pymode_utils_whitespaces'* -Values: 0 or 1. -Default: 1. -Autoremove unused whitespaces +============================================================================== +3. 
Rope support ~ + *pymode-rope* + +Pymode supports Rope refactoring operations, code completion and code assists. + +Commands: +|:PymodeRopeAutoImport| -- Resolve import for element under cursor +|:PymodeRopeModuleToPackage| -- Convert current module to package +|:PymodeRopeNewProject| -- Open new Rope project in current working directory +|:PymodeRopeRedo| -- Redo changes from last refactoring +|:PymodeRopeRegenerate| -- Regenerate the project cache +|:PymodeRopeRenameModule| -- Rename current module +|:PymodeRopeUndo| -- Undo changes from last refactoring + + +Turn on the rope script *'g:pymode_rope'* +> + let g:pymode_rope = 1 + +.ropeproject Folder ~ + *.ropeproject* + +*:PymodeRopeNewProject* [] -- Open new Rope project in the given path +*:PymodeRopeRegenerate* -- Regenerate the project cache + +Rope uses a folder inside projects for holding project configuration and data. +Its default name is `.ropeproject`. It is recommended that you do not add the +.ropeproject folder to version control system. + +Currently it is used for things such as: + +* The config.py file in this folder contains project configuration. Have + a look at the default config.py file (which is created when it + does not exist) for more information. +* It can be used for saving project history, so that the next time you open the + project you can undo past changes. +* It can be used to save information about object inferences. +* It can be used to save a global name cache, which is used for auto-import. + +By default, if `.ropeproject` is not found in the current directory, rope will +look recursively for it in parent folders. 
+ +Warning: If rope finds `.ropeproject` in a parent dir, it will use it with +all its child directories, which may slow scanning down (because of many, +possibly unrelated, files) + +Enable searching for |.ropeproject| in parent directories + *'g:pymode_rope_lookup_project'* +> + let g:pymode_rope_lookup_project = 0 + +You can also manually set the rope project directory. If not specified rope will +use the current directory. + *'g:pymode_rope_project_root'* +> + let g:pymode_rope_project_root = "" + + +The location of the `.ropeproject` folder may also be overridden if you wish to +keep it outside of your project root. The rope library treats this folder as a +project resource, so the path will always be relative to your project root (a +leading '/' will be ignored). You may use `'..'` path segments to place the +folder outside of your project root. + *'g:pymode_rope_ropefolder'* +> + let g:pymode_rope_ropefolder='.ropeproject' + + + +Show documentation for element under cursor ~ + +Show documentation for object under cursor. *'g:pymode_rope_show_doc_bind'* +Leave empty to disable the key binding. +> + let g:pymode_rope_show_doc_bind = 'd' + +Regenerate project cache on every save (if file has been modified) +> + let g:pymode_rope_regenerate_on_write = 1 ------------------------------------------------------------------------------ - *'pymode_syntax'* -Values: 0 or 1. -Default: 1. +4.1 Completion ~ + *pymode-completion* + +By default you can use for autocompletion. The first entry will +be automatically selected and you can press to insert the entry in +your code. and / works too. + +Autocompletion is also called by typing a period in |Insert| mode by default. 
+ + +Turn on code completion support in the plugin *'g:pymode_rope_completion'* +> + let g:pymode_rope_completion = 1 + +Turn on autocompletion when typing a period + *'g:pymode_rope_complete_on_dot'* +> + let g:pymode_rope_complete_on_dot = 1 + +Keymap for autocomplete *'g:pymode_rope_completion_bind'* +> + let g:pymode_rope_completion_bind = '' + +Extended autocompletion (rope could complete objects which have not been +imported) from project *'g:pymode_rope_autoimport'* +> + let g:pymode_rope_autoimport = 0 + +Load modules to autoimport by default *'g:pymode_rope_autoimport_modules'* +> + let g:pymode_rope_autoimport_modules = ['os', 'shutil', 'datetime'] + +Offer to unresolved import object after completion. +> + let g:pymode_rope_autoimport_import_after_complete = 0 -If this option is set to 0 then the custom syntax highlighting will -not be used. ------------------------------------------------------------------------------ - *'pymode_options_indent'* -Values: 0 or 1. -Default: 1. - -If this option is set to 1, pymode enable next options for python buffers: > - - setlocal cinwords=if,elif,else,for,while,try,except,finally,def,class - setlocal cindent - setlocal tabstop=4 - setlocal softtabstop=4 - setlocal shiftwidth=4 - setlocal shiftround - setlocal smartindent - setlocal smarttab - setlocal expandtab - setlocal autoindent -< +4.2 Find definition ~ + *pymode-rope-findit* + +By default when you press *g* on any object in your code you will be moved +to definition. +Leave empty for disable key binding. *'g:pymode_rope_goto_definition_bind'* +> + let g:pymode_rope_goto_definition_bind = 'g' + +Command for open window when definition has been found +Values are (`e`, `new`, `vnew`) *'g:pymode_rope_goto_definition_cmd'* +> + let g:pymode_rope_goto_definition_cmd = 'new' + ------------------------------------------------------------------------------ - *'pymode_options_fold'* -Values: 0 or 1. -Default: 1. 
+4.3 Refactoring ~ + *pymode-rope-refactoring* + +Rename method/function/class/variable in the project ~ + +Pymode can rename everything: classes, functions, modules, packages, methods, +variables and keyword arguments. + +Keymap for rename method/function/class/variables under cursor + *'g:pymode_rope_rename_bind'* +> + let g:pymode_rope_rename_bind = 'rr' + + +Rename a current module/package ~ + +*:PymodeRopeRenameModule* -- Rename current module + +Keymap for rename current module *'g:pymode_rope_rename_module_bind'* +> + let g:pymode_rope_rename_module_bind = 'r1r' + + +Imports ~ + +*:PymodeRopeAutoImport* -- Resolve import for element under cursor + +Organize imports sorts imports, too. It does that according to PEP8. Unused +imports will be dropped. +Keymap *'g:pymode_rope_organize_imports_bind'* +> + let g:pymode_rope_organize_imports_bind = 'ro' + +Insert import for current word under cursor *'g:pymode_rope_autoimport_bind'* +Should be enabled |'g:pymode_rope_autoimport'| +> + let g:pymode_rope_autoimport_bind = 'ra' + + +Convert module to package ~ + *'g:pymode_rope_module_to_package_bind'* + +*:PymodeRopeModuleToPackage* -- convert current module to package + +Keybinding: +> + let g:pymode_rope_module_to_package_bind = 'r1p' + + +Extract method/variable ~ + *pymode-rope-extract* + +Extract method/variable from selected lines. + + *'g:pymode_rope_extract_method_bind'* + *'g:pymode_rope_extract_variable_bind'* +> + let g:pymode_rope_extract_method_bind = 'rm' + let g:pymode_rope_extract_variable_bind = 'rl' + + +Use function ~ + *pymode-rope-use* + +It tries to find the places in which a function can be used and changes the +code to call it instead. +> + let g:pymode_rope_use_function_bind = 'ru' + + +Move method/fields ~ + *pymode-rope-move* + +It happens when you perform move refactoring on a method of a class. In this +refactoring, a method of a class is moved to the class of one of its +attributes. The old method will call the new method. 
If you want to change all +of the occurrences of the old method to use the new method you can inline it +afterwards. +> + let g:pymode_rope_move_bind = 'rv' + +Change function signature ~ +> + let g:pymode_rope_change_signature_bind = 'rs' -If this option is set to 1, pymode enable next options for python buffers: > - setlocal foldlevelstart=99 - setlocal foldlevel=99 - setlocal foldmethod=indent -< ------------------------------------------------------------------------------ - *'pymode_options_other'* -Values: 0 or 1. -Default: 1. +4.4 Undo/Redo changes ~ + *pymode-rope-undo* + *pymode-rope-redo* -If this option is set to 1, pymode enable next options for python buffers: > +Commands: - setlocal complete+=t - setlocal formatoptions-=t - setlocal number - setlocal nowrap - setlocal textwidth=80 -< +*:PymodeRopeUndo* -- Undo last changes in the project +*:PymodeRopeRedo* -- Redo last changes in the project ============================================================================== -3. Default Keys ~ - *PythonModeKeys* +5. Syntax ~ + *pymode-syntax* -For redifine keys see: |PythonModeOptions| +Turn on pymode syntax *'g:pymode_syntax'* +> + let g:pymode_syntax = 1 -================ ============================ -Key Command -================ ============================ -K Show python docs for current word under cursor -C-Space Rope code assist -r Run current buffer -b Set breakpoints -================ ============================ +Slower syntax synchronization that is better at handling code blocks in +docstrings. Consider disabling this on slower hardware. + *'g:pymode_syntax_slow_sync'* +> + let g:pymode_syntax_slow_sync = 1 - Note: - Also see: |RopeShortcuts| +Enable all python highlights *'g:pymode_syntax_all'* +> + let g:pymode_syntax_all = 1 +Highlight "print" as a function *'g:pymode_syntax_print_as_function'* +> + let g:pymode_syntax_print_as_function = 0 -============================================================================== -4. 
Commands ~ - *PythonModeCommands* +Highlight "async/await" keywords *'g:pymode_syntax_highlight_async_await'* +> + let g:pymode_syntax_highlight_async_await = g:pymode_syntax_all -*:Pydoc* *Pydoc* - Show python documentation +Highlight '=' operator *'g:pymode_syntax_highlight_equal_operator'* +> + let g:pymode_syntax_highlight_equal_operator = g:pymode_syntax_all -*:PyLintToggle* *PyLintToggle* - Enable, disable pylint +Highlight '*' operator *'g:pymode_syntax_highlight_stars_operator'* +> + let g:pymode_syntax_highlight_stars_operator = g:pymode_syntax_all -*:PyLint* *PyLint* - Check current buffer - -*:Pyrun* *Pyrun* - Run current buffer +Highlight 'self' keyword *'g:pymode_syntax_highlight_self'* +> + let g:pymode_syntax_highlight_self = g:pymode_syntax_all + +Highlight indent's errors *'g:pymode_syntax_indent_errors'* +> + let g:pymode_syntax_indent_errors = g:pymode_syntax_all + +Highlight space's errors *'g:pymode_syntax_space_errors'* +> + let g:pymode_syntax_space_errors = g:pymode_syntax_all + +Highlight string formatting *'g:pymode_syntax_string_formatting'* + *'g:pymode_syntax_string_format'* + *'g:pymode_syntax_string_templates'* + *'g:pymode_syntax_doctests'* +> + let g:pymode_syntax_string_formatting = g:pymode_syntax_all + let g:pymode_syntax_string_format = g:pymode_syntax_all + let g:pymode_syntax_string_templates = g:pymode_syntax_all + let g:pymode_syntax_doctests = g:pymode_syntax_all + +Highlight builtin objects (True, False, ...) *'g:pymode_syntax_builtin_objs'* +> + let g:pymode_syntax_builtin_objs = g:pymode_syntax_all + +Highlight builtin types (str, list, ...) *'g:pymode_syntax_builtin_types'* +> + let g:pymode_syntax_builtin_types = g:pymode_syntax_all + +Highlight exceptions (TypeError, ValueError, ...) 
+ *'g:pymode_syntax_highlight_exceptions'* +> + let g:pymode_syntax_highlight_exceptions = g:pymode_syntax_all + +Highlight docstrings as pythonDocstring (otherwise as pythonString) + *'g:pymode_syntax_docstrings'* +> + let g:pymode_syntax_docstrings = g:pymode_syntax_all ============================================================================== -5. FAQ ~ - *PythonModeFAQ* +6. FAQ ~ + *pymode-faq* + +Python-mode doesn't work +------------------------ + +Open any python file and run ":call pymode#troubleshooting#test()", +fix the warning or send me the output. -Rope completion is very slow + +Rope completion is very slow *pymode-rope-slow* ---------------------------- -To work rope_ creates a service directory: `.ropeproject`. -If |'pymode_rope_guess_project'| set on (by default) and `.ropeproject` in current dir not found, rope scan `.ropeproject` on every dir in parent path. -If rope finded `.ropeproject` in parent dirs, rope set project for all child dir and scan may be slow for many dirs and files. +Rope creates a project-level service directory in |.ropeproject| + +If ``.ropeproject`` is not found in the current directory, rope will walk +upwards looking for a ``.ropeproject`` in every dir of the parent path. If +rope finds ``.ropeproject`` in a parent dir, it sets the project for all child +dirs and the scan may be slow for so many dirs and files. Solutions: -- Disable |'pymode_rope_guess_project'| to make rope always create `.ropeproject` in current dir. -- Delete `.ropeproject` from dip parent dir to make rope create `.ropeproject` in current dir. -- Press `po` or `:RopeOpenProject` to make force rope create `.ropeproject` in current dir. +- Delete `.ropeproject` from the parent dir to make rope create `.ropeproject` + in the current dir. +- Run ``:PymodeRopeNewProject`` to make rope create ``.ropeproject`` in the + current dir. +- Set |'g:pymode_rope_lookup_project'| to 0 for prevent searching in parent + dirs. 
+ +You may also set |'g:pymode_rope_project_root'| to manually specify the project +root path. Pylint check is very slow ------------------------- -In some projects pylint_ may check slowly, because it also scan imported modules if posible. -Try use pyflakes, see |'pymode_lint_checker'|. +In some projects pylint may check slowly, because it also scans imported +modules if possible. Try using another code checker: see +|'g:pymode_lint_checkers'|. -You may set |exrc| and |secure| in your |vimrc| for auto set custom settings from `.vimrc` from your projects directories. +You may set |exrc| and |secure| in your |vimrc| to auto-set custom settings +from `.vimrc` from your projects directories. + + +OSX cannot import urandom +------------------------- + +See: https://groups.google.com/forum/?fromgroups=#!topic/vim_dev/2NXKF6kDONo + +The sequence of commands that fixed this: > - Example: On Flask projects I automaticly set 'g:pymode_lint_checker = "pyflakes"', on django 'g:pymode_lint_cheker = "pylint"' + brew unlink python + brew unlink macvim + brew remove macvim + brew install -v --force macvim + brew link macvim + brew link python < - ============================================================================== -6. Credits ~ - *PythonModeCredits* +7. Credits ~ + *pymode-credits* Kirill Klenov http://klen.github.com/ http://github.com/klen/ @@ -373,14 +771,36 @@ You may set |exrc| and |secure| in your |vimrc| for auto set custom settings fro Copyright (c) 2005 Divmod, Inc. http://www.divmod.com/ + PEP8: + Copyright (c) 2006 Johann C. Rocholl + http://github.com/jcrocholl/pep8 + + autopep8: + Copyright (c) 2012 hhatto + https://github.com/hhatto/autopep8 + + Python syntax for vim: + Copyright (c) 2010 Dmitry Vasiliev + http://www.hlabs.spb.ru/vim/python.vim + + PEP8 VIM indentation + Copyright (c) 2012 Hynek Schlawack + http://github.com/hynek/vim-python-pep8-indent + ============================================================================== -7. 
License ~ - *PythonModeLicense* +8. License ~ + *pymode-license* -The Python-mode is released under the GNU lesser general public license. +Python-mode is released under the GNU lesser general public license. See: http://www.gnu.org/copyleft/lesser.html +If you like this plugin, I would very appreciated if you kindly send me a postcard :) + +My address is: "Russia, 143500, MO, Istra, pos. Severny 8-3" to "Kirill Klenov". +Thanks for your support! + + ------------------------------------------------------------------------------ vim:tw=78:ts=8:ft=help:norl: diff --git a/doc/ropevim.txt b/doc/ropevim.txt deleted file mode 100644 index be42682b..00000000 --- a/doc/ropevim.txt +++ /dev/null @@ -1,337 +0,0 @@ -*ropevim.txt* *Ropevim* Rope in VIM - -============================================================================== -CONTENTS *Rope contents* - - 1.Refactoring Dialog......................|RopeRefactoringDialog| - 2.Finding Files...........................|RopeFindingFiles| - 3.Code Assist.............................|RopeCodeAssist| - 4.Enabling Autoimport.....................|RopeEnablingAutoimport| - 5.Filtering Resources.....................|RopeFilteringResources| - 6.Finding Occurrences.....................|RopeFindOccurrences| - 7.Dialog Batchset Command.................|RopeDialogBatchsetCommand| - 8.Variables...............................|RopeVariables| - 9.Keybindings.............................|RopeKeys| - - -============================================================================== -1. Refactoring Dialog ~ - *RopeRefactoringDialog* - -Ropevim refactorings use a special kind of dialog. Depending on the -refactoring, you'll be asked about the essential information a -refactoring needs to know (like the new name in rename refactoring). - -Next you'll see the base prompt of a refactoring dialog that shows -something like "Choose what to do". By entering the name of a -refactoring option you can set its value. 
After setting each option -you'll be returned back to the base prompt. Finally, you can ask rope -to perform, preview or cancel the refactoring. - -See |RopeKeys| section and try the refactorings yourself. - - -============================================================================== -2. Finding Files ~ - *RopeFindingFiles* - *:RopeFindFile* - *:RopeFindFileOtherWindow* - -By using |:RopeFindFile| (" p f" by default), you can search for -files in your project. When you complete the minibuffer you'll see -all files in the project; files are shown as their reversed paths. -For instance ``projectroot/docs/todo.txt`` is shown like -``todo.txt p 4 f") opens the -file in the other window. - - -============================================================================== -3. Code Assist ~ - *RopeCodeAssist* - *:RopeCodeAssist* - *:RopeLuckyAssist* - *'pymode_rope_vim_completion'* - *'pymode_rope_extended_complete'* - -|:RopeCodeAssist| command () will let you select from a list -of completions. |:RopeLuckyAssist| command () does not ask -anything; instead, it inserts the first proposal. - -You can tell ropevim to use vim's complete function in insert mode; -Add: > - - let pymode_rope_vim_completion=1 -< -to your '~/.vimrc' file. - - Note: - That when this variable is set, autoimport completions no longer - work since they need to insert an import to the top of the module, - too. - -By default autocomplete feature will use plain list of proposed completion -items. You can enable showing extended information about completion -proposals by setting : > - - let pymode_rope_extended_complete=1 -< -Completion menu list will show the proposed name itself, one letter which -shows where this proposal came from (it can be "L" for locals, "G" for -globals, "B" for builtins, or empty string if such scope definition is not -applicable), a short object type description (such as "func", "param", -"meth" and so forth) and a first line of proposed object's docstring (if it -has one). 
For function's keyword parameters the last field shows "*" symbol -if this param is required or "= " if it is not. - - -============================================================================== -4. Enabling Autoimport ~ - *RopeEnablingAutoimport* - *:RopevimAutoImport* - *:RopeGenerateAutoimportCache* - -Rope can propose and automatically import global names in other -modules. Rope maintains a cache of global names for each project. It -updates the cache only when modules are changed; if you want to cache -all your modules at once, use |:RopeGenerateAutoimportCache|. It -will cache all of the modules inside the project plus those whose -names are listed in |'pymode_rope_autoimport_modules'| list: > - - " add the name of modules you want to autoimport - let g:pymode_rope_autoimport_modules = ["os", "shutil"] -< -Now if you are in a buffer that contains: > - - rmtree -< - -and you execute |:RopevimAutoImport| you'll end up with: > - - from shutil import rmtree - rmtree -< -Also |:RopeCodeAssist| and |:RopeLuckyAssist| propose auto-imported -names by using "name : module" style. Selecting them will import -the module automatically. - - -============================================================================== -5. Filtering Resources ~ - *RopeFilteringResources* - -Some refactorings, restructuring and find occurrences take an option -called resources. This option can be used to limit the resources on -which a refactoring should be applied. - -It uses a simple format: each line starts with either '+' or '-'. -Each '+' means include the file (or its children if it's a folder) -that comes after it. '-' has the same meaning for exclusion. So -using: > - - +rope - +ropetest - -rope/contrib -< -means include all python files inside ``rope`` and ``ropetest`` -folders and their subfolder, but those that are in ``rope/contrib``. -Or: > - - -ropetest - -setup.py -< -means include all python files inside the project but ``setup.py`` and -those under ``ropetest`` folder. 
- - -============================================================================== -6. Finding Occurrences ~ - *RopeFindOccurrences* - -The find occurrences command (" f" by default) can be used to -find the occurrences of a python name. If ``unsure`` option is -``yes``, it will also show unsure occurrences; unsure occurrences are -indicated with a ``?`` mark in the end. - - Note: - That ropevim uses the quickfix feature of vim for - marking occurrence locations. - - -============================================================================== -7. Dialog Batchset Command ~ - *RopeDialogBatchsetCommand* - -When you use ropevim dialogs there is a command called ``batchset``. -It can set many options at the same time. After selecting this -command from dialog base prompt, you are asked to enter a string. - -``batchset`` strijgs can set the value of configs in two ways. The -single line form is like this: > - - name1 value1 - name2 value2 -< - -That is the name of config is followed its value. For multi-line -values you can use: > - - name1 - line1 - line2 - - name2 - line3 -< -Each line of the definition should start with a space or a tab. - Note: - That blank lines before the name of config definitions are ignored. - -``batchset`` command is useful when performing refactorings with long -configs, like restructurings: > - - pattern ${pycore}.create_module(${project}.root, ${name}) - - goal generate.create_module(${project}, ${name}) - - imports - from rope.contrib import generate - - args - pycore: type=rope.base.pycore.PyCore - project: type=rope.base.project.Project -< -.. ignore the two-space indents - -This is a valid ``batchset`` string for restructurings. 
- -Just for the sake of completeness, the reverse of the above -restructuring can be: > - - pattern ${create_module}(${project}, ${name}) - - goal ${project}.pycore.create_module(${project}.root, ${name}) - - args - create_module: name=rope.contrib.generate.create_module - project: type=rope.base.project.Project -< - -============================================================================== -8. Variables ~ - *RopeVariables* - -*'pymode_rope_codeassist_maxfixes'* The maximum number of syntax errors - to fix for code assists. - The default value is `1`. - -*'pymode_rope_local_prefix'* The prefix for ropevim refactorings. - Defaults to ` r`. - -*'pymode_rope_global_prefix'* The prefix for ropevim project commands - Defaults to ` p`. - -*'pymode_rope_enable_shortcuts'* Shows whether to bind ropevim shortcuts keys. - Defaults to `1`. - -*'pymode_rope_guess_project'* If non-zero, ropevim tries to guess and - open the project that contains the file on which - a ropevim command is performed when no project - is already open. - -*'pymode_rope_enable_autoimport'* Shows whether to enable autoimport. - -*'pymode_rope_autoimport_modules'* The name of modules whose global names should - be cached. |:RopeGenerateAutoimportCache| reads - this list and fills its cache. - -*'pymode_rope_autoimport_underlineds'* If set, autoimport will cache names starting - with underlines, too. - -*'pymode_rope_goto_def_newwin'* If set, ropevim will open a new buffer - for "go to definition" result if the definition - found is located in another file. By default the - file is open in the same buffer. - -*'pymode_rope_always_show_complete_menu'* If set, rope autocompletion menu -always show. - - -============================================================================== -9. Keybinding ~ - *RopeKeys* - -Uses almost the same keybinding as ropemacs. - Note: - That global commands have a ` p` prefix and local commands - have a `` r`` prefix. - You can change that (see |RopeVariables| section). 
- - -================ ============================ -Key Command -================ ============================ -C-x p o |:RopeOpenProject| -C-x p k |:RopeCloseProject| -C-x p f |:RopeFindFile| -C-x p 4 f |:RopeFindFileOtherWindow| -C-x p u |:RopeUndo| -C-x p r |:RopeRedo| -C-x p c |:RopeProjectConfig| -C-x p n [mpfd] |:RopeCreate|(Module|Package|File|Directory) - |:RopeWriteProject| - -C-c r r |:RopeRename| -C-c r l |:RopeExtractVariable| -C-c r m |:RopeExtractMethod| -C-c r i |:RopeInline| -C-c r v |:RopeMove| -C-c r x |:RopeRestructure| -C-c r u |:RopeUseFunction| -C-c r f |:RopeIntroduceFactory| -C-c r s |:RopeChangeSignature| -C-c r 1 r |:RopeRenameCurrentModule| -C-c r 1 v |:RopeMoveCurrentModule| -C-c r 1 p |:RopeModuleToPackage| - -C-c r o |:RopeOrganizeImports| -C-c r n [vfcmp] |:RopeGenerate|(Variable|Function|Class|Module|Package) - -C-c r a / |:RopeCodeAssist| -C-c r a g |:RopeGotoDefinition| -C-c r a d |:RopeShowDoc| -C-c r a f |:RopeFindOccurrences| -C-c r a ? |:RopeLuckyAssist| -C-c r a j |:RopeJumpToGlobal| -C-c r a c |:RopeShowCalltip| - |:RopeAnalyzeModule| - - |:RopeAutoImport| - |:RopeGenerateAutoimportCache| -=============== ============================ - - -============================================================================== -10. Shortcuts ~ - *RopeShortcuts* - -Some commands are used very frequently; specially the commands in -code-assist group. 
You can define your own shortcuts like this: > - - :map g :call RopeGotoDefinition() - -< - -================ ============================ -Key Command -================ ============================ - |:RopeCodeAssist| - |:RopeLuckyAssist| - g |:RopeGotoDefinition| - d |:RopeShowDoc| - f |:RopeFindOccurrences| -================ ============================ - ------------------------------------------------------------------------------- - - vim:tw=78:fo=tcq2:isk=!-~,^*,^\|,^\":ts=8:ft=help:norl: diff --git a/ftplugin/pyrex.vim b/ftplugin/pyrex.vim new file mode 100644 index 00000000..93e0556d --- /dev/null +++ b/ftplugin/pyrex.vim @@ -0,0 +1 @@ +runtime ftplugin/python/pymode.vim diff --git a/ftplugin/python/pymode.vim b/ftplugin/python/pymode.vim index 75104e28..97daecca 100644 --- a/ftplugin/python/pymode.vim +++ b/ftplugin/python/pymode.vim @@ -1,121 +1,212 @@ -if pymode#Default('b:pymode', 1) +if !g:pymode || pymode#default('b:pymode', 1) finish endif -" Syntax highlight -if !pymode#Default('g:pymode_syntax', 1) || g:pymode_syntax - let python_highlight_all=1 - let python_highlight_exceptions=1 - let python_highlight_builtins=1 -endif +if g:pymode_python == 'disable' -" Python indent options -if !pymode#Default('g:pymode_options_indent', 1) || g:pymode_options_indent - setlocal cinwords=if,elif,else,for,while,try,except,finally,def,class - setlocal cindent - setlocal tabstop=4 - setlocal softtabstop=4 - setlocal shiftwidth=4 - setlocal shiftround - setlocal smartindent - setlocal smarttab - setlocal expandtab - setlocal autoindent -endif + if g:pymode_warning + call pymode#error("Pymode requires vim compiled with +python. 
Most of features will be disabled.") + endif -" Python fold options -if !pymode#Default('g:pymode_options_fold', 1) || g:pymode_options_fold - setlocal foldlevelstart=99 - setlocal foldlevel=99 - setlocal foldmethod=indent -endif - -" Python other options -if !pymode#Default('g:pymode_options_other', 1) || g:pymode_options_other - setlocal complete+=t - setlocal formatoptions-=t - setlocal number - setlocal nowrap - setlocal textwidth=80 -endif + finish -" Fix path for project -if g:pymode - py curpath = vim.eval('getcwd()') - py curpath in sys.path or sys.path.append(curpath) -endif +else -" Add virtualenv paths -if g:pymode_virtualenv && exists("$VIRTUAL_ENV") - call pymode#virtualenv#Activate() -endif -" Python documentation -if g:pymode_doc +let b:pymode_modified = &modified + +" Init paths +if !pymode#default('g:pymode_init', 1) - " DESC: Set commands - command! -buffer -nargs=1 Pydoc call pymode#doc#Show("") + call pymode#init(expand(':p:h:h:h'), g:pymode_paths) + call pymode#virtualenv#init() + call pymode#breakpoint#init() - " DESC: Set keys - exe "nnoremap " g:pymode_doc_key ":call pymode#doc#Show(expand(''))" + PymodePython from pymode.utils import patch_paths + PymodePython patch_paths() + + endif endif +command! -buffer -nargs=1 PymodeVirtualenv call pymode#virtualenv#activate() -" PyLint -if g:pymode_lint +" Setup events for pymode +au! pymode BufWritePre call pymode#buffer_pre_write() +au! pymode BufWritePost call pymode#buffer_post_write() - " DESC: Set commands - command! -buffer -nargs=0 PyLintToggle :call pymode#lint#Toggle() - command! -buffer -nargs=0 PyLintCheckerToggle :call pymode#lint#ToggleChecker() - command! -buffer -nargs=0 PyLint :call pymode#lint#Check() +" Run python code +if g:pymode_run - " DESC: Set autocommands - if g:pymode_lint_write - au BufWritePost PyLint - endif + command! 
-buffer -nargs=0 -range=% PymodeRun call pymode#run#code_run(, ) -endif + exe "nnoremap " g:pymode_run_bind ":PymodeRun" + exe "vnoremap " g:pymode_run_bind ":PymodeRun" -" Rope -if g:pymode_rope +endif - " DESC: Set keys - noremap g :RopeGotoDefinition - noremap d :RopeShowDoc - noremap f :RopeFindOccurrences - noremap m :emenu Rope. - inoremap =RopeLuckyAssistInsertMode() +" Add/remove breakpoints +if g:pymode_breakpoint - let s:prascm = g:pymode_rope_always_show_complete_menu ? "" : "" - exe "inoremap =RopeCodeAssistInsertMode()" . s:prascm - exe "inoremap =RopeCodeAssistInsertMode()" . s:prascm + exe "nnoremap " g:pymode_breakpoint_bind ":call pymode#breakpoint#operate(line('.'))" endif -" Run code -if g:pymode_run +" Python folding +if g:pymode_folding - " DESC: Set commands - command! -buffer -nargs=0 Pyrun call pymode#run#Run() + setlocal foldmethod=expr + setlocal foldexpr=pymode#folding#expr(v:lnum) + setlocal foldtext=pymode#folding#text() - " DESC: Set keys - exe "nnoremap " g:pymode_run_key ":Pyrun" +endif +" Remove unused whitespaces +if g:pymode_trim_whitespaces + au BufWritePre call pymode#trim_whitespaces() endif -" Set breakpoints -if g:pymode_breakpoint +" Custom options +if g:pymode_options + setlocal complete+=t + setlocal formatoptions-=t + if v:version > 702 && !&relativenumber + setlocal number + endif + setlocal nowrap + exe "setlocal textwidth=" . g:pymode_options_max_line_length + if g:pymode_options_colorcolumn && exists('+colorcolumn') + setlocal colorcolumn=+1 + endif + setlocal commentstring=#%s + setlocal define=^\s*\\(def\\\\|class\\) +endif - " DESC: Set keys - exe "nnoremap " g:pymode_breakpoint_key ":call pymode#breakpoint#Set(line('.'))" +if g:pymode_lint -endif + command! -buffer -nargs=0 PymodeLintAuto :call pymode#lint#auto() + command! -buffer -nargs=0 PymodeLintToggle :call pymode#lint#toggle() + command! 
-buffer -nargs=0 PymodeLint :call pymode#lint#check() + + if v:version > 703 || (v:version == 703 && has('patch544')) + au! QuitPre call pymode#quit() + else + au! pymode BufWinLeave * silent! lclose + endif + + let b:pymode_error_line = -1 + + if g:pymode_lint_on_fly + au! pymode InsertLeave PymodeLint + endif + + if g:pymode_lint_message + au! pymode CursorMoved + au! pymode CursorMoved call pymode#lint#show_errormessage() + endif -" OPTION: g:pymode_utils_whitespaces -- bool. Remove unused whitespaces on save -call pymode#Default("g:pymode_utils_whitespaces", 1) + " Disabled for current release + if g:pymode_lint_async + " let &l:updatetime = g:pymode_lint_async_updatetime + " au! BufEnter call pymode#lint#start() + " au! BufLeave call pymode#lint#stop() + end -" Utils whitespaces -if g:pymode_utils_whitespaces - au BufWritePre :call setline(1,map(getline(1,"$"),'substitute(v:val,"\\s\\+$","","")')) endif + +" Show python documentation +if g:pymode_doc + + " Set commands + command! -buffer -nargs=1 PymodeDoc call pymode#doc#show("") + + " Set keys + exe "nnoremap " g:pymode_doc_bind ":call pymode#doc#find()" + exe "vnoremap " g:pymode_doc_bind ":call pymode#doc#show(@*)" + +end + +" Rope support +if g:pymode_rope + + if g:pymode_rope_goto_definition_bind != "" + exe "noremap " . g:pymode_rope_goto_definition_bind . " :call pymode#rope#goto_definition()" + endif + if g:pymode_rope_show_doc_bind != "" + exe "noremap " . g:pymode_rope_show_doc_bind . " :call pymode#rope#show_doc()" + end + if g:pymode_rope_find_it_bind != "" + exe "noremap " . g:pymode_rope_find_it_bind . " :call pymode#rope#find_it()" + end + if g:pymode_rope_organize_imports_bind != "" + exe "noremap " . g:pymode_rope_organize_imports_bind . " :call pymode#rope#organize_imports()" + end + + if g:pymode_rope_rename_bind != "" + exe "noremap " . g:pymode_rope_rename_bind . " :call pymode#rope#rename()" + end + + if g:pymode_rope_rename_module_bind != "" + exe "noremap " . 
g:pymode_rope_rename_module_bind . " :call pymode#rope#rename_module()" + end + + if g:pymode_rope_extract_method_bind != "" + exe "vnoremap " . g:pymode_rope_extract_method_bind . " :call pymode#rope#extract_method()" + end + + if g:pymode_rope_extract_variable_bind != "" + exe "vnoremap " . g:pymode_rope_extract_variable_bind . " :call pymode#rope#extract_variable()" + end + + if g:pymode_rope_inline_bind != "" + exe "noremap " . g:pymode_rope_inline_bind . " :call pymode#rope#inline()" + end + + if g:pymode_rope_move_bind != "" + exe "noremap " . g:pymode_rope_move_bind . " :call pymode#rope#move()" + end + + if g:pymode_rope_change_signature_bind != "" + exe "noremap " . g:pymode_rope_change_signature_bind . " :call pymode#rope#signature()" + end + + if g:pymode_rope_use_function_bind != "" + exe "noremap " . g:pymode_rope_use_function_bind . " :call pymode#rope#use_function()" + end + + if g:pymode_rope_generate_function_bind != "" + exe "noremap " . g:pymode_rope_generate_function_bind . " :call pymode#rope#generate_function()" + end + + if g:pymode_rope_generate_package_bind != "" + exe "noremap " . g:pymode_rope_generate_package_bind . " :call pymode#rope#generate_package()" + end + + if g:pymode_rope_generate_class_bind != "" + exe "noremap " . g:pymode_rope_generate_class_bind . " :call pymode#rope#generate_class()" + end + + if g:pymode_rope_module_to_package_bind != "" + exe "noremap " . g:pymode_rope_module_to_package_bind . " :call pymode#rope#module_to_package()" + end + + if g:pymode_rope_autoimport_bind != "" + exe "noremap " . g:pymode_rope_autoimport_bind . " :PymodeRopeAutoImport" + end + + if g:pymode_rope_completion && g:pymode_rope_complete_on_dot + inoremap . .=pymode#rope#complete_on_dot() + end + + command! -buffer -nargs=? PymodeRopeNewProject call pymode#rope#new() + command! -buffer PymodeRopeUndo call pymode#rope#undo() + command! -buffer PymodeRopeRedo call pymode#rope#redo() + command! 
-buffer PymodeRopeRenameModule call pymode#rope#rename_module() + command! -buffer PymodeRopeModuleToPackage call pymode#rope#module_to_package() + command! -buffer PymodeRopeRegenerate call pymode#rope#regenerate() + + if g:pymode_rope_autoimport + command! -buffer PymodeRopeAutoImport call pymode#rope#autoimport(expand('')) + end + +end diff --git a/logo.png b/logo.png new file mode 100644 index 00000000..b873f5cb Binary files /dev/null and b/logo.png differ diff --git a/plugin/pymode.vim b/plugin/pymode.vim index e67de660..3833cef3 100644 --- a/plugin/pymode.vim +++ b/plugin/pymode.vim @@ -1,279 +1,316 @@ -let g:pymode_version = "0.4.4" +" vi: fdl=1 +let g:pymode_version = "0.9.2" -command! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version +com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version +com! PymodeTroubleshooting call pymode#troubleshooting#test() -" OPTION: g:pymode -- bool. Run pymode. -if pymode#Default('g:pymode', 1) || !g:pymode - " DESC: Disable script loading +" Enable pymode by default :) +call pymode#default('g:pymode', 1) +call pymode#default('g:pymode_debug', 0) + +" DESC: Disable script loading +if !g:pymode || &cp finish endif -" DESC: Check python support -if !has('python') - echoerr expand(":t") . " required vim compiled with +python." - echoerr "Pymode pylint and rope plugins will be disabled." - let g:pymode_lint = 0 - let g:pymode_rope = 0 - let g:pymode_path = 0 - let g:pymode_virtualenv = 0 -endif +" Pymode needs +filetype plugin on -" DESC: Fix python path -if !pymode#Default('g:pymode_path', 1) || g:pymode_path -python << EOF -import sys, vim -from os import path as op +" OPTIONS: {{{ -sys.path = [ - op.join(op.dirname(op.dirname(vim.eval("expand(':p')"))), - 'pylibs'), vim.eval("getcwd()") ] + sys.path -EOF -endif +" Vim Python interpreter. Set to 'disable' for remove python features. 
+call pymode#default('g:pymode_python', '') -if !pymode#Default("g:pymode_lint", 1) || g:pymode_lint +" Disable pymode warnings +call pymode#default('g:pymode_warning', 1) - " OPTION: g:pymode_lint_write -- bool. Check code every save. - call pymode#Default("g:pymode_lint_write", 1) +" Additional python paths +call pymode#default('g:pymode_paths', []) - " OPTION: g:pymode_lint_checker -- str. Use pylint of pyflakes for check. - call pymode#Default("g:pymode_lint_checker", "pylint") +" Python documentation support +call pymode#default('g:pymode_doc', 1) +call pymode#default('g:pymode_doc_bind', 'K') - " OPTION: g:pymode_lint_config -- str. Path to pylint config file - call pymode#Default("g:pymode_lint_config", $HOME . "/.pylintrc") +" Enable/Disable pymode PEP8 indentation +call pymode#default("g:pymode_indent", 1) - " OPTION: g:pymode_lint_cwindow -- bool. Auto open cwindow if errors find - call pymode#Default("g:pymode_lint_cwindow", 1) +" Enable/disable pymode folding for pyfiles. +call pymode#default("g:pymode_folding", 1) +" Maximum file length to check for nested class/def statements +call pymode#default("g:pymode_folding_nest_limit", 1000) +" Change for folding customization (by example enable fold for 'if', 'for') +call pymode#default("g:pymode_folding_regex", '^\s*\%(class\|def\|async\s\+def\) .\+\(:\s\+\w\)\@!') - " OPTION: g:pymode_lint_jump -- int. Jump on first error. - call pymode#Default("g:pymode_lint_jump", 0) +" Enable/disable python motion operators +call pymode#default("g:pymode_motion", 1) - " OPTION: g:pymode_lint_minheight -- int. Minimal height of pymode lint window - call pymode#Default("g:pymode_lint_minheight", 3) +" Auto remove unused whitespaces on save +call pymode#default("g:pymode_trim_whitespaces", 1) - " OPTION: g:pymode_lint_maxheight -- int. 
Maximal height of pymode lint window - call pymode#Default("g:pymode_lint_maxheight", 6) +" Set recomended python options +call pymode#default("g:pymode_options", 1) +call pymode#default("g:pymode_options_max_line_length", 80) +call pymode#default("g:pymode_options_colorcolumn", 1) - " OPTION: g:pymode_lint_signs -- bool. Place error signs - if !pymode#Default("g:pymode_lint_signs", 1) || g:pymode_lint_signs +" Enable/disable vertical display of python documentation +call pymode#default("g:pymode_doc_vertical", 0) - " DESC: Signs definition - sign define W text=WW texthl=Todo - sign define C text=CC texthl=Comment - sign define R text=RR texthl=Visual - sign define E text=EE texthl=Error +" Minimal height of pymode quickfix window +call pymode#default('g:pymode_quickfix_maxheight', 6) - endif +" Maximal height of pymode quickfix window +call pymode#default('g:pymode_quickfix_minheight', 3) - " DESC: Set default pylint configuration - if !filereadable(g:pymode_lint_config) - let g:pymode_lint_config = expand(":p:h:h") . 
"/pylintrc" - endif +" LOAD VIRTUALENV {{{ +" +" Enable virtualenv support +call pymode#default('g:pymode_virtualenv', 1) -python << EOF -import os -import StringIO -import _ast -import re - -from logilab.astng.builder import MANAGER -from pylint import lint, checkers -from pyflakes import checker - - -# Pylint setup -linter = lint.PyLinter() -pylint_re = re.compile('^[^:]+:(\d+): \[([EWRCI]+)[^\]]*\] (.*)$') - -checkers.initialize(linter) -linter.set_option("output-format", "parseable") -linter.set_option("reports", 0) -linter.load_file_configuration(vim.eval("g:pymode_lint_config")) - -# Pyflakes setup - -# Pylint check -def pylint(): - filename = vim.current.buffer.name - MANAGER.astng_cache.clear() - linter.reporter.out = StringIO.StringIO() - linter.check(filename) - qf = [] - for w in linter.reporter.out.getvalue().split('\n'): - test = pylint_re.match(w) - test and qf.append(dict( - filename = filename, - bufnr = vim.current.buffer.number, - lnum = test.group(1), - type = test.group(2), - text = test.group(3), - )) - vim.command('let b:qf_list = %s' % repr(qf)) - -# Pyflakes check -def pyflakes(): - filename = vim.current.buffer.name - codeString = file(filename, 'U').read() + '\n' - qf = [] - try: - tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST) - - except SyntaxError, value: - msg = value.args[0] - if codeString is None: - vim.command('echoerr "%s: problem decoding source"' % filename) - else: - lineno, _, text = value.lineno, value.offset, value.text - qf.append(dict( - filename = filename, - bufnr = vim.current.buffer.number, - lnum = str(lineno), - text = msg, - type = 'E' - )) - - else: - w = checker.Checker(tree, filename) - w.messages.sort(lambda a, b: cmp(a.lineno, b.lineno)) - for w in w.messages: - qf.append(dict( - filename = filename, - bufnr = vim.current.buffer.number, - lnum = str(w.lineno), - text = w.message % w.message_args, - type = 'E' - )) - - vim.command('let b:qf_list = %s' % repr(qf)) -EOF -endif +" Get path to 
virtualenv (by default take from shell) +call pymode#default('g:pymode_virtualenv_path', $VIRTUAL_ENV) -if !pymode#Default("g:pymode_breakpoint", 1) || g:pymode_breakpoint +" Service variable (don't set it manually) +call pymode#default('g:pymode_virtualenv_enabled', '') - " OPTION: g:pymode_breakpoint_key -- string. Key for set/unset breakpoint. - call pymode#Default("g:pymode_breakpoint_key", "b") +" }}} - call pymode#Default("g:pymode_breakpoint_cmd", "import ipdb; ipdb.set_trace() ### XXX BREAKPOINT") +" RUN PYTHON {{{ +" +" Enable code running support +call pymode#default('g:pymode_run', 1) -endif +" Key's map for run python code +call pymode#default('g:pymode_run_bind', 'r') -if !pymode#Default("g:pymode_doc", 1) || g:pymode_doc +" }}} - if !pymode#CheckProgram("pydoc", "or disable pymode_doc.") - let g:pymode_doc = 0 - endif +" CHECK CODE {{{ +" +" Code checking +call pymode#default('g:pymode_lint', 1) - " OPTION: g:pymode_doc_key -- string. Key for show python documantation. - call pymode#Default("g:pymode_doc_key", "K") +" Check code asynchronously +call pymode#default('g:pymode_lint_async', 1) +call pymode#default('g:pymode_lint_async_updatetime', 1000) -endif +" Check code every save if file has been modified +call pymode#default("g:pymode_lint_on_write", 1) -if !pymode#Default("g:pymode_virtualenv", 1) || g:pymode_virtualenv +" Check code every save (every) +call pymode#default("g:pymode_lint_unmodified", 0) - call pymode#Default("g:pymode_virtualenv_enabled", []) +" Check code on fly +call pymode#default("g:pymode_lint_on_fly", 0) -endif +" Show message about error in command line +call pymode#default("g:pymode_lint_message", 1) -if !pymode#Default("g:pymode_run", 1) || g:pymode_run +" Choices are: pylint, pyflakes, pep8, mccabe +call pymode#default("g:pymode_lint_checkers", ['pyflakes', 'pep8', 'mccabe']) - if !pymode#CheckProgram("python", "or disable pymode_run.") - let g:pymode_run = 0 - endif +" Skip errors and warnings (e.g. 
E4,W) +call pymode#default("g:pymode_lint_ignore", "") + +" Select errors and warnings (e.g. E4,W) +call pymode#default("g:pymode_lint_select", "") + +" Auto open cwindow if any errors has been finded +call pymode#default("g:pymode_lint_cwindow", 1) + +" If not emply, errors will be sort by defined relevance +" E.g. let g:pymode_lint_sort = ['E', 'C', 'I'] " Errors first 'E', +" after them 'C' and ... +call pymode#default("g:pymode_lint_sort", []) + +" Place error signs +call pymode#default("g:pymode_lint_signs", 1) + +" Symbol's definitions +call pymode#default("g:pymode_lint_todo_symbol", "WW") +call pymode#default("g:pymode_lint_docs_symbol", "DD") +call pymode#default("g:pymode_lint_comment_symbol", "CC") +call pymode#default("g:pymode_lint_visual_symbol", "RR") +call pymode#default("g:pymode_lint_error_symbol", "EE") +call pymode#default("g:pymode_lint_info_symbol", "II") +call pymode#default("g:pymode_lint_pyflakes_symbol", "FF") + +" Code checkers options +call pymode#default("g:pymode_lint_options_pep8", + \ {'max_line_length': g:pymode_options_max_line_length}) + +call pymode#default("g:pymode_lint_options_pylint", + \ {'max-line-length': g:pymode_options_max_line_length}) + +call pymode#default("g:pymode_lint_options_mccabe", + \ {'complexity': 12}) + +call pymode#default("g:pymode_lint_options_pep257", {}) +call pymode#default("g:pymode_lint_options_pyflakes", { 'builtins': '_' }) + + +" }}} + +" SET/UNSET BREAKPOINTS {{{ +" + +" Create/remove breakpoints +call pymode#default('g:pymode_breakpoint', 1) + +" Key's map for add/remove breakpoint +call pymode#default('g:pymode_breakpoint_bind', 'b') + +" Default pattern for making breakpoints. Leave this empty for auto search available debuggers (pdb, ipdb, ...) 
+call pymode#default('g:pymode_breakpoint_cmd', '') + +" }}} + +" ROPE (refactoring, codeassist) {{{ +" +" Rope support +call pymode#default('g:pymode_rope', 1) + +" System plugin variable +call pymode#default('g:pymode_rope_current', '') + +" Configurable rope project root +call pymode#default('g:pymode_rope_project_root', '') + +" Configurable rope project folder (always relative to project root) +call pymode#default('g:pymode_rope_ropefolder', '.ropeproject') + +" If project hasnt been finded in current working directory, look at parents directory +call pymode#default('g:pymode_rope_lookup_project', 0) + +" Enable Rope completion +call pymode#default('g:pymode_rope_completion', 1) + +" Complete keywords from not imported modules (could make completion slower) +" Enable autoimport used modules +call pymode#default('g:pymode_rope_autoimport', 0) + +" Offer to import object after complete (if that not be imported before) +call pymode#default('g:pymode_rope_autoimport_import_after_complete', 0) - " OPTION: g:pymode_doc_key -- string. Key for show python documantation. 
- call pymode#Default("g:pymode_run_key", "r") +" Autoimported modules +call pymode#default('g:pymode_rope_autoimport_modules', ['os', 'shutil', 'datetime']) +" Bind keys to autoimport module for object under cursor +call pymode#default('g:pymode_rope_autoimport_bind', 'ra') + +" Automatic completion on dot +call pymode#default('g:pymode_rope_complete_on_dot', 1) + +" Bind keys for autocomplete (leave empty for disable) +call pymode#default('g:pymode_rope_completion_bind', '') + +" Bind keys for goto definition (leave empty for disable) +call pymode#default('g:pymode_rope_goto_definition_bind', 'g') + +" set command for open definition (e, new, vnew) +call pymode#default('g:pymode_rope_goto_definition_cmd', 'new') + +" Bind keys for show documentation (leave empty for disable) +call pymode#default('g:pymode_rope_show_doc_bind', 'd') + +" Bind keys for find occurencies (leave empty for disable) +call pymode#default('g:pymode_rope_find_it_bind', 'f') + +" Bind keys for organize imports (leave empty for disable) +call pymode#default('g:pymode_rope_organize_imports_bind', 'ro') + +" Bind keys for rename variable/method/class in the project (leave empty for disable) +call pymode#default('g:pymode_rope_rename_bind', 'rr') + +" Bind keys for rename module +call pymode#default('g:pymode_rope_rename_module_bind', 'r1r') + +" Bind keys for convert module to package +call pymode#default('g:pymode_rope_module_to_package_bind', 'r1p') + +" Creates a new function or method (depending on the context) from the selected lines +call pymode#default('g:pymode_rope_extract_method_bind', 'rm') + +" Creates a variable from the selected lines +call pymode#default('g:pymode_rope_extract_variable_bind', 'rl') + +" Inline refactoring +call pymode#default('g:pymode_rope_inline_bind', 'ri') + +" Move refactoring +call pymode#default('g:pymode_rope_move_bind', 'rv') + +" Generate function +call pymode#default('g:pymode_rope_generate_function_bind', 'rnf') + +" Generate class +call 
pymode#default('g:pymode_rope_generate_class_bind', 'rnc') + +" Generate package +call pymode#default('g:pymode_rope_generate_package_bind', 'rnp') + +" Change signature +call pymode#default('g:pymode_rope_change_signature_bind', 'rs') + +" Tries to find the places in which a function can be used and changes the +" code to call it instead +call pymode#default('g:pymode_rope_use_function_bind', 'ru') + +" Regenerate project cache on every save +call pymode#default('g:pymode_rope_regenerate_on_write', 1) + +" }}} + +" }}} + +" Prepare to plugin loading +if &compatible + set nocompatible +endif +filetype plugin on + +" Disable python-related functionality +" let g:pymode_python = 'disable' +" let g:pymode_python = 'python3' + +" UltiSnips Fixes +if !len(g:pymode_python) + if exists('g:_uspy') && g:_uspy == ':py' + let g:pymode_python = 'python' + elseif exists('g:_uspy') && g:_uspy == ':py3' + let g:pymode_python = 'python3' + elseif has("python") + let g:pymode_python = 'python' + elseif has("python3") + let g:pymode_python = 'python3' + else + let g:pymode_python = 'disable' + endif endif -if !pymode#Default("g:pymode_rope", 1) || g:pymode_rope - - " OPTION: g:pymode_rope_auto_project -- bool. Auto open ropeproject - call pymode#Default("g:pymode_rope_auto_project", 1) - - " OPTION: g:pymode_rope_enable_autoimport -- bool. Enable autoimport - call pymode#Default("g:pymode_rope_enable_autoimport", 1) - - " OPTION: g:pymode_rope_autoimport_generate -- bool. - call pymode#Default("g:pymode_rope_autoimport_generate", 1) - - " OPTION: g:pymode_rope_autoimport_underlines -- bool. - call pymode#Default("g:pymode_rope_autoimport_underlineds", 0) - - " OPTION: g:pymode_rope_codeassist_maxfiles -- bool. - call pymode#Default("g:pymode_rope_codeassist_maxfixes", 10) - - " OPTION: g:pymode_rope_sorted_completions -- bool. - call pymode#Default("g:pymode_rope_sorted_completions", 1) - - " OPTION: g:pymode_rope_extended_complete -- bool. 
- call pymode#Default("g:pymode_rope_extended_complete", 1) - - " OPTION: g:pymode_rope_autoimport_modules -- array. - call pymode#Default("g:pymode_rope_autoimport_modules", ["os","shutil","datetime"]) - - " OPTION: g:pymode_rope_confirm_saving -- bool. - call pymode#Default("g:pymode_rope_confirm_saving", 1) - - " OPTION: g:pymode_rope_global_prefix -- string. - call pymode#Default("g:pymode_rope_global_prefix", "p") - - " OPTION: g:pymode_rope_local_prefix -- string. - call pymode#Default("g:pymode_rope_local_prefix", "r") - - " OPTION: g:pymode_rope_vim_completion -- bool. - call pymode#Default("g:pymode_rope_vim_completion", 1) - - " OPTION: g:pymode_rope_guess_project -- bool. - call pymode#Default("g:pymode_rope_guess_project", 1) - - " OPTION: g:pymode_rope_goto_def_newwin -- bool. - call pymode#Default("g:pymode_rope_goto_def_newwin", 0) - - " OPTION: g:pymode_rope_always_show_complete_menu -- bool. - call pymode#Default("g:pymode_rope_always_show_complete_menu", 0) - - " DESC: Init Rope - py import ropevim - - fun! RopeCodeAssistInsertMode() "{{{ - call RopeCodeAssist() - return "" - endfunction "}}} - - fun! RopeLuckyAssistInsertMode() "{{{ - call RopeLuckyAssist() - return "" - endfunction "}}} - - fun! 
RopeOmni(findstart, base) "{{{ - " TODO: Fix omni - if a:findstart == 1 - let start = col('.') - 1 - return start - else - call RopeOmniComplete() - return g:pythoncomplete_completions - endif - endfunction "}}} - - " Rope menu - menu Rope.Autoimport :RopeAutoImport - menu Rope.ChangeSignature :RopeChangeSignature - menu Rope.CloseProject :RopeCloseProject - menu Rope.GenerateAutoImportCache :RopeGenerateAutoimportCache - menu Rope.ExtractVariable :RopeExtractVariable - menu Rope.ExtractMethod :RopeExtractMethod - menu Rope.Inline :RopeInline - menu Rope.IntroduceFactory :RopeIntroduceFactory - menu Rope.FindFile :RopeFindFile - menu Rope.OpenProject :RopeOpenProject - menu Rope.Move :RopeMove - menu Rope.MoveCurrentModule :RopeMoveCurrentModule - menu Rope.ModuleToPackage :RopeModuleToPackage - menu Rope.Redo :RopeRedo - menu Rope.Rename :RopeRename - menu Rope.RenameCurrentModule :RopeRenameCurrentModule - menu Rope.Restructure :RopeRestructure - menu Rope.Undo :RopeUndo - menu Rope.UseFunction :RopeUseFunction +if g:pymode_python == 'python' + + command! -nargs=1 PymodePython python + let g:UltiSnipsUsePythonVersion = 2 + +elseif g:pymode_python == 'python3' + + command! -nargs=1 PymodePython python3 + let g:UltiSnipsUsePythonVersion = 3 + +else + + let g:pymode_doc = 0 + let g:pymode_lint = 0 + let g:pymode_path = 0 + let g:pymode_rope = 0 + let g:pymode_run = 0 + let g:pymode_virtualenv = 0 + + command! -nargs=1 PymodePython echo endif + + +command! PymodeVersion echomsg "Pymode version: " . g:pymode_version . " interpreter: " . g:pymode_python . " lint: " . g:pymode_lint . " rope: " . 
g:pymode_rope + +augroup pymode diff --git a/pylama.ini b/pylama.ini new file mode 100644 index 00000000..9579796e --- /dev/null +++ b/pylama.ini @@ -0,0 +1,9 @@ +[pylama] +ignore=D213 +linters=pep8,pyflakes,pylint + +[pylama:pymode/libs*] +skip=1 + +[pylama:pylint] +disable=E1120,E1130,E1103,W1401,F0001 diff --git a/pylibs/logilab/__init__.py b/pylibs/logilab/__init__.py deleted file mode 100644 index 6f2b6e67..00000000 --- a/pylibs/logilab/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""generated file, don't modify or your data will be lost""" -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - pass diff --git a/pylibs/logilab/astng/__init__.py b/pylibs/logilab/astng/__init__.py deleted file mode 100644 index 70b2f3e3..00000000 --- a/pylibs/logilab/astng/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""Python Abstract Syntax Tree New Generation - -The aim of this module is to provide a common base representation of -python source code for projects such as pychecker, pyreverse, -pylint... 
Well, actually the development of this library is essentially -governed by pylint's needs. - -It extends class defined in the python's _ast module with some -additional methods and attributes. Instance attributes are added by a -builder object, which can either generate extended ast (let's call -them astng ;) by visiting an existent ast tree or by inspecting living -object. Methods are added by monkey patching ast classes. - -Main modules are: - -* nodes and scoped_nodes for more information about methods and - attributes added to different node classes - -* the manager contains a high level object to get astng trees from - source files and living objects. It maintains a cache of previously - constructed tree for quick access - -* builder contains the class responsible to build astng trees -""" -__doctype__ = "restructuredtext en" - -import sys -if sys.version_info >= (3, 0): - BUILTINS_MODULE = 'builtins' -else: - BUILTINS_MODULE = '__builtin__' - -# WARNING: internal imports order matters ! - -# make all exception classes accessible from astng package -from logilab.astng.exceptions import * - -# make all node classes accessible from astng package -from logilab.astng.nodes import * - -# trigger extra monkey-patching -from logilab.astng import inference - -# more stuff available -from logilab.astng import raw_building -from logilab.astng.bases import YES, Instance, BoundMethod, UnboundMethod -from logilab.astng.node_classes import are_exclusive, unpack_infer -from logilab.astng.scoped_nodes import builtin_lookup - -# make a manager instance (borg) as well as Project and Package classes -# accessible from astng package -from logilab.astng.manager import ASTNGManager, Project -MANAGER = ASTNGManager() -del ASTNGManager diff --git a/pylibs/logilab/astng/__pkginfo__.py b/pylibs/logilab/astng/__pkginfo__.py deleted file mode 100644 index 5832acbe..00000000 --- a/pylibs/logilab/astng/__pkginfo__.py +++ /dev/null @@ -1,44 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""logilab.astng packaging information""" - -distname = 'logilab-astng' - -modname = 'astng' -subpackage_of = 'logilab' - -numversion = (0, 23, 0) -version = '.'.join([str(num) for num in numversion]) - -install_requires = ['logilab-common >= 0.53.0'] - -license = 'LGPL' - -author = 'Logilab' -author_email = 'python-projects@lists.logilab.org' -mailinglist = "mailto://%s" % author_email -web = "http://www.logilab.org/project/%s" % distname -ftp = "ftp://ftp.logilab.org/pub/%s" % modname - -description = "rebuild a new abstract syntax tree from Python's ast" - -from os.path import join -include_dirs = [join('test', 'regrtest_data'), - join('test', 'data'), join('test', 'data2')] diff --git a/pylibs/logilab/astng/as_string.py b/pylibs/logilab/astng/as_string.py deleted file mode 100644 index 0a42668d..00000000 --- a/pylibs/logilab/astng/as_string.py +++ /dev/null @@ -1,427 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. 
-# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""This module renders ASTNG nodes to string representation. - -It will probably not work on bare _ast trees. -""" -import sys - - -INDENT = ' ' # 4 spaces ; keep indentation variable - - -def _import_string(names): - """return a list of (name, asname) formatted as a string""" - _names = [] - for name, asname in names: - if asname is not None: - _names.append('%s as %s' % (name, asname)) - else: - _names.append(name) - return ', '.join(_names) - - -class AsStringVisitor(object): - """Visitor to render an ASTNG node as string """ - - def __call__(self, node): - """Makes this visitor behave as a simple function""" - return node.accept(self) - - def _stmt_list(self, stmts): - """return a list of nodes to string""" - stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr]) - return INDENT + stmts.replace('\n', '\n'+INDENT) - - - ## visit_ methods ########################################### - - def visit_arguments(self, node): - """return an astng.Function node as string""" - return node.format_args() - - def visit_assattr(self, node): - """return an astng.AssAttr node as string""" - return self.visit_getattr(node) - - def visit_assert(self, node): - """return an astng.Assert node as string""" - if node.fail: - return 'assert %s, %s' % 
(node.test.accept(self), - node.fail.accept(self)) - return 'assert %s' % node.test.accept(self) - - def visit_assname(self, node): - """return an astng.AssName node as string""" - return node.name - - def visit_assign(self, node): - """return an astng.Assign node as string""" - lhs = ' = '.join([n.accept(self) for n in node.targets]) - return '%s = %s' % (lhs, node.value.accept(self)) - - def visit_augassign(self, node): - """return an astng.AugAssign node as string""" - return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self)) - - def visit_backquote(self, node): - """return an astng.Backquote node as string""" - return '`%s`' % node.value.accept(self) - - def visit_binop(self, node): - """return an astng.BinOp node as string""" - return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self)) - - def visit_boolop(self, node): - """return an astng.BoolOp node as string""" - return (' %s ' % node.op).join(['(%s)' % n.accept(self) - for n in node.values]) - - def visit_break(self, node): - """return an astng.Break node as string""" - return 'break' - - def visit_callfunc(self, node): - """return an astng.CallFunc node as string""" - expr_str = node.func.accept(self) - args = [arg.accept(self) for arg in node.args] - if node.starargs: - args.append( '*' + node.starargs.accept(self)) - if node.kwargs: - args.append( '**' + node.kwargs.accept(self)) - return '%s(%s)' % (expr_str, ', '.join(args)) - - def visit_class(self, node): - """return an astng.Class node as string""" - decorate = node.decorators and node.decorators.accept(self) or '' - bases = ', '.join([n.accept(self) for n in node.bases]) - bases = bases and '(%s)' % bases or '' - docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' - return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, - self._stmt_list( node.body)) - - def visit_compare(self, node): - """return an astng.Compare node as string""" - rhs_str = ' '.join(['%s %s' % (op, 
expr.accept(self)) - for op, expr in node.ops]) - return '%s %s' % (node.left.accept(self), rhs_str) - - def visit_comprehension(self, node): - """return an astng.Comprehension node as string""" - ifs = ''.join([ ' if %s' % n.accept(self) for n in node.ifs]) - return 'for %s in %s%s' % (node.target.accept(self), - node.iter.accept(self), ifs ) - - def visit_const(self, node): - """return an astng.Const node as string""" - return repr(node.value) - - def visit_continue(self, node): - """return an astng.Continue node as string""" - return 'continue' - - def visit_delete(self, node): # XXX check if correct - """return an astng.Delete node as string""" - return 'del %s' % ', '.join([child.accept(self) - for child in node.targets]) - - def visit_delattr(self, node): - """return an astng.DelAttr node as string""" - return self.visit_getattr(node) - - def visit_delname(self, node): - """return an astng.DelName node as string""" - return node.name - - def visit_decorators(self, node): - """return an astng.Decorators node as string""" - return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes]) - - def visit_dict(self, node): - """return an astng.Dict node as string""" - return '{%s}' % ', '.join(['%s: %s' % (key.accept(self), - value.accept(self)) for key, value in node.items]) - - def visit_dictcomp(self, node): - """return an astng.DictComp node as string""" - return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self), - ' '.join([n.accept(self) for n in node.generators])) - - def visit_discard(self, node): - """return an astng.Discard node as string""" - return node.value.accept(self) - - def visit_emptynode(self, node): - """dummy method for visiting an Empty node""" - return '' - - def visit_excepthandler(self, node): - if node.type: - if node.name: - excs = 'except %s, %s' % (node.type.accept(self), - node.name.accept(self)) - else: - excs = 'except %s' % node.type.accept(self) - else: - excs = 'except' - return '%s:\n%s' % (excs, 
self._stmt_list(node.body)) - - def visit_ellipsis(self, node): - """return an astng.Ellipsis node as string""" - return '...' - - def visit_empty(self, node): - """return an Empty node as string""" - return '' - - def visit_exec(self, node): - """return an astng.Exec node as string""" - if node.locals: - return 'exec %s in %s, %s' % (node.expr.accept(self), - node.locals.accept(self), - node.globals.accept(self)) - if node.globals: - return 'exec %s in %s' % (node.expr.accept(self), - node.globals.accept(self)) - return 'exec %s' % node.expr.accept(self) - - def visit_extslice(self, node): - """return an astng.ExtSlice node as string""" - return ','.join( [dim.accept(self) for dim in node.dims] ) - - def visit_for(self, node): - """return an astng.For node as string""" - fors = 'for %s in %s:\n%s' % (node.target.accept(self), - node.iter.accept(self), - self._stmt_list( node.body)) - if node.orelse: - fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) - return fors - - def visit_from(self, node): - """return an astng.From node as string""" - return 'from %s import %s' % ('.' 
* (node.level or 0) + node.modname, - _import_string(node.names)) - - def visit_function(self, node): - """return an astng.Function node as string""" - decorate = node.decorators and node.decorators.accept(self) or '' - docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' - return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self), - docs, self._stmt_list(node.body)) - - def visit_genexpr(self, node): - """return an astng.GenExpr node as string""" - return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) - - def visit_getattr(self, node): - """return an astng.Getattr node as string""" - return '%s.%s' % (node.expr.accept(self), node.attrname) - - def visit_global(self, node): - """return an astng.Global node as string""" - return 'global %s' % ', '.join(node.names) - - def visit_if(self, node): - """return an astng.If node as string""" - ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))] - if node.orelse:# XXX use elif ??? 
- ifs.append('else:\n%s' % self._stmt_list(node.orelse)) - return '\n'.join(ifs) - - def visit_ifexp(self, node): - """return an astng.IfExp node as string""" - return '%s if %s else %s' % (node.body.accept(self), - node.test.accept(self), node.orelse.accept(self)) - - def visit_import(self, node): - """return an astng.Import node as string""" - return 'import %s' % _import_string(node.names) - - def visit_keyword(self, node): - """return an astng.Keyword node as string""" - return '%s=%s' % (node.arg, node.value.accept(self)) - - def visit_lambda(self, node): - """return an astng.Lambda node as string""" - return 'lambda %s: %s' % (node.args.accept(self), node.body.accept(self)) - - def visit_list(self, node): - """return an astng.List node as string""" - return '[%s]' % ', '.join([child.accept(self) for child in node.elts]) - - def visit_listcomp(self, node): - """return an astng.ListComp node as string""" - return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) - - def visit_module(self, node): - """return an astng.Module node as string""" - docs = node.doc and '"""%s"""\n\n' % node.doc or '' - return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n' - - def visit_name(self, node): - """return an astng.Name node as string""" - return node.name - - def visit_pass(self, node): - """return an astng.Pass node as string""" - return 'pass' - - def visit_print(self, node): - """return an astng.Print node as string""" - nodes = ', '.join([n.accept(self) for n in node.values]) - if not node.nl: - nodes = '%s,' % nodes - if node.dest: - return 'print >> %s, %s' % (node.dest.accept(self), nodes) - return 'print %s' % nodes - - def visit_raise(self, node): - """return an astng.Raise node as string""" - if node.exc: - if node.inst: - if node.tback: - return 'raise %s, %s, %s' % (node.exc.accept(self), - node.inst.accept(self), - node.tback.accept(self)) - return 'raise %s, %s' % (node.exc.accept(self), - 
node.inst.accept(self)) - return 'raise %s' % node.exc.accept(self) - return 'raise' - - def visit_return(self, node): - """return an astng.Return node as string""" - if node.value: - return 'return %s' % node.value.accept(self) - else: - return 'return' - - def visit_index(self, node): - """return a astng.Index node as string""" - return node.value.accept(self) - - def visit_set(self, node): - """return an astng.Set node as string""" - return '{%s}' % ', '.join([child.accept(self) for child in node.elts]) - - def visit_setcomp(self, node): - """return an astng.SetComp node as string""" - return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self) - for n in node.generators])) - - def visit_slice(self, node): - """return a astng.Slice node as string""" - lower = node.lower and node.lower.accept(self) or '' - upper = node.upper and node.upper.accept(self) or '' - step = node.step and node.step.accept(self) or '' - if step: - return '%s:%s:%s' % (lower, upper, step) - return '%s:%s' % (lower, upper) - - def visit_subscript(self, node): - """return an astng.Subscript node as string""" - return '%s[%s]' % (node.value.accept(self), node.slice.accept(self)) - - def visit_tryexcept(self, node): - """return an astng.TryExcept node as string""" - trys = ['try:\n%s' % self._stmt_list( node.body)] - for handler in node.handlers: - trys.append(handler.accept(self)) - if node.orelse: - trys.append('else:\n%s' % self._stmt_list(node.orelse)) - return '\n'.join(trys) - - def visit_tryfinally(self, node): - """return an astng.TryFinally node as string""" - return 'try:\n%s\nfinally:\n%s' % (self._stmt_list( node.body), - self._stmt_list(node.finalbody)) - - def visit_tuple(self, node): - """return an astng.Tuple node as string""" - return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) - - def visit_unaryop(self, node): - """return an astng.UnaryOp node as string""" - if node.op == 'not': - operator = 'not ' - else: - operator = node.op - return '%s%s' % 
(operator, node.operand.accept(self)) - - def visit_while(self, node): - """return an astng.While node as string""" - whiles = 'while %s:\n%s' % (node.test.accept(self), - self._stmt_list(node.body)) - if node.orelse: - whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse)) - return whiles - - def visit_with(self, node): # 'with' without 'as' is possible - """return an astng.With node as string""" - as_var = node.vars and " as (%s)" % (node.vars.accept(self)) or "" - withs = 'with (%s)%s:\n%s' % (node.expr.accept(self), as_var, - self._stmt_list( node.body)) - return withs - - def visit_yield(self, node): - """yield an ast.Yield node as string""" - yi_val = node.value and (" " + node.value.accept(self)) or "" - return 'yield' + yi_val - - -class AsStringVisitor3k(AsStringVisitor): - """AsStringVisitor3k overwrites some AsStringVisitor methods""" - - def visit_excepthandler(self, node): - if node.type: - if node.name: - excs = 'except %s as %s' % (node.type.accept(self), - node.name.accept(self)) - else: - excs = 'except %s' % node.type.accept(self) - else: - excs = 'except' - return '%s:\n%s' % (excs, self._stmt_list(node.body)) - - def visit_nonlocal(self, node): - """return an astng.Nonlocal node as string""" - return 'nonlocal %s' % ', '.join(node.names) - - def visit_raise(self, node): - """return an astng.Raise node as string""" - if node.exc: - if node.cause: - return 'raise %s from %s' % (node.exc.accept(self), - node.cause.accept(self)) - return 'raise %s' % node.exc.accept(self) - return 'raise' - - def visit_starred(self, node): - """return Starred node as string""" - return "*" + node.value.accept(self) - -if sys.version_info >= (3, 0): - AsStringVisitor = AsStringVisitor3k - -# this visitor is stateless, thus it can be reused -as_string = AsStringVisitor() - diff --git a/pylibs/logilab/astng/builder.py b/pylibs/logilab/astng/builder.py deleted file mode 100644 index 16c92b06..00000000 --- a/pylibs/logilab/astng/builder.py +++ /dev/null @@ 
-1,226 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""The ASTNGBuilder makes astng from living object and / or from _ast - -The builder is not thread safe and can't be used to parse different sources -at the same time. 
-""" - -__docformat__ = "restructuredtext en" - -import sys, re -from os.path import splitext, basename, dirname, exists, abspath - -from logilab.common.modutils import modpath_from_file - -from logilab.astng.exceptions import ASTNGBuildingException, InferenceError -from logilab.astng.raw_building import InspectBuilder -from logilab.astng.rebuilder import TreeRebuilder -from logilab.astng.manager import ASTNGManager -from logilab.astng.bases import YES, Instance - -from _ast import PyCF_ONLY_AST -def parse(string): - return compile(string, "", 'exec', PyCF_ONLY_AST) - -if sys.version_info >= (3, 0): - from tokenize import detect_encoding - - def open_source_file(filename): - byte_stream = open(filename, 'bU') - encoding = detect_encoding(byte_stream.readline)[0] - stream = open(filename, 'U', encoding=encoding) - try: - data = stream.read() - except UnicodeError, uex: # wrong encodingg - # detect_encoding returns utf-8 if no encoding specified - msg = 'Wrong (%s) or no encoding specified' % encoding - raise ASTNGBuildingException(msg) - return stream, encoding, data - -else: - import re - - _ENCODING_RGX = re.compile("\s*#+.*coding[:=]\s*([-\w.]+)") - - def _guess_encoding(string): - """get encoding from a python file as string or return None if not found - """ - # check for UTF-8 byte-order mark - if string.startswith('\xef\xbb\xbf'): - return 'UTF-8' - for line in string.split('\n', 2)[:2]: - # check for encoding declaration - match = _ENCODING_RGX.match(line) - if match is not None: - return match.group(1) - - def open_source_file(filename): - """get data for parsing a file""" - stream = open(filename, 'U') - data = stream.read() - encoding = _guess_encoding(data) - return stream, encoding, data - -# ast NG builder ############################################################## - -MANAGER = ASTNGManager() - -class ASTNGBuilder(InspectBuilder): - """provide astng building methods""" - rebuilder = TreeRebuilder() - - def __init__(self, manager=None): - 
self._manager = manager or MANAGER - - def module_build(self, module, modname=None): - """build an astng from a living module instance - """ - node = None - path = getattr(module, '__file__', None) - if path is not None: - path_, ext = splitext(module.__file__) - if ext in ('.py', '.pyc', '.pyo') and exists(path_ + '.py'): - node = self.file_build(path_ + '.py', modname) - if node is None: - # this is a built-in module - # get a partial representation by introspection - node = self.inspect_build(module, modname=modname, path=path) - return node - - def file_build(self, path, modname=None): - """build astng from a source code file (i.e. from an ast) - - path is expected to be a python source file - """ - try: - stream, encoding, data = open_source_file(path) - except IOError, exc: - msg = 'Unable to load file %r (%s)' % (path, exc) - raise ASTNGBuildingException(msg) - except SyntaxError, exc: # py3k encoding specification error - raise ASTNGBuildingException(exc) - except LookupError, exc: # unknown encoding - raise ASTNGBuildingException(exc) - # get module name if necessary - if modname is None: - try: - modname = '.'.join(modpath_from_file(path)) - except ImportError: - modname = splitext(basename(path))[0] - # build astng representation - node = self.string_build(data, modname, path) - node.file_encoding = encoding - node.file_stream = stream - return node - - def string_build(self, data, modname='', path=None): - """build astng from source code string and return rebuilded astng""" - module = self._data_build(data, modname, path) - self._manager.astng_cache[module.name] = module - # post tree building steps after we stored the module in the cache: - for from_node in module._from_nodes: - self.add_from_names_to_locals(from_node) - # handle delayed assattr nodes - for delayed in module._delayed_assattr: - self.delayed_assattr(delayed) - for transformer in self._manager.transformers: - transformer(module) - return module - - def _data_build(self, data, modname, 
path): - """build tree node from data and add some informations""" - # this method could be wrapped with a pickle/cache function - node = parse(data + '\n') - if path is not None: - node_file = abspath(path) - else: - node_file = '' - if modname.endswith('.__init__'): - modname = modname[:-9] - package = True - else: - package = path and path.find('__init__.py') > -1 or False - self.rebuilder.init() - module = self.rebuilder.visit_module(node, modname, package) - module.file = module.path = node_file - module._from_nodes = self.rebuilder._from_nodes - module._delayed_assattr = self.rebuilder._delayed_assattr - return module - - def add_from_names_to_locals(self, node): - """store imported names to the locals; - resort the locals if coming from a delayed node - """ - - _key_func = lambda node: node.fromlineno - def sort_locals(my_list): - my_list.sort(key=_key_func) - for (name, asname) in node.names: - if name == '*': - try: - imported = node.root().import_module(node.modname) - except ASTNGBuildingException: - continue - for name in imported.wildcard_import_names(): - node.parent.set_local(name, node) - sort_locals(node.parent.scope().locals[name]) - else: - node.parent.set_local(asname or name, node) - sort_locals(node.parent.scope().locals[asname or name]) - - def delayed_assattr(self, node): - """visit a AssAttr node -> add name to locals, handle members - definition - """ - try: - frame = node.frame() - for infered in node.expr.infer(): - if infered is YES: - continue - try: - if infered.__class__ is Instance: - infered = infered._proxied - iattrs = infered.instance_attrs - elif isinstance(infered, Instance): - # Const, Tuple, ... 
we may be wrong, may be not, but - # anyway we don't want to pollute builtin's namespace - continue - elif infered.is_function: - iattrs = infered.instance_attrs - else: - iattrs = infered.locals - except AttributeError: - # XXX log error - #import traceback - #traceback.print_exc() - continue - values = iattrs.setdefault(node.attrname, []) - if node in values: - continue - # get assign in __init__ first XXX useful ? - if frame.name == '__init__' and values and not \ - values[0].frame().name == '__init__': - values.insert(0, node) - else: - values.append(node) - except InferenceError: - pass - diff --git a/pylibs/logilab/astng/exceptions.py b/pylibs/logilab/astng/exceptions.py deleted file mode 100644 index 7dd6135e..00000000 --- a/pylibs/logilab/astng/exceptions.py +++ /dev/null @@ -1,60 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. 
-# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""this module contains exceptions used in the astng library - -""" - -__doctype__ = "restructuredtext en" - -class ASTNGError(Exception): - """base exception class for all astng related exceptions""" - -class ASTNGBuildingException(ASTNGError): - """exception class when we are unable to build an astng representation""" - -class ResolveError(ASTNGError): - """base class of astng resolution/inference error""" - -class NotFoundError(ResolveError): - """raised when we are unable to resolve a name""" - -class InferenceError(ResolveError): - """raised when we are unable to infer a node""" - -class UnresolvableName(InferenceError): - """raised when we are unable to resolve a name""" - -class NoDefault(ASTNGError): - """raised by function's `default_value` method when an argument has - no default value - """ - diff --git a/pylibs/logilab/astng/inference.py b/pylibs/logilab/astng/inference.py deleted file mode 100644 index 62bd7d96..00000000 --- a/pylibs/logilab/astng/inference.py +++ /dev/null @@ -1,382 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. 
-# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""this module contains a set of functions to handle inference on astng trees -""" - -__doctype__ = "restructuredtext en" - -from itertools import chain -import sys - -from logilab.astng import nodes - -from logilab.astng.manager import ASTNGManager -from logilab.astng.exceptions import (ASTNGBuildingException, ASTNGError, - InferenceError, NoDefault, NotFoundError, UnresolvableName) -from logilab.astng.bases import YES, Instance, InferenceContext, Generator, \ - _infer_stmts, copy_context, path_wrapper, raise_if_nothing_infered -from logilab.astng.protocols import _arguments_infer_argname - -MANAGER = ASTNGManager() - - -class CallContext: - """when inferring a function call, this class is used to remember values - given as argument - """ - def __init__(self, args, starargs, dstarargs): - self.args = [] - self.nargs = {} - for arg in args: - if isinstance(arg, nodes.Keyword): - self.nargs[arg.arg] = arg.value - else: - self.args.append(arg) - self.starargs = starargs - self.dstarargs = dstarargs - - def infer_argument(self, funcnode, name, context): - """infer a function argument value according to the call context""" - # 1. 
search in named keywords - try: - return self.nargs[name].infer(context) - except KeyError: - # Function.args.args can be None in astng (means that we don't have - # information on argnames) - argindex = funcnode.args.find_argname(name)[0] - if argindex is not None: - # 2. first argument of instance/class method - if argindex == 0 and funcnode.type in ('method', 'classmethod'): - if context.boundnode is not None: - boundnode = context.boundnode - else: - # XXX can do better ? - boundnode = funcnode.parent.frame() - if funcnode.type == 'method': - if not isinstance(boundnode, Instance): - boundnode = Instance(boundnode) - return iter((boundnode,)) - if funcnode.type == 'classmethod': - return iter((boundnode,)) - # 2. search arg index - try: - return self.args[argindex].infer(context) - except IndexError: - pass - # 3. search in *args (.starargs) - if self.starargs is not None: - its = [] - for infered in self.starargs.infer(context): - if infered is YES: - its.append((YES,)) - continue - try: - its.append(infered.getitem(argindex, context).infer(context)) - except (InferenceError, AttributeError): - its.append((YES,)) - except (IndexError, TypeError): - continue - if its: - return chain(*its) - # 4. XXX search in **kwargs (.dstarargs) - if self.dstarargs is not None: - its = [] - for infered in self.dstarargs.infer(context): - if infered is YES: - its.append((YES,)) - continue - try: - its.append(infered.getitem(name, context).infer(context)) - except (InferenceError, AttributeError): - its.append((YES,)) - except (IndexError, TypeError): - continue - if its: - return chain(*its) - # 5. */** argument, (Tuple or Dict) - if name == funcnode.args.vararg: - return iter((nodes.const_factory(()))) - if name == funcnode.args.kwarg: - return iter((nodes.const_factory({}))) - # 6. 
return default value if any - try: - return funcnode.args.default_value(name).infer(context) - except NoDefault: - raise InferenceError(name) - - -# .infer method ############################################################### - - -def infer_end(self, context=None): - """inference's end for node such as Module, Class, Function, Const... - """ - yield self -nodes.Module.infer = infer_end -nodes.Class.infer = infer_end -nodes.Function.infer = infer_end -nodes.Lambda.infer = infer_end -nodes.Const.infer = infer_end -nodes.List.infer = infer_end -nodes.Tuple.infer = infer_end -nodes.Dict.infer = infer_end - - -def infer_name(self, context=None): - """infer a Name: use name lookup rules""" - frame, stmts = self.lookup(self.name) - if not stmts: - raise UnresolvableName(self.name) - context = context.clone() - context.lookupname = self.name - return _infer_stmts(stmts, context, frame) -nodes.Name.infer = path_wrapper(infer_name) -nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper - - -def infer_callfunc(self, context=None): - """infer a CallFunc node by trying to guess what the function returns""" - callcontext = context.clone() - callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs) - callcontext.boundnode = None - for callee in self.func.infer(context): - if callee is YES: - yield callee - continue - try: - if hasattr(callee, 'infer_call_result'): - for infered in callee.infer_call_result(self, callcontext): - yield infered - except InferenceError: - ## XXX log error ? 
- continue -nodes.CallFunc.infer = path_wrapper(raise_if_nothing_infered(infer_callfunc)) - - -def infer_import(self, context=None, asname=True): - """infer an Import node: return the imported module/object""" - name = context.lookupname - if name is None: - raise InferenceError() - if asname: - yield self.do_import_module(self.real_name(name)) - else: - yield self.do_import_module(name) -nodes.Import.infer = path_wrapper(infer_import) - -def infer_name_module(self, name): - context = InferenceContext() - context.lookupname = name - return self.infer(context, asname=False) -nodes.Import.infer_name_module = infer_name_module - - -def infer_from(self, context=None, asname=True): - """infer a From nodes: return the imported module/object""" - name = context.lookupname - if name is None: - raise InferenceError() - if asname: - name = self.real_name(name) - module = self.do_import_module(self.modname) - try: - context = copy_context(context) - context.lookupname = name - return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context) - except NotFoundError: - raise InferenceError(name) -nodes.From.infer = path_wrapper(infer_from) - - -def infer_getattr(self, context=None): - """infer a Getattr node by using getattr on the associated object""" - #context = context.clone() - for owner in self.expr.infer(context): - if owner is YES: - yield owner - continue - try: - context.boundnode = owner - for obj in owner.igetattr(self.attrname, context): - yield obj - context.boundnode = None - except (NotFoundError, InferenceError): - context.boundnode = None - except AttributeError: - # XXX method / function - context.boundnode = None -nodes.Getattr.infer = path_wrapper(raise_if_nothing_infered(infer_getattr)) -nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper - - -def infer_global(self, context=None): - if context.lookupname is None: - raise InferenceError() - try: - return 
_infer_stmts(self.root().getattr(context.lookupname), context) - except NotFoundError: - raise InferenceError() -nodes.Global.infer = path_wrapper(infer_global) - - -def infer_subscript(self, context=None): - """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]""" - if isinstance(self.slice, nodes.Index): - index = self.slice.value.infer(context).next() - if index is YES: - yield YES - return - try: - # suppose it's a Tuple/List node (attribute error else) - assigned = self.value.getitem(index.value, context) - except AttributeError: - raise InferenceError() - except (IndexError, TypeError): - yield YES - return - for infered in assigned.infer(context): - yield infered - else: - raise InferenceError() -nodes.Subscript.infer = path_wrapper(infer_subscript) -nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript) - - -UNARY_OP_METHOD = {'+': '__pos__', - '-': '__neg__', - '~': '__invert__', - 'not': None, # XXX not '__nonzero__' - } - -def infer_unaryop(self, context=None): - for operand in self.operand.infer(context): - try: - yield operand.infer_unary_op(self.op) - except TypeError: - continue - except AttributeError: - meth = UNARY_OP_METHOD[self.op] - if meth is None: - yield YES - else: - try: - # XXX just suppose if the type implement meth, returned type - # will be the same - operand.getattr(meth) - yield operand - except GeneratorExit: - raise - except: - yield YES -nodes.UnaryOp.infer = path_wrapper(infer_unaryop) - - -BIN_OP_METHOD = {'+': '__add__', - '-': '__sub__', - '/': '__div__', - '//': '__floordiv__', - '*': '__mul__', - '**': '__power__', - '%': '__mod__', - '&': '__and__', - '|': '__or__', - '^': '__xor__', - '<<': '__lshift__', - '>>': '__rshift__', - } - -def _infer_binop(operator, operand1, operand2, context, failures=None): - if operand1 is YES: - yield operand1 - return - try: - for valnode in operand1.infer_binary_op(operator, operand2, context): - yield valnode - except AttributeError: - try: - # XXX just suppose if 
the type implement meth, returned type - # will be the same - operand1.getattr(BIN_OP_METHOD[operator]) - yield operand1 - except: - if failures is None: - yield YES - else: - failures.append(operand1) - -def infer_binop(self, context=None): - failures = [] - for lhs in self.left.infer(context): - for val in _infer_binop(self.op, lhs, self.right, context, failures): - yield val - for lhs in failures: - for rhs in self.right.infer(context): - for val in _infer_binop(self.op, rhs, lhs, context): - yield val -nodes.BinOp.infer = path_wrapper(infer_binop) - - -def infer_arguments(self, context=None): - name = context.lookupname - if name is None: - raise InferenceError() - return _arguments_infer_argname(self, name, context) -nodes.Arguments.infer = infer_arguments - - -def infer_ass(self, context=None): - """infer a AssName/AssAttr: need to inspect the RHS part of the - assign node - """ - stmt = self.statement() - if isinstance(stmt, nodes.AugAssign): - return stmt.infer(context) - stmts = list(self.assigned_stmts(context=context)) - return _infer_stmts(stmts, context) -nodes.AssName.infer = path_wrapper(infer_ass) -nodes.AssAttr.infer = path_wrapper(infer_ass) - -def infer_augassign(self, context=None): - failures = [] - for lhs in self.target.infer_lhs(context): - for val in _infer_binop(self.op, lhs, self.value, context, failures): - yield val - for lhs in failures: - for rhs in self.value.infer(context): - for val in _infer_binop(self.op, rhs, lhs, context): - yield val -nodes.AugAssign.infer = path_wrapper(infer_augassign) - - -# no infer method on DelName and DelAttr (expected InferenceError) - - -def infer_empty_node(self, context=None): - if not self.has_underlying_object(): - yield YES - else: - try: - for infered in MANAGER.infer_astng_from_something(self.object, - context=context): - yield infered - except ASTNGError: - yield YES -nodes.EmptyNode.infer = path_wrapper(infer_empty_node) - diff --git a/pylibs/logilab/astng/inspector.py 
b/pylibs/logilab/astng/inspector.py deleted file mode 100644 index a4abd1f2..00000000 --- a/pylibs/logilab/astng/inspector.py +++ /dev/null @@ -1,289 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""visitor doing some postprocessing on the astng tree. -Try to resolve definitions (namespace) dictionary, relationship... 
- -This module has been imported from pyreverse -""" - -__docformat__ = "restructuredtext en" - -from os.path import dirname - -from logilab.common.modutils import get_module_part, is_relative, \ - is_standard_module - -from logilab import astng -from logilab.astng.exceptions import InferenceError -from logilab.astng.utils import LocalsVisitor - -class IdGeneratorMixIn: - """ - Mixin adding the ability to generate integer uid - """ - def __init__(self, start_value=0): - self.id_count = start_value - - def init_counter(self, start_value=0): - """init the id counter - """ - self.id_count = start_value - - def generate_id(self): - """generate a new identifier - """ - self.id_count += 1 - return self.id_count - - -class Linker(IdGeneratorMixIn, LocalsVisitor): - """ - walk on the project tree and resolve relationships. - - According to options the following attributes may be added to visited nodes: - - * uid, - a unique identifier for the node (on astng.Project, astng.Module, - astng.Class and astng.locals_type). Only if the linker has been instantiated - with tag=True parameter (False by default). - - * Function - a mapping from locals names to their bounded value, which may be a - constant like a string or an integer, or an astng node (on astng.Module, - astng.Class and astng.Function). 
- - * instance_attrs_type - as locals_type but for klass member attributes (only on astng.Class) - - * implements, - list of implemented interface _objects_ (only on astng.Class nodes) - """ - - def __init__(self, project, inherited_interfaces=0, tag=False): - IdGeneratorMixIn.__init__(self) - LocalsVisitor.__init__(self) - # take inherited interface in consideration or not - self.inherited_interfaces = inherited_interfaces - # tag nodes or not - self.tag = tag - # visited project - self.project = project - - - def visit_project(self, node): - """visit an astng.Project node - - * optionally tag the node with a unique id - """ - if self.tag: - node.uid = self.generate_id() - for module in node.modules: - self.visit(module) - - def visit_package(self, node): - """visit an astng.Package node - - * optionally tag the node with a unique id - """ - if self.tag: - node.uid = self.generate_id() - for subelmt in node.values(): - self.visit(subelmt) - - def visit_module(self, node): - """visit an astng.Module node - - * set the locals_type mapping - * set the depends mapping - * optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - node.depends = [] - if self.tag: - node.uid = self.generate_id() - - def visit_class(self, node): - """visit an astng.Class node - - * set the locals_type and instance_attrs_type mappings - * set the implements list and build it - * optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - if self.tag: - node.uid = self.generate_id() - # resolve ancestors - for baseobj in node.ancestors(recurs=False): - specializations = getattr(baseobj, 'specializations', []) - specializations.append(node) - baseobj.specializations = specializations - # resolve instance attributes - node.instance_attrs_type = {} - for assattrs in node.instance_attrs.values(): - for assattr in assattrs: - self.handle_assattr_type(assattr, node) - # resolve 
implemented interface - try: - node.implements = list(node.interfaces(self.inherited_interfaces)) - except InferenceError: - node.implements = () - - def visit_function(self, node): - """visit an astng.Function node - - * set the locals_type mapping - * optionally tag the node with a unique id - """ - if hasattr(node, 'locals_type'): - return - node.locals_type = {} - if self.tag: - node.uid = self.generate_id() - - link_project = visit_project - link_module = visit_module - link_class = visit_class - link_function = visit_function - - def visit_assname(self, node): - """visit an astng.AssName node - - handle locals_type - """ - # avoid double parsing done by different Linkers.visit - # running over the same project: - if hasattr(node, '_handled'): - return - node._handled = True - if node.name in node.frame(): - frame = node.frame() - else: - # the name has been defined as 'global' in the frame and belongs - # there. Btw the frame is not yet visited as the name is in the - # root locals; the frame hence has no locals_type attribute - frame = node.root() - try: - values = node.infered() - try: - already_infered = frame.locals_type[node.name] - for valnode in values: - if not valnode in already_infered: - already_infered.append(valnode) - except KeyError: - frame.locals_type[node.name] = values - except astng.InferenceError: - pass - - def handle_assattr_type(self, node, parent): - """handle an astng.AssAttr node - - handle instance_attrs_type - """ - try: - values = list(node.infer()) - try: - already_infered = parent.instance_attrs_type[node.attrname] - for valnode in values: - if not valnode in already_infered: - already_infered.append(valnode) - except KeyError: - parent.instance_attrs_type[node.attrname] = values - except astng.InferenceError: - pass - - def visit_import(self, node): - """visit an astng.Import node - - resolve module dependencies - """ - context_file = node.root().file - for name in node.names: - relative = is_relative(name[0], context_file) - 
self._imported_module(node, name[0], relative) - - - def visit_from(self, node): - """visit an astng.From node - - resolve module dependencies - """ - basename = node.modname - context_file = node.root().file - if context_file is not None: - relative = is_relative(basename, context_file) - else: - relative = False - for name in node.names: - if name[0] == '*': - continue - # analyze dependencies - fullname = '%s.%s' % (basename, name[0]) - if fullname.find('.') > -1: - try: - # XXX: don't use get_module_part, missing package precedence - fullname = get_module_part(fullname) - except ImportError: - continue - if fullname != basename: - self._imported_module(node, fullname, relative) - - - def compute_module(self, context_name, mod_path): - """return true if the module should be added to dependencies""" - package_dir = dirname(self.project.path) - if context_name == mod_path: - return 0 - elif is_standard_module(mod_path, (package_dir,)): - return 1 - return 0 - - # protected methods ######################################################## - - def _imported_module(self, node, mod_path, relative): - """notify an imported module, used to analyze dependencies - """ - module = node.root() - context_name = module.name - if relative: - mod_path = '%s.%s' % ('.'.join(context_name.split('.')[:-1]), - mod_path) - if self.compute_module(context_name, mod_path): - # handle dependencies - if not hasattr(module, 'depends'): - module.depends = [] - mod_paths = module.depends - if not mod_path in mod_paths: - mod_paths.append(mod_path) diff --git a/pylibs/logilab/astng/manager.py b/pylibs/logilab/astng/manager.py deleted file mode 100644 index 8a4f02bb..00000000 --- a/pylibs/logilab/astng/manager.py +++ /dev/null @@ -1,299 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. 
-# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""astng manager: avoid multiple astng build of a same module when -possible by providing a class responsible to get astng representation -from various source and using a cache of built modules) -""" - -__docformat__ = "restructuredtext en" - -import sys -import os -from os.path import dirname, basename, abspath, join, isdir, exists - -from logilab.common.modutils import NoSourceFile, is_python_source, \ - file_from_modpath, load_module_from_name, modpath_from_file, \ - get_module_files, get_source_file, zipimport -from logilab.common.configuration import OptionsProviderMixIn - -from logilab.astng.exceptions import ASTNGBuildingException - -def astng_wrapper(func, modname): - """wrapper to give to ASTNGManager.project_from_files""" - print 'parsing %s...' % modname - try: - return func(modname) - except ASTNGBuildingException, exc: - print exc - except Exception, exc: - import traceback - traceback.print_exc() - -def _silent_no_wrap(func, modname): - """silent wrapper that doesn't do anything; can be used for tests""" - return func(modname) - -def safe_repr(obj): - try: - return repr(obj) - except: - return '???' - - - -class ASTNGManager(OptionsProviderMixIn): - """the astng manager, responsible to build astng from files - or modules. 
- - Use the Borg pattern. - """ - - name = 'astng loader' - options = (("ignore", - {'type' : "csv", 'metavar' : "", - 'dest' : "black_list", "default" : ('CVS',), - 'help' : "add (may be a directory) to the black list\ -. It should be a base name, not a path. You may set this option multiple times\ -."}), - ("project", - {'default': "No Name", 'type' : 'string', 'short': 'p', - 'metavar' : '', - 'help' : 'set the project name.'}), - ) - brain = {} - def __init__(self): - self.__dict__ = ASTNGManager.brain - if not self.__dict__: - OptionsProviderMixIn.__init__(self) - self.load_defaults() - # NOTE: cache entries are added by the [re]builder - self.astng_cache = {} - self._mod_file_cache = {} - self.transformers = [] - - def astng_from_file(self, filepath, modname=None, fallback=True, source=False): - """given a module name, return the astng object""" - try: - filepath = get_source_file(filepath, include_no_ext=True) - source = True - except NoSourceFile: - pass - if modname is None: - try: - modname = '.'.join(modpath_from_file(filepath)) - except ImportError: - modname = filepath - if modname in self.astng_cache: - return self.astng_cache[modname] - if source: - from logilab.astng.builder import ASTNGBuilder - return ASTNGBuilder(self).file_build(filepath, modname) - elif fallback and modname: - return self.astng_from_module_name(modname) - raise ASTNGBuildingException('unable to get astng for file %s' % - filepath) - - def astng_from_module_name(self, modname, context_file=None): - """given a module name, return the astng object""" - if modname in self.astng_cache: - return self.astng_cache[modname] - if modname == '__main__': - from logilab.astng.builder import ASTNGBuilder - return ASTNGBuilder(self).string_build('', modname) - old_cwd = os.getcwd() - if context_file: - os.chdir(dirname(context_file)) - try: - filepath = self.file_from_module_name(modname, context_file) - if filepath is not None and not is_python_source(filepath): - module = 
self.zip_import_data(filepath) - if module is not None: - return module - if filepath is None or not is_python_source(filepath): - try: - module = load_module_from_name(modname) - except Exception, ex: - msg = 'Unable to load module %s (%s)' % (modname, ex) - raise ASTNGBuildingException(msg) - return self.astng_from_module(module, modname) - return self.astng_from_file(filepath, modname, fallback=False) - finally: - os.chdir(old_cwd) - - def zip_import_data(self, filepath): - if zipimport is None: - return None - from logilab.astng.builder import ASTNGBuilder - builder = ASTNGBuilder(self) - for ext in ('.zip', '.egg'): - try: - eggpath, resource = filepath.rsplit(ext + '/', 1) - except ValueError: - continue - try: - importer = zipimport.zipimporter(eggpath + ext) - zmodname = resource.replace('/', '.') - if importer.is_package(resource): - zmodname = zmodname + '.__init__' - module = builder.string_build(importer.get_source(resource), - zmodname, filepath) - return module - except: - continue - return None - - def file_from_module_name(self, modname, contextfile): - try: - value = self._mod_file_cache[(modname, contextfile)] - except KeyError: - try: - value = file_from_modpath(modname.split('.'), - context_file=contextfile) - except ImportError, ex: - msg = 'Unable to load module %s (%s)' % (modname, ex) - value = ASTNGBuildingException(msg) - self._mod_file_cache[(modname, contextfile)] = value - if isinstance(value, ASTNGBuildingException): - raise value - return value - - def astng_from_module(self, module, modname=None): - """given an imported module, return the astng object""" - modname = modname or module.__name__ - if modname in self.astng_cache: - return self.astng_cache[modname] - try: - # some builtin modules don't have __file__ attribute - filepath = module.__file__ - if is_python_source(filepath): - return self.astng_from_file(filepath, modname) - except AttributeError: - pass - from logilab.astng.builder import ASTNGBuilder - return 
ASTNGBuilder(self).module_build(module, modname) - - def astng_from_class(self, klass, modname=None): - """get astng for the given class""" - if modname is None: - try: - modname = klass.__module__ - except AttributeError: - raise ASTNGBuildingException( - 'Unable to get module for class %s' % safe_repr(klass)) - modastng = self.astng_from_module_name(modname) - return modastng.getattr(klass.__name__)[0] # XXX - - - def infer_astng_from_something(self, obj, context=None): - """infer astng for the given class""" - if hasattr(obj, '__class__') and not isinstance(obj, type): - klass = obj.__class__ - else: - klass = obj - try: - modname = klass.__module__ - except AttributeError: - raise ASTNGBuildingException( - 'Unable to get module for %s' % safe_repr(klass)) - except Exception, ex: - raise ASTNGBuildingException( - 'Unexpected error while retrieving module for %s: %s' - % (safe_repr(klass), ex)) - try: - name = klass.__name__ - except AttributeError: - raise ASTNGBuildingException( - 'Unable to get name for %s' % safe_repr(klass)) - except Exception, ex: - raise ASTNGBuildingException( - 'Unexpected error while retrieving name for %s: %s' - % (safe_repr(klass), ex)) - # take care, on living object __module__ is regularly wrong :( - modastng = self.astng_from_module_name(modname) - if klass is obj: - for infered in modastng.igetattr(name, context): - yield infered - else: - for infered in modastng.igetattr(name, context): - yield infered.instanciate_class() - - def project_from_files(self, files, func_wrapper=astng_wrapper, - project_name=None, black_list=None): - """return a Project from a list of files or modules""" - # build the project representation - project_name = project_name or self.config.project - black_list = black_list or self.config.black_list - project = Project(project_name) - for something in files: - if not exists(something): - fpath = file_from_modpath(something.split('.')) - elif isdir(something): - fpath = join(something, '__init__.py') - else: 
- fpath = something - astng = func_wrapper(self.astng_from_file, fpath) - if astng is None: - continue - # XXX why is first file defining the project.path ? - project.path = project.path or astng.file - project.add_module(astng) - base_name = astng.name - # recurse in package except if __init__ was explicitly given - if astng.package and something.find('__init__') == -1: - # recurse on others packages / modules if this is a package - for fpath in get_module_files(dirname(astng.file), - black_list): - astng = func_wrapper(self.astng_from_file, fpath) - if astng is None or astng.name == base_name: - continue - project.add_module(astng) - return project - - def register_transformer(self, transformer): - self.transformers.append(transformer) - -class Project: - """a project handle a set of modules / packages""" - def __init__(self, name=''): - self.name = name - self.path = None - self.modules = [] - self.locals = {} - self.__getitem__ = self.locals.__getitem__ - self.__iter__ = self.locals.__iter__ - self.values = self.locals.values - self.keys = self.locals.keys - self.items = self.locals.items - - def add_module(self, node): - self.locals[node.name] = node - self.modules.append(node) - - def get_module(self, name): - return self.locals[name] - - def get_children(self): - return self.modules - - def __repr__(self): - return '' % (self.name, id(self), - len(self.modules)) - - diff --git a/pylibs/logilab/astng/nodes.py b/pylibs/logilab/astng/nodes.py deleted file mode 100644 index 56b9980f..00000000 --- a/pylibs/logilab/astng/nodes.py +++ /dev/null @@ -1,75 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. 
-# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -""" -on all nodes : - .is_statement, returning true if the node should be considered as a - statement node - .root(), returning the root node of the tree (i.e. a Module) - .previous_sibling(), returning previous sibling statement node - .next_sibling(), returning next sibling statement node - .statement(), returning the first parent node marked as statement node - .frame(), returning the first node defining a new local scope (i.e. - Module, Function or Class) - .set_local(name, node), define an identifier on the first parent frame, - with the node defining it. This is used by the astng builder and should not - be used from out there. 
- -on From and Import : - .real_name(name), - - -""" - -__docformat__ = "restructuredtext en" - -from logilab.astng.node_classes import Arguments, AssAttr, Assert, Assign, \ - AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \ - Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \ - Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \ - From, Getattr, Global, If, IfExp, Import, Index, Keyword, \ - List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \ - TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, \ - const_factory -from logilab.astng.scoped_nodes import Module, GenExpr, Lambda, DictComp, \ - ListComp, SetComp, Function, Class - -ALL_NODE_CLASSES = ( - Arguments, AssAttr, Assert, Assign, AssName, AugAssign, - Backquote, BinOp, BoolOp, Break, - CallFunc, Class, Compare, Comprehension, Const, Continue, - Decorators, DelAttr, DelName, Delete, - Dict, DictComp, Discard, - Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, - For, From, Function, - Getattr, GenExpr, Global, - If, IfExp, Import, Index, - Keyword, - Lambda, List, ListComp, - Name, Nonlocal, - Module, - Pass, Print, - Raise, Return, - Set, SetComp, Slice, Starred, Subscript, - TryExcept, TryFinally, Tuple, - UnaryOp, - While, With, - Yield, - ) - diff --git a/pylibs/logilab/astng/protocols.py b/pylibs/logilab/astng/protocols.py deleted file mode 100644 index d8c02e38..00000000 --- a/pylibs/logilab/astng/protocols.py +++ /dev/null @@ -1,321 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. 
-# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""this module contains a set of functions to handle python protocols for nodes -where it makes sense. -""" - -__doctype__ = "restructuredtext en" - -from logilab.astng.exceptions import InferenceError, NoDefault -from logilab.astng.node_classes import unpack_infer -from logilab.astng.bases import copy_context, \ - raise_if_nothing_infered, yes_if_nothing_infered, Instance, Generator, YES -from logilab.astng.nodes import const_factory -from logilab.astng import nodes - -# unary operations ############################################################ - -def tl_infer_unary_op(self, operator): - if operator == 'not': - return const_factory(not bool(self.elts)) - raise TypeError() # XXX log unsupported operation -nodes.Tuple.infer_unary_op = tl_infer_unary_op -nodes.List.infer_unary_op = tl_infer_unary_op - - -def dict_infer_unary_op(self, operator): - if operator == 'not': - return const_factory(not bool(self.items)) - raise TypeError() # XXX log unsupported operation -nodes.Dict.infer_unary_op = dict_infer_unary_op - - -def const_infer_unary_op(self, operator): - if operator == 'not': - return const_factory(not self.value) - # XXX log potentially raised TypeError - elif operator == '+': - return const_factory(+self.value) - else: # operator == '-': - return const_factory(-self.value) -nodes.Const.infer_unary_op = const_infer_unary_op - 
- -# binary operations ########################################################### - -BIN_OP_IMPL = {'+': lambda a, b: a + b, - '-': lambda a, b: a - b, - '/': lambda a, b: a / b, - '//': lambda a, b: a // b, - '*': lambda a, b: a * b, - '**': lambda a, b: a ** b, - '%': lambda a, b: a % b, - '&': lambda a, b: a & b, - '|': lambda a, b: a | b, - '^': lambda a, b: a ^ b, - '<<': lambda a, b: a << b, - '>>': lambda a, b: a >> b, - } -for key, impl in BIN_OP_IMPL.items(): - BIN_OP_IMPL[key+'='] = impl - -def const_infer_binary_op(self, operator, other, context): - for other in other.infer(context): - if isinstance(other, nodes.Const): - try: - impl = BIN_OP_IMPL[operator] - - try: - yield const_factory(impl(self.value, other.value)) - except Exception: - # ArithmeticError is not enough: float >> float is a TypeError - # TODO : let pylint know about the problem - pass - except TypeError: - # XXX log TypeError - continue - elif other is YES: - yield other - else: - try: - for val in other.infer_binary_op(operator, self, context): - yield val - except AttributeError: - yield YES -nodes.Const.infer_binary_op = yes_if_nothing_infered(const_infer_binary_op) - - -def tl_infer_binary_op(self, operator, other, context): - for other in other.infer(context): - if isinstance(other, self.__class__) and operator == '+': - node = self.__class__() - elts = [n for elt in self.elts for n in elt.infer(context) - if not n is YES] - elts += [n for elt in other.elts for n in elt.infer(context) - if not n is YES] - node.elts = elts - yield node - elif isinstance(other, nodes.Const) and operator == '*': - if not isinstance(other.value, int): - yield YES - continue - node = self.__class__() - elts = [n for elt in self.elts for n in elt.infer(context) - if not n is YES] * other.value - node.elts = elts - yield node - elif isinstance(other, Instance) and not isinstance(other, nodes.Const): - yield YES - # XXX else log TypeError -nodes.Tuple.infer_binary_op = 
yes_if_nothing_infered(tl_infer_binary_op) -nodes.List.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op) - - -def dict_infer_binary_op(self, operator, other, context): - for other in other.infer(context): - if isinstance(other, Instance) and isinstance(other._proxied, nodes.Class): - yield YES - # XXX else log TypeError -nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op) - - -# assignment ################################################################## - -"""the assigned_stmts method is responsible to return the assigned statement -(e.g. not inferred) according to the assignment type. - -The `asspath` argument is used to record the lhs path of the original node. -For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath -will be [1, 1] once arrived to the Assign node. - -The `context` argument is the current inference context which should be given -to any intermediary inference necessary. -""" - -def _resolve_looppart(parts, asspath, context): - """recursive function to resolve multiple assignments on loops""" - asspath = asspath[:] - index = asspath.pop(0) - for part in parts: - if part is YES: - continue - # XXX handle __iter__ and log potentially detected errors - if not hasattr(part, 'itered'): - continue - try: - itered = part.itered() - except TypeError: - continue # XXX log error - for stmt in itered: - try: - assigned = stmt.getitem(index, context) - except (AttributeError, IndexError): - continue - except TypeError, exc: # stmt is unsubscriptable Const - continue - if not asspath: - # we achieved to resolved the assignment path, - # don't infer the last part - yield assigned - elif assigned is YES: - break - else: - # we are not yet on the last part of the path - # search on each possibly inferred value - try: - for infered in _resolve_looppart(assigned.infer(context), - asspath, context): - yield infered - except InferenceError: - break - - -def for_assigned_stmts(self, node, context=None, 
asspath=None): - if asspath is None: - for lst in self.iter.infer(context): - if isinstance(lst, (nodes.Tuple, nodes.List)): - for item in lst.elts: - yield item - else: - for infered in _resolve_looppart(self.iter.infer(context), - asspath, context): - yield infered - -nodes.For.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts) -nodes.Comprehension.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts) - - -def mulass_assigned_stmts(self, node, context=None, asspath=None): - if asspath is None: - asspath = [] - asspath.insert(0, self.elts.index(node)) - return self.parent.assigned_stmts(self, context, asspath) -nodes.Tuple.assigned_stmts = mulass_assigned_stmts -nodes.List.assigned_stmts = mulass_assigned_stmts - - -def assend_assigned_stmts(self, context=None): - return self.parent.assigned_stmts(self, context=context) -nodes.AssName.assigned_stmts = assend_assigned_stmts -nodes.AssAttr.assigned_stmts = assend_assigned_stmts - - -def _arguments_infer_argname(self, name, context): - # arguments information may be missing, in which case we can't do anything - # more - if not (self.args or self.vararg or self.kwarg): - yield YES - return - # first argument of instance/class method - if self.args and getattr(self.args[0], 'name', None) == name: - functype = self.parent.type - if functype == 'method': - yield Instance(self.parent.parent.frame()) - return - if functype == 'classmethod': - yield self.parent.parent.frame() - return - if name == self.vararg: - yield const_factory(()) - return - if name == self.kwarg: - yield const_factory({}) - return - # if there is a default value, yield it. 
And then yield YES to reflect - # we can't guess given argument value - try: - context = copy_context(context) - for infered in self.default_value(name).infer(context): - yield infered - yield YES - except NoDefault: - yield YES - - -def arguments_assigned_stmts(self, node, context, asspath=None): - if context.callcontext: - # reset call context/name - callcontext = context.callcontext - context = copy_context(context) - context.callcontext = None - for infered in callcontext.infer_argument(self.parent, node.name, context): - yield infered - return - for infered in _arguments_infer_argname(self, node.name, context): - yield infered -nodes.Arguments.assigned_stmts = arguments_assigned_stmts - - -def assign_assigned_stmts(self, node, context=None, asspath=None): - if not asspath: - yield self.value - return - for infered in _resolve_asspart(self.value.infer(context), asspath, context): - yield infered -nodes.Assign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts) -nodes.AugAssign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts) - - -def _resolve_asspart(parts, asspath, context): - """recursive function to resolve multiple assignments""" - asspath = asspath[:] - index = asspath.pop(0) - for part in parts: - if hasattr(part, 'getitem'): - try: - assigned = part.getitem(index, context) - # XXX raise a specific exception to avoid potential hiding of - # unexpected exception ? 
- except (TypeError, IndexError): - return - if not asspath: - # we achieved to resolved the assignment path, don't infer the - # last part - yield assigned - elif assigned is YES: - return - else: - # we are not yet on the last part of the path search on each - # possibly inferred value - try: - for infered in _resolve_asspart(assigned.infer(context), - asspath, context): - yield infered - except InferenceError: - return - - -def excepthandler_assigned_stmts(self, node, context=None, asspath=None): - for assigned in unpack_infer(self.type): - if isinstance(assigned, nodes.Class): - assigned = Instance(assigned) - yield assigned -nodes.ExceptHandler.assigned_stmts = raise_if_nothing_infered(excepthandler_assigned_stmts) - - -def with_assigned_stmts(self, node, context=None, asspath=None): - if asspath is None: - for lst in self.vars.infer(context): - if isinstance(lst, (nodes.Tuple, nodes.List)): - for item in lst.nodes: - yield item -nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts) - - diff --git a/pylibs/logilab/astng/rebuilder.py b/pylibs/logilab/astng/rebuilder.py deleted file mode 100644 index bac7a095..00000000 --- a/pylibs/logilab/astng/rebuilder.py +++ /dev/null @@ -1,864 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""this module contains utilities for rebuilding a _ast tree in -order to get a single ASTNG representation -""" - -import sys -from _ast import (Expr as Discard, Str, - # binary operators - Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, - LShift, RShift, - # logical operators - And, Or, - # unary operators - UAdd, USub, Not, Invert, - # comparison operators - Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn, - ) - -from logilab.astng.exceptions import ASTNGBuildingException -from logilab.astng import nodes as new - - -_BIN_OP_CLASSES = {Add: '+', - BitAnd: '&', - BitOr: '|', - BitXor: '^', - Div: '/', - FloorDiv: '//', - Mod: '%', - Mult: '*', - Pow: '**', - Sub: '-', - LShift: '<<', - RShift: '>>'} - -_BOOL_OP_CLASSES = {And: 'and', - Or: 'or'} - -_UNARY_OP_CLASSES = {UAdd: '+', - USub: '-', - Not: 'not', - Invert: '~'} - -_CMP_OP_CLASSES = {Eq: '==', - Gt: '>', - GtE: '>=', - In: 'in', - Is: 'is', - IsNot: 'is not', - Lt: '<', - LtE: '<=', - NotEq: '!=', - NotIn: 'not in'} - -CONST_NAME_TRANSFORMS = {'None': None, - 'True': True, - 'False': False} - -REDIRECT = {'arguments': 'Arguments', - 'Attribute': 'Getattr', - 'comprehension': 'Comprehension', - 'Call': 'CallFunc', - 'ClassDef': 'Class', - "ListCompFor": 'Comprehension', - "GenExprFor": 'Comprehension', - 'excepthandler': 'ExceptHandler', - 'Expr': 'Discard', - 'FunctionDef': 'Function', - 'GeneratorExp': 'GenExpr', - 'ImportFrom': 'From', - 'keyword': 'Keyword', - 'Repr': 'Backquote', - } - -def _init_set_doc(node, newnode): - newnode.doc = None - try: - if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str): - newnode.tolineno = node.body[0].lineno - newnode.doc = node.body[0].value.s - node.body = node.body[1:] - - except IndexError: - pass # ast built from scratch - -def 
_lineno_parent(oldnode, newnode, parent): - newnode.parent = parent - if hasattr(oldnode, 'lineno'): - newnode.lineno = oldnode.lineno - if hasattr(oldnode, 'col_offset'): - newnode.col_offset = oldnode.col_offset - -def _set_infos(oldnode, newnode, parent): - newnode.parent = parent - if hasattr(oldnode, 'lineno'): - newnode.lineno = oldnode.lineno - if hasattr(oldnode, 'col_offset'): - newnode.col_offset = oldnode.col_offset - newnode.set_line_info(newnode.last_child()) # set_line_info accepts None - - - - -class TreeRebuilder(object): - """Rebuilds the _ast tree to become an ASTNG tree""" - - _visit_meths = {} - def __init__(self): - self.init() - - def init(self): - self.asscontext = None - self._metaclass = [''] - self._global_names = [] - self._from_nodes = [] - self._delayed_assattr = [] - - def visit(self, node, parent): - cls = node.__class__ - if cls in self._visit_meths: - return self._visit_meths[cls](node, parent) - else: - cls_name = cls.__name__ - visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower() - visit_method = getattr(self, visit_name) - self._visit_meths[cls] = visit_method - return visit_method(node, parent) - - def _save_assignment(self, node, name=None): - """save assignement situation since node.parent is not available yet""" - if self._global_names and node.name in self._global_names[-1]: - node.root().set_local(node.name, node) - else: - node.parent.set_local(node.name, node) - - - def visit_arguments(self, node, parent): - """visit a Arguments node by returning a fresh instance of it""" - newnode = new.Arguments() - _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.args = [self.visit(child, newnode) for child in node.args] - self.asscontext = None - newnode.defaults = [self.visit(child, newnode) for child in node.defaults] - newnode.vararg = node.vararg - newnode.kwarg = node.kwarg - # save argument names in locals: - if node.vararg: - newnode.parent.set_local(newnode.vararg, newnode) - if node.kwarg: 
- newnode.parent.set_local(newnode.kwarg, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_assattr(self, node, parent): - """visit a AssAttr node by returning a fresh instance of it""" - assc, self.asscontext = self.asscontext, None - newnode = new.AssAttr() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.expr, newnode) - self.asscontext = assc - self._delayed_assattr.append(newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_assert(self, node, parent): - """visit a Assert node by returning a fresh instance of it""" - newnode = new.Assert() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - if node.msg is not None: - newnode.fail = self.visit(node.msg, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_assign(self, node, parent): - """visit a Assign node by returning a fresh instance of it""" - newnode = new.Assign() - _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.targets = [self.visit(child, newnode) for child in node.targets] - self.asscontext = None - newnode.value = self.visit(node.value, newnode) - # set some function or metaclass infos XXX explain ? - klass = newnode.parent.frame() - if (isinstance(klass, new.Class) - and isinstance(newnode.value, new.CallFunc) - and isinstance(newnode.value.func, new.Name)): - func_name = newnode.value.func.name - for ass_node in newnode.targets: - try: - meth = klass[ass_node.name] - if isinstance(meth, new.Function): - if func_name in ('classmethod', 'staticmethod'): - meth.type = func_name - elif func_name == 'classproperty': # see lgc.decorators - meth.type = 'classmethod' - meth.extra_decorators.append(newnode.value) - except (AttributeError, KeyError): - continue - elif getattr(newnode.targets[0], 'name', None) == '__metaclass__': - # XXX check more... 
- self._metaclass[-1] = 'type' # XXX get the actual metaclass - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_assname(self, node, parent, node_name=None): - '''visit a node and return a AssName node''' - newnode = new.AssName() - _set_infos(node, newnode, parent) - newnode.name = node_name - self._save_assignment(newnode) - return newnode - - def visit_augassign(self, node, parent): - """visit a AugAssign node by returning a fresh instance of it""" - newnode = new.AugAssign() - _lineno_parent(node, newnode, parent) - newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "=" - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_backquote(self, node, parent): - """visit a Backquote node by returning a fresh instance of it""" - newnode = new.Backquote() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_binop(self, node, parent): - """visit a BinOp node by returning a fresh instance of it""" - newnode = new.BinOp() - _lineno_parent(node, newnode, parent) - newnode.left = self.visit(node.left, newnode) - newnode.right = self.visit(node.right, newnode) - newnode.op = _BIN_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_boolop(self, node, parent): - """visit a BoolOp node by returning a fresh instance of it""" - newnode = new.BoolOp() - _lineno_parent(node, newnode, parent) - newnode.values = [self.visit(child, newnode) for child in node.values] - newnode.op = _BOOL_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_break(self, node, parent): - """visit a Break node by returning a fresh instance of it""" - newnode = new.Break() - _set_infos(node, 
newnode, parent) - return newnode - - def visit_callfunc(self, node, parent): - """visit a CallFunc node by returning a fresh instance of it""" - newnode = new.CallFunc() - _lineno_parent(node, newnode, parent) - newnode.func = self.visit(node.func, newnode) - newnode.args = [self.visit(child, newnode) for child in node.args] - if node.starargs is not None: - newnode.starargs = self.visit(node.starargs, newnode) - if node.kwargs is not None: - newnode.kwargs = self.visit(node.kwargs, newnode) - newnode.args.extend(self.visit(child, newnode) for child in node.keywords) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_class(self, node, parent): - """visit a Class node to become astng""" - self._metaclass.append(self._metaclass[-1]) - newnode = new.Class(node.name, None) - _lineno_parent(node, newnode, parent) - _init_set_doc(node, newnode) - newnode.bases = [self.visit(child, newnode) for child in node.bases] - newnode.body = [self.visit(child, newnode) for child in node.body] - if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6 - newnode.decorators = self.visit_decorators(node, newnode) - newnode.set_line_info(newnode.last_child()) - metaclass = self._metaclass.pop() - if not newnode.bases: - # no base classes, detect new / style old style according to - # current scope - newnode._newstyle = metaclass == 'type' - newnode.parent.frame().set_local(newnode.name, newnode) - return newnode - - def visit_const(self, node, parent): - """visit a Const node by returning a fresh instance of it""" - newnode = new.Const(node.value) - _set_infos(node, newnode, parent) - return newnode - - def visit_continue(self, node, parent): - """visit a Continue node by returning a fresh instance of it""" - newnode = new.Continue() - _set_infos(node, newnode, parent) - return newnode - - def visit_compare(self, node, parent): - """visit a Compare node by returning a fresh instance of it""" - newnode = new.Compare() - _lineno_parent(node, 
newnode, parent) - newnode.left = self.visit(node.left, newnode) - newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode)) - for (op, expr) in zip(node.ops, node.comparators)] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_comprehension(self, node, parent): - """visit a Comprehension node by returning a fresh instance of it""" - newnode = new.Comprehension() - _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.iter = self.visit(node.iter, newnode) - newnode.ifs = [self.visit(child, newnode) for child in node.ifs] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_decorators(self, node, parent): - """visit a Decorators node by returning a fresh instance of it""" - # /!\ node is actually a _ast.Function node while - # parent is a astng.nodes.Function node - newnode = new.Decorators() - _lineno_parent(node, newnode, parent) - if 'decorators' in node._fields: # py < 2.6, i.e. 
2.5 - decorators = node.decorators - else: - decorators= node.decorator_list - newnode.nodes = [self.visit(child, newnode) for child in decorators] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_delete(self, node, parent): - """visit a Delete node by returning a fresh instance of it""" - newnode = new.Delete() - _lineno_parent(node, newnode, parent) - self.asscontext = "Del" - newnode.targets = [self.visit(child, newnode) for child in node.targets] - self.asscontext = None - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_dict(self, node, parent): - """visit a Dict node by returning a fresh instance of it""" - newnode = new.Dict() - _lineno_parent(node, newnode, parent) - newnode.items = [(self.visit(key, newnode), self.visit(value, newnode)) - for key, value in zip(node.keys, node.values)] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_dictcomp(self, node, parent): - """visit a DictComp node by returning a fresh instance of it""" - newnode = new.DictComp() - _lineno_parent(node, newnode, parent) - newnode.key = self.visit(node.key, newnode) - newnode.value = self.visit(node.value, newnode) - newnode.generators = [self.visit(child, newnode) - for child in node.generators] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_discard(self, node, parent): - """visit a Discard node by returning a fresh instance of it""" - newnode = new.Discard() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_ellipsis(self, node, parent): - """visit an Ellipsis node by returning a fresh instance of it""" - newnode = new.Ellipsis() - _set_infos(node, newnode, parent) - return newnode - - def visit_emptynode(self, node, parent): - """visit an EmptyNode node by returning a fresh instance of it""" - newnode = new.EmptyNode() - _set_infos(node, newnode, parent) - 
return newnode - - def visit_excepthandler(self, node, parent): - """visit an ExceptHandler node by returning a fresh instance of it""" - newnode = new.ExceptHandler() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.type = self.visit(node.type, newnode) - if node.name is not None: - # /!\ node.name can be a tuple - self.asscontext = "Ass" - newnode.name = self.visit(node.name, newnode) - self.asscontext = None - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_exec(self, node, parent): - """visit an Exec node by returning a fresh instance of it""" - newnode = new.Exec() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.body, newnode) - if node.globals is not None: - newnode.globals = self.visit(node.globals, newnode) - if node.locals is not None: - newnode.locals = self.visit(node.locals, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_extslice(self, node, parent): - """visit an ExtSlice node by returning a fresh instance of it""" - newnode = new.ExtSlice() - _lineno_parent(node, newnode, parent) - newnode.dims = [self.visit(dim, newnode) for dim in node.dims] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_for(self, node, parent): - """visit a For node by returning a fresh instance of it""" - newnode = new.For() - _lineno_parent(node, newnode, parent) - self.asscontext = "Ass" - newnode.target = self.visit(node.target, newnode) - self.asscontext = None - newnode.iter = self.visit(node.iter, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_from(self, node, parent): - """visit a From node by returning a fresh instance of it""" - names = [(alias.name, alias.asname) for alias in 
node.names] - newnode = new.From(node.module or '', names, node.level) - _set_infos(node, newnode, parent) - # store From names to add them to locals after building - self._from_nodes.append(newnode) - return newnode - - def visit_function(self, node, parent): - """visit an Function node to become astng""" - self._global_names.append({}) - newnode = new.Function(node.name, None) - _lineno_parent(node, newnode, parent) - _init_set_doc(node, newnode) - newnode.args = self.visit(node.args, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - if 'decorators' in node._fields: # py < 2.6 - attr = 'decorators' - else: - attr = 'decorator_list' - decorators = getattr(node, attr) - if decorators: - newnode.decorators = self.visit_decorators(node, newnode) - newnode.set_line_info(newnode.last_child()) - self._global_names.pop() - frame = newnode.parent.frame() - if isinstance(frame, new.Class): - if newnode.name == '__new__': - newnode.type = 'classmethod' - else: - newnode.type = 'method' - if newnode.decorators is not None: - for decorator_expr in newnode.decorators.nodes: - if isinstance(decorator_expr, new.Name): - if decorator_expr.name in ('classmethod', 'staticmethod'): - newnode.type = decorator_expr.name - elif decorator_expr.name == 'classproperty': - newnode.type = 'classmethod' - frame.set_local(newnode.name, newnode) - return newnode - - def visit_genexpr(self, node, parent): - """visit a GenExpr node by returning a fresh instance of it""" - newnode = new.GenExpr() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) for child in node.generators] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_getattr(self, node, parent): - """visit a Getattr node by returning a fresh instance of it""" - if self.asscontext == "Del": - # FIXME : maybe we should reintroduce and visit_delattr ? 
- # for instance, deactivating asscontext - newnode = new.DelAttr() - elif self.asscontext == "Ass": - # FIXME : maybe we should call visit_assattr ? - newnode = new.AssAttr() - self._delayed_assattr.append(newnode) - else: - newnode = new.Getattr() - _lineno_parent(node, newnode, parent) - asscontext, self.asscontext = self.asscontext, None - newnode.expr = self.visit(node.value, newnode) - self.asscontext = asscontext - newnode.attrname = node.attr - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_global(self, node, parent): - """visit an Global node to become astng""" - newnode = new.Global(node.names) - _set_infos(node, newnode, parent) - if self._global_names: # global at the module level, no effect - for name in node.names: - self._global_names[-1].setdefault(name, []).append(newnode) - return newnode - - def visit_if(self, node, parent): - """visit a If node by returning a fresh instance of it""" - newnode = new.If() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_ifexp(self, node, parent): - """visit a IfExp node by returning a fresh instance of it""" - newnode = new.IfExp() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode) - newnode.body = self.visit(node.body, newnode) - newnode.orelse = self.visit(node.orelse, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_import(self, node, parent): - """visit a Import node by returning a fresh instance of it""" - newnode = new.Import() - _set_infos(node, newnode, parent) - newnode.names = [(alias.name, alias.asname) for alias in node.names] - # save import names in parent's locals: - for (name, asname) in newnode.names: - name = asname or name - 
newnode.parent.set_local(name.split('.')[0], newnode) - return newnode - - def visit_index(self, node, parent): - """visit a Index node by returning a fresh instance of it""" - newnode = new.Index() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_keyword(self, node, parent): - """visit a Keyword node by returning a fresh instance of it""" - newnode = new.Keyword() - _lineno_parent(node, newnode, parent) - newnode.arg = node.arg - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_lambda(self, node, parent): - """visit a Lambda node by returning a fresh instance of it""" - newnode = new.Lambda() - _lineno_parent(node, newnode, parent) - newnode.args = self.visit(node.args, newnode) - newnode.body = self.visit(node.body, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_list(self, node, parent): - """visit a List node by returning a fresh instance of it""" - newnode = new.List() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_listcomp(self, node, parent): - """visit a ListComp node by returning a fresh instance of it""" - newnode = new.ListComp() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) - for child in node.generators] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_module(self, node, modname, package): - """visit a Module node by returning a fresh instance of it""" - newnode = new.Module(modname, None) - newnode.package = package - _lineno_parent(node, newnode, parent=None) - _init_set_doc(node, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - 
newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_name(self, node, parent): - """visit a Name node by returning a fresh instance of it""" - # True and False can be assigned to something in py2x, so we have to - # check first the asscontext - if self.asscontext == "Del": - newnode = new.DelName() - elif self.asscontext is not None: # Ass - assert self.asscontext == "Ass" - newnode = new.AssName() - elif node.id in CONST_NAME_TRANSFORMS: - newnode = new.Const(CONST_NAME_TRANSFORMS[node.id]) - _set_infos(node, newnode, parent) - return newnode - else: - newnode = new.Name() - _lineno_parent(node, newnode, parent) - newnode.name = node.id - # XXX REMOVE me : - if self.asscontext in ('Del', 'Ass'): # 'Aug' ?? - self._save_assignment(newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_bytes(self, node, parent): - """visit a Bytes node by returning a fresh instance of Const""" - newnode = new.Const(node.s) - _set_infos(node, newnode, parent) - return newnode - - def visit_num(self, node, parent): - """visit a Num node by returning a fresh instance of Const""" - newnode = new.Const(node.n) - _set_infos(node, newnode, parent) - return newnode - - def visit_pass(self, node, parent): - """visit a Pass node by returning a fresh instance of it""" - newnode = new.Pass() - _set_infos(node, newnode, parent) - return newnode - - def visit_str(self, node, parent): - """visit a Str node by returning a fresh instance of Const""" - newnode = new.Const(node.s) - _set_infos(node, newnode, parent) - return newnode - - def visit_print(self, node, parent): - """visit a Print node by returning a fresh instance of it""" - newnode = new.Print() - _lineno_parent(node, newnode, parent) - newnode.nl = node.nl - if node.dest is not None: - newnode.dest = self.visit(node.dest, newnode) - newnode.values = [self.visit(child, newnode) for child in node.values] - newnode.set_line_info(newnode.last_child()) - return newnode - - def 
visit_raise(self, node, parent): - """visit a Raise node by returning a fresh instance of it""" - newnode = new.Raise() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.exc = self.visit(node.type, newnode) - if node.inst is not None: - newnode.inst = self.visit(node.inst, newnode) - if node.tback is not None: - newnode.tback = self.visit(node.tback, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_return(self, node, parent): - """visit a Return node by returning a fresh instance of it""" - newnode = new.Return() - _lineno_parent(node, newnode, parent) - if node.value is not None: - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_set(self, node, parent): - """visit a Tuple node by returning a fresh instance of it""" - newnode = new.Set() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_setcomp(self, node, parent): - """visit a SetComp node by returning a fresh instance of it""" - newnode = new.SetComp() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode) - newnode.generators = [self.visit(child, newnode) - for child in node.generators] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_slice(self, node, parent): - """visit a Slice node by returning a fresh instance of it""" - newnode = new.Slice() - _lineno_parent(node, newnode, parent) - if node.lower is not None: - newnode.lower = self.visit(node.lower, newnode) - if node.upper is not None: - newnode.upper = self.visit(node.upper, newnode) - if node.step is not None: - newnode.step = self.visit(node.step, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_subscript(self, node, parent): - """visit a Subscript node by returning a fresh instance of 
it""" - newnode = new.Subscript() - _lineno_parent(node, newnode, parent) - subcontext, self.asscontext = self.asscontext, None - newnode.value = self.visit(node.value, newnode) - newnode.slice = self.visit(node.slice, newnode) - self.asscontext = subcontext - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_tryexcept(self, node, parent): - """visit a TryExcept node by returning a fresh instance of it""" - newnode = new.TryExcept() - _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.handlers = [self.visit(child, newnode) for child in node.handlers] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_tryfinally(self, node, parent): - """visit a TryFinally node by returning a fresh instance of it""" - newnode = new.TryFinally() - _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_tuple(self, node, parent): - """visit a Tuple node by returning a fresh instance of it""" - newnode = new.Tuple() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode) for child in node.elts] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_unaryop(self, node, parent): - """visit a UnaryOp node by returning a fresh instance of it""" - newnode = new.UnaryOp() - _lineno_parent(node, newnode, parent) - newnode.operand = self.visit(node.operand, newnode) - newnode.op = _UNARY_OP_CLASSES[node.op.__class__] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_while(self, node, parent): - """visit a While node by returning a fresh instance of it""" - newnode = new.While() - _lineno_parent(node, newnode, parent) - newnode.test = 
self.visit(node.test, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.orelse = [self.visit(child, newnode) for child in node.orelse] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_with(self, node, parent): - """visit a With node by returning a fresh instance of it""" - newnode = new.With() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.context_expr, newnode) - self.asscontext = "Ass" - if node.optional_vars is not None: - newnode.vars = self.visit(node.optional_vars, newnode) - self.asscontext = None - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_yield(self, node, parent): - """visit a Yield node by returning a fresh instance of it""" - newnode = new.Yield() - _lineno_parent(node, newnode, parent) - if node.value is not None: - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - -class TreeRebuilder3k(TreeRebuilder): - """extend and overwrite TreeRebuilder for python3k""" - - def visit_arg(self, node, parent): - """visit a arg node by returning a fresh AssName instance""" - # the node is coming from py>=3.0, but we use AssName in py2.x - # XXX or we should instead introduce a Arg node in astng ? 
- return self.visit_assname(node, parent, node.arg) - - def visit_excepthandler(self, node, parent): - """visit an ExceptHandler node by returning a fresh instance of it""" - newnode = new.ExceptHandler() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.type = self.visit(node.type, newnode) - if node.name is not None: - newnode.name = self.visit_assname(node, newnode, node.name) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_nonlocal(self, node, parent): - """visit a Nonlocal node and return a new instance of it""" - newnode = new.Nonlocal(node.names) - _set_infos(node, newnode, parent) - return newnode - - def visit_raise(self, node, parent): - """visit a Raise node by returning a fresh instance of it""" - newnode = new.Raise() - _lineno_parent(node, newnode, parent) - # no traceback; anyway it is not used in Pylint - if node.exc is not None: - newnode.exc = self.visit(node.exc, newnode) - if node.cause is not None: - newnode.cause = self.visit(node.cause, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - def visit_starred(self, node, parent): - """visit a Starred node and return a new instance of it""" - newnode = new.Starred() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode) - newnode.set_line_info(newnode.last_child()) - return newnode - - -if sys.version_info >= (3, 0): - TreeRebuilder = TreeRebuilder3k - - diff --git a/pylibs/logilab/astng/scoped_nodes.py b/pylibs/logilab/astng/scoped_nodes.py deleted file mode 100644 index 41ad0c64..00000000 --- a/pylibs/logilab/astng/scoped_nodes.py +++ /dev/null @@ -1,972 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. 
-# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""This module contains the classes for "scoped" node, i.e. which are opening a -new local scope in the language definition : Module, Class, Function (and -Lambda, GenExpr, DictComp and SetComp to some extent). -""" -from __future__ import with_statement - -__doctype__ = "restructuredtext en" - -import sys -from itertools import chain - -from logilab.common.compat import builtins -from logilab.common.decorators import cached - -from logilab.astng import BUILTINS_MODULE -from logilab.astng.exceptions import NotFoundError, NoDefault, \ - ASTNGBuildingException, InferenceError -from logilab.astng.node_classes import Const, DelName, DelAttr, \ - Dict, From, List, Name, Pass, Raise, Return, Tuple, Yield, \ - are_exclusive, LookupMixIn, const_factory as cf, unpack_infer -from logilab.astng.bases import NodeNG, InferenceContext, Instance,\ - YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \ - BUILTINS_NAME -from logilab.astng.mixins import FilterStmtsMixin -from logilab.astng.bases import Statement -from logilab.astng.manager import ASTNGManager - - -def remove_nodes(func, cls): - def wrapper(*args, **kwargs): - nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)] - if not nodes: - raise NotFoundError() - return nodes - 
return wrapper - - -def function_to_method(n, klass): - if isinstance(n, Function): - if n.type == 'classmethod': - return BoundMethod(n, klass) - if n.type != 'staticmethod': - return UnboundMethod(n) - return n - -def std_special_attributes(self, name, add_locals=True): - if add_locals: - locals = self.locals - else: - locals = {} - if name == '__name__': - return [cf(self.name)] + locals.get(name, []) - if name == '__doc__': - return [cf(self.doc)] + locals.get(name, []) - if name == '__dict__': - return [Dict()] + locals.get(name, []) - raise NotFoundError(name) - -MANAGER = ASTNGManager() -def builtin_lookup(name): - """lookup a name into the builtin module - return the list of matching statements and the astng for the builtin - module - """ - builtin_astng = MANAGER.astng_from_module(builtins) - if name == '__dict__': - return builtin_astng, () - try: - stmts = builtin_astng.locals[name] - except KeyError: - stmts = () - return builtin_astng, stmts - - -# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup -class LocalsDictNodeNG(LookupMixIn, NodeNG): - """ this class provides locals handling common to Module, Function - and Class nodes, including a dict like interface for direct access - to locals information - """ - - # attributes below are set by the builder module or by raw factories - - # dictionary of locals with name as key and node defining the local as - # value - - def qname(self): - """return the 'qualified' name of the node, eg module.name, - module.class.name ... - """ - if self.parent is None: - return self.name - return '%s.%s' % (self.parent.frame().qname(), self.name) - - def frame(self): - """return the first parent frame node (i.e. Module, Function or Class) - """ - return self - - def scope(self): - """return the first node defining a new scope (i.e. 
Module, - Function, Class, Lambda but also GenExpr, DictComp and SetComp) - """ - return self - - - def _scope_lookup(self, node, name, offset=0): - """XXX method for interfacing the scope lookup""" - try: - stmts = node._filter_stmts(self.locals[name], self, offset) - except KeyError: - stmts = () - if stmts: - return self, stmts - if self.parent: # i.e. not Module - # nested scope: if parent scope is a function, that's fine - # else jump to the module - pscope = self.parent.scope() - if not pscope.is_function: - pscope = pscope.root() - return pscope.scope_lookup(node, name) - return builtin_lookup(name) # Module - - - - def set_local(self, name, stmt): - """define in locals ( is the node defining the name) - if the node is a Module node (i.e. has globals), add the name to - globals - - if the name is already defined, ignore it - """ - #assert not stmt in self.locals.get(name, ()), (self, stmt) - self.locals.setdefault(name, []).append(stmt) - - __setitem__ = set_local - - def _append_node(self, child): - """append a child, linking it in the tree""" - self.body.append(child) - child.parent = self - - def add_local_node(self, child_node, name=None): - """append a child which should alter locals to the given node""" - if name != '__class__': - # add __class__ node as a child will cause infinite recursion later! 
- self._append_node(child_node) - self.set_local(name or child_node.name, child_node) - - - def __getitem__(self, item): - """method from the `dict` interface returning the first node - associated with the given name in the locals dictionary - - :type item: str - :param item: the name of the locally defined object - :raises KeyError: if the name is not defined - """ - return self.locals[item][0] - - def __iter__(self): - """method from the `dict` interface returning an iterator on - `self.keys()` - """ - return iter(self.keys()) - - def keys(self): - """method from the `dict` interface returning a tuple containing - locally defined names - """ - return self.locals.keys() - - def values(self): - """method from the `dict` interface returning a tuple containing - locally defined nodes which are instance of `Function` or `Class` - """ - return [self[key] for key in self.keys()] - - def items(self): - """method from the `dict` interface returning a list of tuple - containing each locally defined name with its associated node, - which is an instance of `Function` or `Class` - """ - return zip(self.keys(), self.values()) - - - def __contains__(self, name): - return name in self.locals - has_key = __contains__ - -# Module ##################################################################### - -class Module(LocalsDictNodeNG): - _astng_fields = ('body',) - - fromlineno = 0 - lineno = 0 - - # attributes below are set by the builder module or by raw factories - - # the file from which as been extracted the astng representation. It may - # be None if the representation has been built from a built-in module - file = None - # the module name - name = None - # boolean for astng built from source (i.e. ast) - pure_python = None - # boolean for package module - package = None - # dictionary of globals with name as key and node defining the global - # as value - globals = None - - # names of python special attributes (handled by getattr impl.) 
- special_attributes = set(('__name__', '__doc__', '__file__', '__path__', - '__dict__')) - # names of module attributes available through the global scope - scope_attrs = set(('__name__', '__doc__', '__file__', '__path__')) - - def __init__(self, name, doc, pure_python=True): - self.name = name - self.doc = doc - self.pure_python = pure_python - self.locals = self.globals = {} - self.body = [] - - def block_range(self, lineno): - """return block line numbers. - - start from the beginning whatever the given lineno - """ - return self.fromlineno, self.tolineno - - def scope_lookup(self, node, name, offset=0): - if name in self.scope_attrs and not name in self.locals: - try: - return self, self.getattr(name) - except NotFoundError: - return self, () - return self._scope_lookup(node, name, offset) - - def pytype(self): - return '%s.module' % BUILTINS_MODULE - - def display_type(self): - return 'Module' - - def getattr(self, name, context=None, ignore_locals=False): - if name in self.special_attributes: - if name == '__file__': - return [cf(self.file)] + self.locals.get(name, []) - if name == '__path__' and self.package: - return [List()] + self.locals.get(name, []) - return std_special_attributes(self, name) - if not ignore_locals and name in self.locals: - return self.locals[name] - if self.package: - try: - return [self.import_module(name, relative_only=True)] - except ASTNGBuildingException: - raise NotFoundError(name) - except Exception:# XXX pylint tests never pass here; do we need it? 
- import traceback - traceback.print_exc() - raise NotFoundError(name) - getattr = remove_nodes(getattr, DelName) - - def igetattr(self, name, context=None): - """inferred getattr""" - # set lookup name since this is necessary to infer on import nodes for - # instance - context = copy_context(context) - context.lookupname = name - try: - return _infer_stmts(self.getattr(name, context), context, frame=self) - except NotFoundError: - raise InferenceError(name) - - def fully_defined(self): - """return True if this module has been built from a .py file - and so contains a complete representation including the code - """ - return self.file is not None and self.file.endswith('.py') - - def statement(self): - """return the first parent node marked as statement node - consider a module as a statement... - """ - return self - - def previous_sibling(self): - """module has no sibling""" - return - - def next_sibling(self): - """module has no sibling""" - return - - if sys.version_info < (2, 8): - def absolute_import_activated(self): - for stmt in self.locals.get('absolute_import', ()): - if isinstance(stmt, From) and stmt.modname == '__future__': - return True - return False - else: - absolute_import_activated = lambda self: True - - def import_module(self, modname, relative_only=False, level=None): - """import the given module considering self as context""" - if relative_only and level is None: - level = 0 - absmodname = self.relative_to_absolute_name(modname, level) - try: - return MANAGER.astng_from_module_name(absmodname) - except ASTNGBuildingException: - # we only want to import a sub module or package of this module, - # skip here - if relative_only: - raise - return MANAGER.astng_from_module_name(modname) - - def relative_to_absolute_name(self, modname, level): - """return the absolute module name for a relative import. - - The relative import can be implicit or explicit. 
- """ - # XXX this returns non sens when called on an absolute import - # like 'pylint.checkers.logilab.astng.utils' - # XXX doesn't return absolute name if self.name isn't absolute name - if self.absolute_import_activated() and level is None: - return modname - if level: - if self.package: - level = level - 1 - package_name = self.name.rsplit('.', level)[0] - elif self.package: - package_name = self.name - else: - package_name = self.name.rsplit('.', 1)[0] - if package_name: - if not modname: - return package_name - return '%s.%s' % (package_name, modname) - return modname - - - def wildcard_import_names(self): - """return the list of imported names when this module is 'wildcard - imported' - - It doesn't include the '__builtins__' name which is added by the - current CPython implementation of wildcard imports. - """ - # take advantage of a living module if it exists - try: - living = sys.modules[self.name] - except KeyError: - pass - else: - try: - return living.__all__ - except AttributeError: - return [name for name in living.__dict__.keys() - if not name.startswith('_')] - # else lookup the astng - # - # We separate the different steps of lookup in try/excepts - # to avoid catching too many Exceptions - # However, we can not analyse dynamically constructed __all__ - try: - all = self['__all__'] - except KeyError: - return [name for name in self.keys() if not name.startswith('_')] - try: - explicit = all.assigned_stmts().next() - except InferenceError: - return [name for name in self.keys() if not name.startswith('_')] - except AttributeError: - # not an assignment node - # XXX infer? 
- return [name for name in self.keys() if not name.startswith('_')] - try: - # should be a Tuple/List of constant string / 1 string not allowed - return [const.value for const in explicit.elts] - except AttributeError: - return [name for name in self.keys() if not name.startswith('_')] - - -class ComprehensionScope(LocalsDictNodeNG): - def frame(self): - return self.parent.frame() - - scope_lookup = LocalsDictNodeNG._scope_lookup - - -class GenExpr(ComprehensionScope): - _astng_fields = ('elt', 'generators') - - def __init__(self): - self.locals = {} - self.elt = None - self.generators = [] - - -class DictComp(ComprehensionScope): - _astng_fields = ('key', 'value', 'generators') - - def __init__(self): - self.locals = {} - self.key = None - self.value = None - self.generators = [] - - -class SetComp(ComprehensionScope): - _astng_fields = ('elt', 'generators') - - def __init__(self): - self.locals = {} - self.elt = None - self.generators = [] - - -class _ListComp(NodeNG): - """class representing a ListComp node""" - _astng_fields = ('elt', 'generators') - elt = None - generators = None - -if sys.version_info >= (3, 0): - class ListComp(_ListComp, ComprehensionScope): - """class representing a ListComp node""" - def __init__(self): - self.locals = {} -else: - class ListComp(_ListComp): - """class representing a ListComp node""" - -# Function ################################################################### - - -class Lambda(LocalsDictNodeNG, FilterStmtsMixin): - _astng_fields = ('args', 'body',) - - # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod' - type = 'function' - - def __init__(self): - self.locals = {} - self.args = [] - self.body = [] - - def pytype(self): - if 'method' in self.type: - return '%s.instancemethod' % BUILTINS_MODULE - return '%s.function' % BUILTINS_MODULE - - def display_type(self): - if 'method' in self.type: - return 'Method' - return 'Function' - - def callable(self): - return True - - def argnames(self): - 
"""return a list of argument names""" - if self.args.args: # maybe None with builtin functions - names = _rec_get_names(self.args.args) - else: - names = [] - if self.args.vararg: - names.append(self.args.vararg) - if self.args.kwarg: - names.append(self.args.kwarg) - return names - - def infer_call_result(self, caller, context=None): - """infer what a function is returning when called""" - return self.body.infer(context) - - def scope_lookup(self, node, name, offset=0): - if node in self.args.defaults: - frame = self.parent.frame() - # line offset to avoid that def func(f=func) resolve the default - # value to the defined function - offset = -1 - else: - # check this is not used in function decorators - frame = self - return frame._scope_lookup(node, name, offset) - - -class Function(Statement, Lambda): - _astng_fields = ('decorators', 'args', 'body') - - special_attributes = set(('__name__', '__doc__', '__dict__')) - is_function = True - # attributes below are set by the builder module or by raw factories - blockstart_tolineno = None - decorators = None - - def __init__(self, name, doc): - self.locals = {} - self.args = [] - self.body = [] - self.decorators = None - self.name = name - self.doc = doc - self.extra_decorators = [] - self.instance_attrs = {} - - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - # lineno is the line number of the first decorator, we want the def statement lineno - if self.decorators is not None: - self.fromlineno += len(self.decorators.nodes) - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self.args.tolineno - - def block_range(self, lineno): - """return block line numbers. - - start from the "def" position whatever the given lineno - """ - return self.fromlineno, self.tolineno - - def getattr(self, name, context=None): - """this method doesn't look in the instance_attrs dictionary since it's - done by an Instance proxy at inference time. 
- """ - if name == '__module__': - return [cf(self.root().qname())] - if name in self.instance_attrs: - return self.instance_attrs[name] - return std_special_attributes(self, name, False) - - def is_method(self): - """return true if the function node should be considered as a method""" - # check we are defined in a Class, because this is usually expected - # (e.g. pylint...) when is_method() return True - return self.type != 'function' and isinstance(self.parent.frame(), Class) - - def decoratornames(self): - """return a list of decorator qualified names""" - result = set() - decoratornodes = [] - if self.decorators is not None: - decoratornodes += self.decorators.nodes - decoratornodes += self.extra_decorators - for decnode in decoratornodes: - for infnode in decnode.infer(): - result.add(infnode.qname()) - return result - decoratornames = cached(decoratornames) - - def is_bound(self): - """return true if the function is bound to an Instance or a class""" - return self.type == 'classmethod' - - def is_abstract(self, pass_is_abstract=True): - """return true if the method is abstract - It's considered as abstract if the only statement is a raise of - NotImplementError, or, if pass_is_abstract, a pass statement - """ - for child_node in self.body: - if isinstance(child_node, Raise): - if child_node.raises_not_implemented(): - return True - if pass_is_abstract and isinstance(child_node, Pass): - return True - return False - # empty function is the same as function with a single "pass" statement - if pass_is_abstract: - return True - - def is_generator(self): - """return true if this is a generator function""" - # XXX should be flagged, not computed - try: - return self.nodes_of_class(Yield, skip_klass=Function).next() - except StopIteration: - return False - - def infer_call_result(self, caller, context=None): - """infer what a function is returning when called""" - if self.is_generator(): - yield Generator(self) - return - returns = self.nodes_of_class(Return, 
skip_klass=Function) - for returnnode in returns: - if returnnode.value is None: - yield Const(None) - else: - try: - for infered in returnnode.value.infer(context): - yield infered - except InferenceError: - yield YES - - -def _rec_get_names(args, names=None): - """return a list of all argument names""" - if names is None: - names = [] - for arg in args: - if isinstance(arg, Tuple): - _rec_get_names(arg.elts, names) - else: - names.append(arg.name) - return names - - -# Class ###################################################################### - -def _class_type(klass, ancestors=None): - """return a Class node type to differ metaclass, interface and exception - from 'regular' classes - """ - # XXX we have to store ancestors in case we have a ancestor loop - if klass._type is not None: - return klass._type - if klass.name == 'type': - klass._type = 'metaclass' - elif klass.name.endswith('Interface'): - klass._type = 'interface' - elif klass.name.endswith('Exception'): - klass._type = 'exception' - else: - if ancestors is None: - ancestors = set() - if klass in ancestors: - # XXX we are in loop ancestors, and have found no type - klass._type = 'class' - return 'class' - ancestors.add(klass) - # print >> sys.stderr, '_class_type', repr(klass) - for base in klass.ancestors(recurs=False): - if _class_type(base, ancestors) != 'class': - klass._type = base.type - break - if klass._type is None: - klass._type = 'class' - return klass._type - -def _iface_hdlr(iface_node): - """a handler function used by interfaces to handle suspicious - interface nodes - """ - return True - - -class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin): - - # some of the attributes below are set by the builder module or - # by a raw factories - - # a dictionary of class instances attributes - _astng_fields = ('decorators', 'bases', 'body') # name - - decorators = None - special_attributes = set(('__name__', '__doc__', '__dict__', '__module__', - '__bases__', '__mro__', '__subclasses__')) 
- blockstart_tolineno = None - - _type = None - type = property(_class_type, - doc="class'type, possible values are 'class' | " - "'metaclass' | 'interface' | 'exception'") - - def __init__(self, name, doc): - self.instance_attrs = {} - self.locals = {} - self.bases = [] - self.body = [] - self.name = name - self.doc = doc - - def _newstyle_impl(self, context=None): - if context is None: - context = InferenceContext() - if self._newstyle is not None: - return self._newstyle - for base in self.ancestors(recurs=False, context=context): - if base._newstyle_impl(context): - self._newstyle = True - break - if self._newstyle is None: - self._newstyle = False - return self._newstyle - - _newstyle = None - newstyle = property(_newstyle_impl, - doc="boolean indicating if it's a new style class" - "or not") - - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno - if lastchild is not None: - self.tolineno = lastchild.tolineno - # else this is a class with only a docstring, then tolineno is (should be) already ok - - def block_range(self, lineno): - """return block line numbers. - - start from the "class" position whatever the given lineno - """ - return self.fromlineno, self.tolineno - - def pytype(self): - if self.newstyle: - return '%s.type' % BUILTINS_MODULE - return '%s.classobj' % BUILTINS_MODULE - - def display_type(self): - return 'Class' - - def callable(self): - return True - - def infer_call_result(self, caller, context=None): - """infer what a class is returning when called""" - yield Instance(self) - - def scope_lookup(self, node, name, offset=0): - if node in self.bases: - frame = self.parent.frame() - # line offset to avoid that class A(A) resolve the ancestor to - # the defined class - offset = -1 - else: - frame = self - return frame._scope_lookup(node, name, offset) - - # list of parent class as a list of string (i.e. 
names as they appear - # in the class definition) XXX bw compat - def basenames(self): - return [bnode.as_string() for bnode in self.bases] - basenames = property(basenames) - - def ancestors(self, recurs=True, context=None): - """return an iterator on the node base classes in a prefixed - depth first order - - :param recurs: - boolean indicating if it should recurse or return direct - ancestors only - """ - # FIXME: should be possible to choose the resolution order - # XXX inference make infinite loops possible here (see BaseTransformer - # manipulation in the builder module for instance) - yielded = set([self]) - if context is None: - context = InferenceContext() - for stmt in self.bases: - with context.restore_path(): - try: - for baseobj in stmt.infer(context): - if not isinstance(baseobj, Class): - # duh ? - continue - if baseobj in yielded: - continue # cf xxx above - yielded.add(baseobj) - yield baseobj - if recurs: - for grandpa in baseobj.ancestors(True, context): - if grandpa in yielded: - continue # cf xxx above - yielded.add(grandpa) - yield grandpa - except InferenceError: - # XXX log error ? 
- continue - - def local_attr_ancestors(self, name, context=None): - """return an iterator on astng representation of parent classes - which have defined in their locals - """ - for astng in self.ancestors(context=context): - if name in astng: - yield astng - - def instance_attr_ancestors(self, name, context=None): - """return an iterator on astng representation of parent classes - which have defined in their instance attribute dictionary - """ - for astng in self.ancestors(context=context): - if name in astng.instance_attrs: - yield astng - - def has_base(self, node): - return node in self.bases - - def local_attr(self, name, context=None): - """return the list of assign node associated to name in this class - locals or in its parents - - :raises `NotFoundError`: - if no attribute with this name has been find in this class or - its parent classes - """ - try: - return self.locals[name] - except KeyError: - # get if from the first parent implementing it if any - for class_node in self.local_attr_ancestors(name, context): - return class_node.locals[name] - raise NotFoundError(name) - local_attr = remove_nodes(local_attr, DelAttr) - - def instance_attr(self, name, context=None): - """return the astng nodes associated to name in this class instance - attributes dictionary and in its parents - - :raises `NotFoundError`: - if no attribute with this name has been find in this class or - its parent classes - """ - values = self.instance_attrs.get(name, []) - # get all values from parents - for class_node in self.instance_attr_ancestors(name, context): - values += class_node.instance_attrs[name] - if not values: - raise NotFoundError(name) - return values - instance_attr = remove_nodes(instance_attr, DelAttr) - - def instanciate_class(self): - """return Instance of Class node, else return self""" - return Instance(self) - - def getattr(self, name, context=None): - """this method doesn't look in the instance_attrs dictionary since it's - done by an Instance proxy at 
inference time. - - It may return a YES object if the attribute has not been actually - found but a __getattr__ or __getattribute__ method is defined - """ - values = self.locals.get(name, []) - if name in self.special_attributes: - if name == '__module__': - return [cf(self.root().qname())] + values - # FIXME : what is expected by passing the list of ancestors to cf: - # you can just do [cf(tuple())] + values without breaking any test - # this is ticket http://www.logilab.org/ticket/52785 - if name == '__bases__': - return [cf(tuple(self.ancestors(recurs=False, context=context)))] + values - # XXX need proper meta class handling + MRO implementation - if name == '__mro__' and self.newstyle: - # XXX mro is read-only but that's not our job to detect that - return [cf(tuple(self.ancestors(recurs=True, context=context)))] + values - return std_special_attributes(self, name) - # don't modify the list in self.locals! - values = list(values) - for classnode in self.ancestors(recurs=True, context=context): - values += classnode.locals.get(name, []) - if not values: - raise NotFoundError(name) - return values - - def igetattr(self, name, context=None): - """inferred getattr, need special treatment in class to handle - descriptors - """ - # set lookup name since this is necessary to infer on import nodes for - # instance - context = copy_context(context) - context.lookupname = name - try: - for infered in _infer_stmts(self.getattr(name, context), context, - frame=self): - # yield YES object instead of descriptors when necessary - if not isinstance(infered, Const) and isinstance(infered, Instance): - try: - infered._proxied.getattr('__get__', context) - except NotFoundError: - yield infered - else: - yield YES - else: - yield function_to_method(infered, self) - except NotFoundError: - if not name.startswith('__') and self.has_dynamic_getattr(context): - # class handle some dynamic attributes, return a YES object - yield YES - else: - raise InferenceError(name) - - def 
has_dynamic_getattr(self, context=None): - """return True if the class has a custom __getattr__ or - __getattribute__ method - """ - # need to explicitly handle optparse.Values (setattr is not detected) - if self.name == 'Values' and self.root().name == 'optparse': - return True - try: - self.getattr('__getattr__', context) - return True - except NotFoundError: - #if self.newstyle: XXX cause an infinite recursion error - try: - getattribute = self.getattr('__getattribute__', context)[0] - if getattribute.root().name != BUILTINS_NAME: - # class has a custom __getattribute__ defined - return True - except NotFoundError: - pass - return False - - def methods(self): - """return an iterator on all methods defined in the class and - its ancestors - """ - done = {} - for astng in chain(iter((self,)), self.ancestors()): - for meth in astng.mymethods(): - if meth.name in done: - continue - done[meth.name] = None - yield meth - - def mymethods(self): - """return an iterator on all methods defined in the class""" - for member in self.values(): - if isinstance(member, Function): - yield member - - def interfaces(self, herited=True, handler_func=_iface_hdlr): - """return an iterator on interfaces implemented by the given - class node - """ - # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)... 
- try: - implements = Instance(self).getattr('__implements__')[0] - except NotFoundError: - return - if not herited and not implements.frame() is self: - return - found = set() - missing = False - for iface in unpack_infer(implements): - if iface is YES: - missing = True - continue - if not iface in found and handler_func(iface): - found.add(iface) - yield iface - if missing: - raise InferenceError() - - diff --git a/pylibs/logilab/astng/setup.py b/pylibs/logilab/astng/setup.py deleted file mode 100644 index e4858056..00000000 --- a/pylibs/logilab/astng/setup.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# pylint: disable=W0404,W0622,W0704,W0613 -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-astng is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""Generic Setup script, takes package info from __pkginfo__.py file. 
-""" -__docformat__ = "restructuredtext en" - -import os -import sys -import shutil -from os.path import isdir, exists, join - -try: - if os.environ.get('NO_SETUPTOOLS'): - raise ImportError() - from setuptools import setup - from setuptools.command import install_lib - USE_SETUPTOOLS = 1 -except ImportError: - from distutils.core import setup - from distutils.command import install_lib - USE_SETUPTOOLS = 0 - -try: - # python3 - from distutils.command.build_py import build_py_2to3 as build_py -except ImportError: - # python2.x - from distutils.command.build_py import build_py - -sys.modules.pop('__pkginfo__', None) -# import optional features -__pkginfo__ = __import__("__pkginfo__") -# import required features -from __pkginfo__ import modname, version, license, description, \ - web, author, author_email -# import optional features - -distname = getattr(__pkginfo__, 'distname', modname) -scripts = getattr(__pkginfo__, 'scripts', []) -data_files = getattr(__pkginfo__, 'data_files', None) -subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) -include_dirs = getattr(__pkginfo__, 'include_dirs', []) -ext_modules = getattr(__pkginfo__, 'ext_modules', None) -install_requires = getattr(__pkginfo__, 'install_requires', None) -dependency_links = getattr(__pkginfo__, 'dependency_links', []) - -STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') - -IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~') - -if exists('README'): - long_description = open('README').read() -else: - long_description = '' - -def ensure_scripts(linux_scripts): - """Creates the proper script names required for each platform - (taken from 4Suite) - """ - from distutils import util - if util.get_platform()[:3] == 'win': - scripts_ = [script + '.bat' for script in linux_scripts] - else: - scripts_ = linux_scripts - return scripts_ - -def get_packages(directory, prefix): - """return a list of subpackages for the given directory""" - result = [] - for package in os.listdir(directory): - 
absfile = join(directory, package) - if isdir(absfile): - if exists(join(absfile, '__init__.py')) or \ - package in ('test', 'tests'): - if prefix: - result.append('%s.%s' % (prefix, package)) - else: - result.append(package) - result += get_packages(absfile, result[-1]) - return result - -EMPTY_FILE = '''"""generated file, don't modify or your data will be lost""" -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - pass -''' - -class MyInstallLib(install_lib.install_lib): - """extend install_lib command to handle package __init__.py and - include_dirs variable if necessary - """ - def run(self): - """overridden from install_lib class""" - install_lib.install_lib.run(self) - # create Products.__init__.py if needed - if subpackage_of: - product_init = join(self.install_dir, subpackage_of, '__init__.py') - if not exists(product_init): - self.announce('creating %s' % product_init) - stream = open(product_init, 'w') - stream.write(EMPTY_FILE) - stream.close() - # manually install included directories if any - if include_dirs: - if subpackage_of: - base = join(subpackage_of, modname) - else: - base = modname - for directory in include_dirs: - dest = join(self.install_dir, base, directory) - shutil.rmtree(dest, ignore_errors=True) - shutil.copytree(directory, dest) - -def install(**kwargs): - """setup entry point""" - if USE_SETUPTOOLS: - if '--force-manifest' in sys.argv: - sys.argv.remove('--force-manifest') - # install-layout option was introduced in 2.5.3-1~exp1 - elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: - sys.argv.remove('--install-layout=deb') - if subpackage_of: - package = subpackage_of + '.' 
+ modname - kwargs['package_dir'] = {package : '.'} - packages = [package] + get_packages(os.getcwd(), package) - if USE_SETUPTOOLS: - kwargs['namespace_packages'] = [subpackage_of] - else: - kwargs['package_dir'] = {modname : '.'} - packages = [modname] + get_packages(os.getcwd(), modname) - if USE_SETUPTOOLS and install_requires: - kwargs['install_requires'] = install_requires - kwargs['dependency_links'] = dependency_links - kwargs['packages'] = packages - return setup(name = distname, - version = version, - license = license, - description = description, - long_description = long_description, - author = author, - author_email = author_email, - url = web, - scripts = ensure_scripts(scripts), - data_files = data_files, - ext_modules = ext_modules, - cmdclass = {'install_lib': MyInstallLib, - 'build_py': build_py}, - **kwargs - ) - -if __name__ == '__main__' : - install() diff --git a/pylibs/logilab/astng/utils.py b/pylibs/logilab/astng/utils.py deleted file mode 100644 index ba317c8e..00000000 --- a/pylibs/logilab/astng/utils.py +++ /dev/null @@ -1,241 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com -# -# This file is part of logilab-astng. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . 
-"""this module contains some utilities to navigate in the tree or to -extract information from it -""" - -__docformat__ = "restructuredtext en" - -from logilab.astng.exceptions import ASTNGBuildingException - - -class ASTWalker: - """a walker visiting a tree in preorder, calling on the handler: - - * visit_ on entering a node, where class name is the class of - the node in lower case - - * leave_ on leaving a node, where class name is the class of - the node in lower case - """ - - def __init__(self, handler): - self.handler = handler - self._cache = {} - - def walk(self, node, _done=None): - """walk on the tree from , getting callbacks from handler""" - if _done is None: - _done = set() - if node in _done: - raise AssertionError((id(node), node, node.parent)) - _done.add(node) - self.visit(node) - for child_node in node.get_children(): - self.handler.set_context(node, child_node) - assert child_node is not node - self.walk(child_node, _done) - self.leave(node) - assert node.parent is not node - - def get_callbacks(self, node): - """get callbacks from handler for the visited node""" - klass = node.__class__ - methods = self._cache.get(klass) - if methods is None: - handler = self.handler - kid = klass.__name__.lower() - e_method = getattr(handler, 'visit_%s' % kid, - getattr(handler, 'visit_default', None)) - l_method = getattr(handler, 'leave_%s' % kid, - getattr(handler, 'leave_default', None)) - self._cache[klass] = (e_method, l_method) - else: - e_method, l_method = methods - return e_method, l_method - - def visit(self, node): - """walk on the tree from , getting callbacks from handler""" - method = self.get_callbacks(node)[0] - if method is not None: - method(node) - - def leave(self, node): - """walk on the tree from , getting callbacks from handler""" - method = self.get_callbacks(node)[1] - if method is not None: - method(node) - - -class LocalsVisitor(ASTWalker): - """visit a project by traversing the locals dictionary""" - def __init__(self): - 
ASTWalker.__init__(self, self) - self._visited = {} - - def visit(self, node): - """launch the visit starting from the given node""" - if node in self._visited: - return - self._visited[node] = 1 # FIXME: use set ? - methods = self.get_callbacks(node) - if methods[0] is not None: - methods[0](node) - if 'locals' in node.__dict__: # skip Instance and other proxy - for name, local_node in node.items(): - self.visit(local_node) - if methods[1] is not None: - return methods[1](node) - - -def _check_children(node): - """a helper function to check children - parent relations""" - for child in node.get_children(): - ok = False - if child is None: - print "Hm, child of %s is None" % node - continue - if not hasattr(child, 'parent'): - print " ERROR: %s has child %s %x with no parent" % (node, child, id(child)) - elif not child.parent: - print " ERROR: %s has child %s %x with parent %r" % (node, child, id(child), child.parent) - elif child.parent is not node: - print " ERROR: %s %x has child %s %x with wrong parent %s" % (node, - id(node), child, id(child), child.parent) - else: - ok = True - if not ok: - print "lines;", node.lineno, child.lineno - print "of module", node.root(), node.root().name - raise ASTNGBuildingException - _check_children(child) - - -from _ast import PyCF_ONLY_AST -def parse(string): - return compile(string, "", 'exec', PyCF_ONLY_AST) - -class TreeTester(object): - '''A helper class to see _ast tree and compare with astng tree - - indent: string for tree indent representation - lineno: bool to tell if we should print the line numbers - - >>> tester = TreeTester('print') - >>> print tester.native_tree_repr() - - - . body = [ - . - . . nl = True - . ] - >>> print tester.astng_tree_repr() - Module() - body = [ - Print() - dest = - values = [ - ] - ] - ''' - - indent = '. 
' - lineno = False - - def __init__(self, sourcecode): - self._string = '' - self.sourcecode = sourcecode - self._ast_node = None - self.build_ast() - - def build_ast(self): - """build the _ast tree from the source code""" - self._ast_node = parse(self.sourcecode) - - def native_tree_repr(self, node=None, indent=''): - """get a nice representation of the _ast tree""" - self._string = '' - if node is None: - node = self._ast_node - self._native_repr_tree(node, indent) - return self._string - - - def _native_repr_tree(self, node, indent, _done=None): - """recursive method for the native tree representation""" - from _ast import Load as _Load, Store as _Store, Del as _Del - from _ast import AST as Node - if _done is None: - _done = set() - if node in _done: - self._string += '\nloop in tree: %r (%s)' % (node, - getattr(node, 'lineno', None)) - return - _done.add(node) - self._string += '\n' + indent + '<%s>' % node.__class__.__name__ - indent += self.indent - if not hasattr(node, '__dict__'): - self._string += '\n' + self.indent + " ** node has no __dict__ " + str(node) - return - node_dict = node.__dict__ - if hasattr(node, '_attributes'): - for a in node._attributes: - attr = node_dict[a] - if attr is None: - continue - if a in ("lineno", "col_offset") and not self.lineno: - continue - self._string +='\n' + indent + a + " = " + repr(attr) - for field in node._fields or (): - attr = node_dict[field] - if attr is None: - continue - if isinstance(attr, list): - if not attr: - continue - self._string += '\n' + indent + field + ' = [' - for elt in attr: - self._native_repr_tree(elt, indent, _done) - self._string += '\n' + indent + ']' - continue - if isinstance(attr, (_Load, _Store, _Del)): - continue - if isinstance(attr, Node): - self._string += '\n' + indent + field + " = " - self._native_repr_tree(attr, indent, _done) - else: - self._string += '\n' + indent + field + " = " + repr(attr) - - - def build_astng_tree(self): - """build astng tree from the _ast tree - """ - 
from logilab.astng.builder import ASTNGBuilder - tree = ASTNGBuilder().string_build(self.sourcecode) - return tree - - def astng_tree_repr(self, ids=False): - """build the astng tree and return a nice tree representation""" - mod = self.build_astng_tree() - return mod.repr_tree(ids) - - -__all__ = ('LocalsVisitor', 'ASTWalker',) - diff --git a/pylibs/logilab/common/__init__.py b/pylibs/logilab/common/__init__.py deleted file mode 100644 index 8d063e2c..00000000 --- a/pylibs/logilab/common/__init__.py +++ /dev/null @@ -1,171 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Logilab common library (aka Logilab's extension to the standard library). 
- -:type STD_BLACKLIST: tuple -:var STD_BLACKLIST: directories ignored by default by the functions in - this package which have to recurse into directories - -:type IGNORED_EXTENSIONS: tuple -:var IGNORED_EXTENSIONS: file extensions that may usually be ignored -""" -__docformat__ = "restructuredtext en" -from logilab.common.__pkginfo__ import version as __version__ - -STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') - -IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig') - -# set this to False if you've mx DateTime installed but you don't want your db -# adapter to use it (should be set before you got a connection) -USE_MX_DATETIME = True - - -class attrdict(dict): - """A dictionary for which keys are also accessible as attributes.""" - def __getattr__(self, attr): - try: - return self[attr] - except KeyError: - raise AttributeError(attr) - -class dictattr(dict): - def __init__(self, proxy): - self.__proxy = proxy - - def __getitem__(self, attr): - try: - return getattr(self.__proxy, attr) - except AttributeError: - raise KeyError(attr) - -class nullobject(object): - def __repr__(self): - return '' - def __nonzero__(self): - return False - -class tempattr(object): - def __init__(self, obj, attr, value): - self.obj = obj - self.attr = attr - self.value = value - - def __enter__(self): - self.oldvalue = getattr(self.obj, self.attr) - setattr(self.obj, self.attr, self.value) - return self.obj - - def __exit__(self, exctype, value, traceback): - setattr(self.obj, self.attr, self.oldvalue) - - - -# flatten ----- -# XXX move in a specific module and use yield instead -# do not mix flatten and translate -# -# def iterable(obj): -# try: iter(obj) -# except: return False -# return True -# -# def is_string_like(obj): -# try: obj +'' -# except (TypeError, ValueError): return False -# return True -# -#def is_scalar(obj): -# return is_string_like(obj) or not iterable(obj) -# -#def flatten(seq): -# for item in seq: -# if is_scalar(item): -# yield 
item -# else: -# for subitem in flatten(item): -# yield subitem - -def flatten(iterable, tr_func=None, results=None): - """Flatten a list of list with any level. - - If tr_func is not None, it should be a one argument function that'll be called - on each final element. - - :rtype: list - - >>> flatten([1, [2, 3]]) - [1, 2, 3] - """ - if results is None: - results = [] - for val in iterable: - if isinstance(val, (list, tuple)): - flatten(val, tr_func, results) - elif tr_func is None: - results.append(val) - else: - results.append(tr_func(val)) - return results - - -# XXX is function below still used ? - -def make_domains(lists): - """ - Given a list of lists, return a list of domain for each list to produce all - combinations of possibles values. - - :rtype: list - - Example: - - >>> make_domains(['a', 'b'], ['c','d', 'e']) - [['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']] - """ - domains = [] - for iterable in lists: - new_domain = iterable[:] - for i in range(len(domains)): - domains[i] = domains[i]*len(iterable) - if domains: - missing = (len(domains[0]) - len(iterable)) / len(iterable) - i = 0 - for j in range(len(iterable)): - value = iterable[j] - for dummy in range(missing): - new_domain.insert(i, value) - i += 1 - i += 1 - domains.append(new_domain) - return domains - - -# private stuff ################################################################ - -def _handle_blacklist(blacklist, dirnames, filenames): - """remove files/directories in the black list - - dirnames/filenames are usually from os.walk - """ - for norecurs in blacklist: - if norecurs in dirnames: - dirnames.remove(norecurs) - elif norecurs in filenames: - filenames.remove(norecurs) - diff --git a/pylibs/logilab/common/__pkginfo__.py b/pylibs/logilab/common/__pkginfo__.py deleted file mode 100644 index a8a937be..00000000 --- a/pylibs/logilab/common/__pkginfo__.py +++ /dev/null @@ -1,42 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""logilab.common packaging information""" -__docformat__ = "restructuredtext en" - -distname = 'logilab-common' -modname = 'common' -subpackage_of = 'logilab' -subpackage_master = True - -numversion = (0, 56, 2) -version = '.'.join([str(num) for num in numversion]) - -license = 'LGPL' # 2.1 or later -description = "collection of low-level Python packages and modules used by Logilab projects" -web = "http://www.logilab.org/project/%s" % distname -ftp = "ftp://ftp.logilab.org/pub/%s" % modname -mailinglist = "mailto://python-projects@lists.logilab.org" -author = "Logilab" -author_email = "contact@logilab.fr" - - -from os.path import join -scripts = [join('bin', 'pytest')] -include_dirs = [join('test', 'data')] - -install_requires = ['unittest2 >= 0.5.1'] diff --git a/pylibs/logilab/common/cache.py b/pylibs/logilab/common/cache.py deleted file mode 100644 index 11ed1370..00000000 --- a/pylibs/logilab/common/cache.py +++ /dev/null @@ -1,114 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Cache module, with a least recently used algorithm for the management of the -deletion of entries. - - - - -""" -__docformat__ = "restructuredtext en" - -from threading import Lock - -from logilab.common.decorators import locked - -_marker = object() - -class Cache(dict): - """A dictionary like cache. - - inv: - len(self._usage) <= self.size - len(self.data) <= self.size - """ - - def __init__(self, size=100): - """ Warning : Cache.__init__() != dict.__init__(). - Constructor does not take any arguments beside size. 
- """ - assert size >= 0, 'cache size must be >= 0 (0 meaning no caching)' - self.size = size - self._usage = [] - self._lock = Lock() - super(Cache, self).__init__() - - def _acquire(self): - self._lock.acquire() - - def _release(self): - self._lock.release() - - def _update_usage(self, key): - if not self._usage: - self._usage.append(key) - elif self._usage[-1] != key: - try: - self._usage.remove(key) - except ValueError: - # we are inserting a new key - # check the size of the dictionary - # and remove the oldest item in the cache - if self.size and len(self._usage) >= self.size: - super(Cache, self).__delitem__(self._usage[0]) - del self._usage[0] - self._usage.append(key) - else: - pass # key is already the most recently used key - - def __getitem__(self, key): - value = super(Cache, self).__getitem__(key) - self._update_usage(key) - return value - __getitem__ = locked(_acquire, _release)(__getitem__) - - def __setitem__(self, key, item): - # Just make sure that size > 0 before inserting a new item in the cache - if self.size > 0: - super(Cache, self).__setitem__(key, item) - self._update_usage(key) - __setitem__ = locked(_acquire, _release)(__setitem__) - - def __delitem__(self, key): - super(Cache, self).__delitem__(key) - self._usage.remove(key) - __delitem__ = locked(_acquire, _release)(__delitem__) - - def clear(self): - super(Cache, self).clear() - self._usage = [] - clear = locked(_acquire, _release)(clear) - - def pop(self, key, default=_marker): - if key in self: - self._usage.remove(key) - #if default is _marker: - # return super(Cache, self).pop(key) - return super(Cache, self).pop(key, default) - pop = locked(_acquire, _release)(pop) - - def popitem(self): - raise NotImplementedError() - - def setdefault(self, key, default=None): - raise NotImplementedError() - - def update(self, other): - raise NotImplementedError() - - diff --git a/pylibs/logilab/common/changelog.py b/pylibs/logilab/common/changelog.py deleted file mode 100644 index 
74f51241..00000000 --- a/pylibs/logilab/common/changelog.py +++ /dev/null @@ -1,236 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Manipulation of upstream change log files. - -The upstream change log files format handled is simpler than the one -often used such as those generated by the default Emacs changelog mode. - -Sample ChangeLog format:: - - Change log for project Yoo - ========================== - - -- - * add a new functionality - - 2002-02-01 -- 0.1.1 - * fix bug #435454 - * fix bug #434356 - - 2002-01-01 -- 0.1 - * initial release - - -There is 3 entries in this change log, one for each released version and one -for the next version (i.e. the current entry). -Each entry contains a set of messages corresponding to changes done in this -release. -All the non empty lines before the first entry are considered as the change -log title. 
-""" - -__docformat__ = "restructuredtext en" - -import sys -from stat import S_IWRITE - -BULLET = '*' -SUBBULLET = '-' -INDENT = ' ' * 4 - -class NoEntry(Exception): - """raised when we are unable to find an entry""" - -class EntryNotFound(Exception): - """raised when we are unable to find a given entry""" - -class Version(tuple): - """simple class to handle soft version number has a tuple while - correctly printing it as X.Y.Z - """ - def __new__(cls, versionstr): - if isinstance(versionstr, basestring): - versionstr = versionstr.strip(' :') # XXX (syt) duh? - parsed = cls.parse(versionstr) - else: - parsed = versionstr - return tuple.__new__(cls, parsed) - - @classmethod - def parse(cls, versionstr): - versionstr = versionstr.strip(' :') - try: - return [int(i) for i in versionstr.split('.')] - except ValueError, ex: - raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex)) - - def __str__(self): - return '.'.join([str(i) for i in self]) - -# upstream change log ######################################################### - -class ChangeLogEntry(object): - """a change log entry, i.e. 
a set of messages associated to a version and - its release date - """ - version_class = Version - - def __init__(self, date=None, version=None, **kwargs): - self.__dict__.update(kwargs) - if version: - self.version = self.version_class(version) - else: - self.version = None - self.date = date - self.messages = [] - - def add_message(self, msg): - """add a new message""" - self.messages.append(([msg], [])) - - def complete_latest_message(self, msg_suite): - """complete the latest added message - """ - if not self.messages: - raise ValueError('unable to complete last message as there is no previous message)') - if self.messages[-1][1]: # sub messages - self.messages[-1][1][-1].append(msg_suite) - else: # message - self.messages[-1][0].append(msg_suite) - - def add_sub_message(self, sub_msg, key=None): - if not self.messages: - raise ValueError('unable to complete last message as there is no previous message)') - if key is None: - self.messages[-1][1].append([sub_msg]) - else: - raise NotImplementedError("sub message to specific key are not implemented yet") - - def write(self, stream=sys.stdout): - """write the entry to file """ - stream.write('%s -- %s\n' % (self.date or '', self.version or '')) - for msg, sub_msgs in self.messages: - stream.write('%s%s %s\n' % (INDENT, BULLET, msg[0])) - stream.write(''.join(msg[1:])) - if sub_msgs: - stream.write('\n') - for sub_msg in sub_msgs: - stream.write('%s%s %s\n' % (INDENT * 2, SUBBULLET, sub_msg[0])) - stream.write(''.join(sub_msg[1:])) - stream.write('\n') - - stream.write('\n\n') - -class ChangeLog(object): - """object representation of a whole ChangeLog file""" - - entry_class = ChangeLogEntry - - def __init__(self, changelog_file, title=''): - self.file = changelog_file - self.title = title - self.additional_content = '' - self.entries = [] - self.load() - - def __repr__(self): - return '' % (self.file, id(self), - len(self.entries)) - - def add_entry(self, entry): - """add a new entry to the change log""" - 
self.entries.append(entry) - - def get_entry(self, version='', create=None): - """ return a given changelog entry - if version is omitted, return the current entry - """ - if not self.entries: - if version or not create: - raise NoEntry() - self.entries.append(self.entry_class()) - if not version: - if self.entries[0].version and create is not None: - self.entries.insert(0, self.entry_class()) - return self.entries[0] - version = self.version_class(version) - for entry in self.entries: - if entry.version == version: - return entry - raise EntryNotFound() - - def add(self, msg, create=None): - """add a new message to the latest opened entry""" - entry = self.get_entry(create=create) - entry.add_message(msg) - - def load(self): - """ read a logilab's ChangeLog from file """ - try: - stream = open(self.file) - except IOError: - return - last = None - expect_sub = False - for line in stream.readlines(): - sline = line.strip() - words = sline.split() - # if new entry - if len(words) == 1 and words[0] == '--': - expect_sub = False - last = self.entry_class() - self.add_entry(last) - # if old entry - elif len(words) == 3 and words[1] == '--': - expect_sub = False - last = self.entry_class(words[0], words[2]) - self.add_entry(last) - # if title - elif sline and last is None: - self.title = '%s%s' % (self.title, line) - # if new entry - elif sline and sline[0] == BULLET: - expect_sub = False - last.add_message(sline[1:].strip()) - # if new sub_entry - elif expect_sub and sline and sline[0] == SUBBULLET: - last.add_sub_message(sline[1:].strip()) - # if new line for current entry - elif sline and last.messages: - last.complete_latest_message(line) - else: - expect_sub = True - self.additional_content += line - stream.close() - - def format_title(self): - return '%s\n\n' % self.title.strip() - - def save(self): - """write back change log""" - # filetutils isn't importable in appengine, so import locally - from logilab.common.fileutils import ensure_fs_mode - 
ensure_fs_mode(self.file, S_IWRITE) - self.write(open(self.file, 'w')) - - def write(self, stream=sys.stdout): - """write changelog to stream""" - stream.write(self.format_title()) - for entry in self.entries: - entry.write(stream) - diff --git a/pylibs/logilab/common/clcommands.py b/pylibs/logilab/common/clcommands.py deleted file mode 100644 index 411931bb..00000000 --- a/pylibs/logilab/common/clcommands.py +++ /dev/null @@ -1,332 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Helper functions to support command line tools providing more than -one command. - -e.g called as "tool command [options] args..." where and are -command'specific -""" - -__docformat__ = "restructuredtext en" - -import sys -import logging -from os.path import basename - -from logilab.common.configuration import Configuration -from logilab.common.logging_ext import init_log, get_threshold -from logilab.common.deprecation import deprecated - - -class BadCommandUsage(Exception): - """Raised when an unknown command is used or when a command is not - correctly used (bad options, too much / missing arguments...). - - Trigger display of command usage. 
- """ - -class CommandError(Exception): - """Raised when a command can't be processed and we want to display it and - exit, without traceback nor usage displayed. - """ - - -# command line access point #################################################### - -class CommandLine(dict): - """Usage: - - >>> LDI = cli.CommandLine('ldi', doc='Logilab debian installer', - version=version, rcfile=RCFILE) - >>> LDI.register(MyCommandClass) - >>> LDI.register(MyOtherCommandClass) - >>> LDI.run(sys.argv[1:]) - - Arguments: - - * `pgm`, the program name, default to `basename(sys.argv[0])` - - * `doc`, a short description of the command line tool - - * `copyright`, additional doc string that will be appended to the generated - doc - - * `version`, version number of string of the tool. If specified, global - --version option will be available. - - * `rcfile`, path to a configuration file. If specified, global --C/--rc-file - option will be available? self.rcfile = rcfile - - * `logger`, logger to propagate to commands, default to - `logging.getLogger(self.pgm))` - """ - def __init__(self, pgm=None, doc=None, copyright=None, version=None, - rcfile=None, logthreshold=logging.ERROR, - check_duplicated_command=True): - if pgm is None: - pgm = basename(sys.argv[0]) - self.pgm = pgm - self.doc = doc - self.copyright = copyright - self.version = version - self.rcfile = rcfile - self.logger = None - self.logthreshold = logthreshold - self.check_duplicated_command = check_duplicated_command - - def register(self, cls, force=False): - """register the given :class:`Command` subclass""" - assert not self.check_duplicated_command or force or not cls.name in self, \ - 'a command %s is already defined' % cls.name - self[cls.name] = cls - return cls - - def run(self, args): - """main command line access point: - * init logging - * handle global options (-h/--help, --version, -C/--rc-file) - * check command - * run command - - Terminate by :exc:`SystemExit` - """ - init_log(debug=True, # so that 
we use StreamHandler - logthreshold=self.logthreshold, - logformat='%(levelname)s: %(message)s') - try: - arg = args.pop(0) - except IndexError: - self.usage_and_exit(1) - if arg in ('-h', '--help'): - self.usage_and_exit(0) - if self.version is not None and arg in ('--version'): - print self.version - sys.exit(0) - rcfile = self.rcfile - if rcfile is not None and arg in ('-C', '--rc-file'): - try: - rcfile = args.pop(0) - arg = args.pop(0) - except IndexError: - self.usage_and_exit(1) - try: - command = self.get_command(arg) - except KeyError: - print 'ERROR: no %s command' % arg - print - self.usage_and_exit(1) - try: - sys.exit(command.main_run(args, rcfile)) - except KeyboardInterrupt, exc: - print 'Interrupted', - if str(exc): - print ': %s' % exc, - print - sys.exit(4) - except BadCommandUsage, err: - print 'ERROR:', err - print - print command.help() - sys.exit(1) - - def create_logger(self, handler, logthreshold=None): - logger = logging.Logger(self.pgm) - logger.handlers = [handler] - if logthreshold is None: - logthreshold = get_threshold(self.logthreshold) - logger.setLevel(logthreshold) - return logger - - def get_command(self, cmd, logger=None): - if logger is None: - logger = self.logger - if logger is None: - logger = self.logger = logging.getLogger(self.pgm) - logger.setLevel(get_threshold(self.logthreshold)) - return self[cmd](logger) - - def usage(self): - """display usage for the main program (i.e. when no command supplied) - and exit - """ - print 'usage:', self.pgm, - if self.rcfile: - print '[--rc-file=]', - print ' [options] ...' - if self.doc: - print '\n%s' % self.doc - print ''' -Type "%(pgm)s --help" for more information about a specific -command. 
Available commands are :\n''' % self.__dict__ - max_len = max([len(cmd) for cmd in self]) - padding = ' ' * max_len - for cmdname, cmd in sorted(self.items()): - if not cmd.hidden: - print ' ', (cmdname + padding)[:max_len], cmd.short_description() - if self.rcfile: - print ''' -Use --rc-file= / -C before the command -to specify a configuration file. Default to %s. -''' % self.rcfile - print '''%(pgm)s -h/--help - display this usage information and exit''' % self.__dict__ - if self.version: - print '''%(pgm)s -v/--version - display version configuration and exit''' % self.__dict__ - if self.copyright: - print '\n', self.copyright - - def usage_and_exit(self, status): - self.usage() - sys.exit(status) - - -# base command classes ######################################################### - -class Command(Configuration): - """Base class for command line commands. - - Class attributes: - - * `name`, the name of the command - - * `min_args`, minimum number of arguments, None if unspecified - - * `max_args`, maximum number of arguments, None if unspecified - - * `arguments`, string describing arguments, used in command usage - - * `hidden`, boolean flag telling if the command should be hidden, e.g. does - not appear in help's commands list - - * `options`, options list, as allowed by :mod:configuration - """ - - arguments = '' - name = '' - # hidden from help ? 
- hidden = False - # max/min args, None meaning unspecified - min_args = None - max_args = None - - @classmethod - def description(cls): - return cls.__doc__.replace(' ', '') - - @classmethod - def short_description(cls): - return cls.description().split('.')[0] - - def __init__(self, logger): - usage = '%%prog %s %s\n\n%s' % (self.name, self.arguments, - self.description()) - Configuration.__init__(self, usage=usage) - self.logger = logger - - def check_args(self, args): - """check command's arguments are provided""" - if self.min_args is not None and len(args) < self.min_args: - raise BadCommandUsage('missing argument') - if self.max_args is not None and len(args) > self.max_args: - raise BadCommandUsage('too many arguments') - - def main_run(self, args, rcfile=None): - """Run the command and return status 0 if everything went fine. - - If :exc:`CommandError` is raised by the underlying command, simply log - the error and return status 2. - - Any other exceptions, including :exc:`BadCommandUsage` will be - propagated. 
- """ - if rcfile: - self.load_file_configuration(rcfile) - args = self.load_command_line_configuration(args) - try: - self.check_args(args) - self.run(args) - except CommandError, err: - self.logger.error(err) - return 2 - return 0 - - def run(self, args): - """run the command with its specific arguments""" - raise NotImplementedError() - - -class ListCommandsCommand(Command): - """list available commands, useful for bash completion.""" - name = 'listcommands' - arguments = '[command]' - hidden = True - - def run(self, args): - """run the command with its specific arguments""" - if args: - command = args.pop() - cmd = _COMMANDS[command] - for optname, optdict in cmd.options: - print '--help' - print '--' + optname - else: - commands = sorted(_COMMANDS.keys()) - for command in commands: - cmd = _COMMANDS[command] - if not cmd.hidden: - print command - - -# deprecated stuff ############################################################# - -_COMMANDS = CommandLine() - -DEFAULT_COPYRIGHT = '''\ -Copyright (c) 2004-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -http://www.logilab.fr/ -- mailto:contact@logilab.fr''' - -@deprecated('use cls.register(cli)') -def register_commands(commands): - """register existing commands""" - for command_klass in commands: - _COMMANDS.register(command_klass) - -@deprecated('use args.pop(0)') -def main_run(args, doc=None, copyright=None, version=None): - """command line tool: run command specified by argument list (without the - program name). Raise SystemExit with status 0 if everything went fine. 
- - >>> main_run(sys.argv[1:]) - """ - _COMMANDS.doc = doc - _COMMANDS.copyright = copyright - _COMMANDS.version = version - _COMMANDS.run(args) - -@deprecated('use args.pop(0)') -def pop_arg(args_list, expected_size_after=None, msg="Missing argument"): - """helper function to get and check command line arguments""" - try: - value = args_list.pop(0) - except IndexError: - raise BadCommandUsage(msg) - if expected_size_after is not None and len(args_list) > expected_size_after: - raise BadCommandUsage('too many arguments') - return value - diff --git a/pylibs/logilab/common/cli.py b/pylibs/logilab/common/cli.py deleted file mode 100644 index 42837329..00000000 --- a/pylibs/logilab/common/cli.py +++ /dev/null @@ -1,208 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Command line interface helper classes. - -It provides some default commands, a help system, a default readline -configuration with completion and persistent history. 
- -Example:: - - class BookShell(CLIHelper): - - def __init__(self): - # quit and help are builtins - # CMD_MAP keys are commands, values are topics - self.CMD_MAP['pionce'] = _("Sommeil") - self.CMD_MAP['ronfle'] = _("Sommeil") - CLIHelper.__init__(self) - - help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille")) - def do_pionce(self): - print 'nap is good' - - help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille")) - def do_ronfle(self): - print 'fuuuuuuuuuuuu rhhhhhrhrhrrh' - - cl = BookShell() -""" - -__docformat__ = "restructuredtext en" - -from logilab.common.compat import raw_input, builtins -if not hasattr(builtins, '_'): - builtins._ = str - - -def init_readline(complete_method, histfile=None): - """Init the readline library if available.""" - try: - import readline - readline.parse_and_bind("tab: complete") - readline.set_completer(complete_method) - string = readline.get_completer_delims().replace(':', '') - readline.set_completer_delims(string) - if histfile is not None: - try: - readline.read_history_file(histfile) - except IOError: - pass - import atexit - atexit.register(readline.write_history_file, histfile) - except: - print 'readline is not available :-(' - - -class Completer : - """Readline completer.""" - - def __init__(self, commands): - self.list = commands - - def complete(self, text, state): - """Hook called by readline when is pressed.""" - n = len(text) - matches = [] - for cmd in self.list : - if cmd[:n] == text : - matches.append(cmd) - try: - return matches[state] - except IndexError: - return None - - -class CLIHelper: - """An abstract command line interface client which recognize commands - and provide an help system. 
- """ - - CMD_MAP = {'help': _("Others"), - 'quit': _("Others"), - } - CMD_PREFIX = '' - - def __init__(self, histfile=None) : - self._topics = {} - self.commands = None - self._completer = Completer(self._register_commands()) - init_readline(self._completer.complete, histfile) - - def run(self): - """loop on user input, exit on EOF""" - while True: - try: - line = raw_input('>>> ') - except EOFError: - print - break - s_line = line.strip() - if not s_line: - continue - args = s_line.split() - if args[0] in self.commands: - try: - cmd = 'do_%s' % self.commands[args[0]] - getattr(self, cmd)(*args[1:]) - except EOFError: - break - except: - import traceback - traceback.print_exc() - else: - try: - self.handle_line(s_line) - except: - import traceback - traceback.print_exc() - - def handle_line(self, stripped_line): - """Method to overload in the concrete class (should handle - lines which are not commands). - """ - raise NotImplementedError() - - - # private methods ######################################################### - - def _register_commands(self): - """ register available commands method and return the list of - commands name - """ - self.commands = {} - self._command_help = {} - commands = [attr[3:] for attr in dir(self) if attr[:3] == 'do_'] - for command in commands: - topic = self.CMD_MAP[command] - help_method = getattr(self, 'help_do_%s' % command) - self._topics.setdefault(topic, []).append(help_method) - self.commands[self.CMD_PREFIX + command] = command - self._command_help[command] = help_method - return self.commands.keys() - - def _print_help(self, cmd, syntax, explanation): - print _('Command %s') % cmd - print _('Syntax: %s') % syntax - print '\t', explanation - print - - - # predefined commands ##################################################### - - def do_help(self, command=None) : - """base input of the help system""" - if command in self._command_help: - self._print_help(*self._command_help[command]) - elif command is None or command not 
in self._topics: - print _("Use help or help .") - print _("Available topics are:") - topics = sorted(self._topics.keys()) - for topic in topics: - print '\t', topic - print - print _("Available commands are:") - commands = self.commands.keys() - commands.sort() - for command in commands: - print '\t', command[len(self.CMD_PREFIX):] - - else: - print _('Available commands about %s:') % command - print - for command_help_method in self._topics[command]: - try: - if callable(command_help_method): - self._print_help(*command_help_method()) - else: - self._print_help(*command_help_method) - except: - import traceback - traceback.print_exc() - print 'ERROR in help method %s'% ( - command_help_method.func_name) - - help_do_help = ("help", "help [topic|command]", - _("print help message for the given topic/command or \ -available topics when no argument")) - - def do_quit(self): - """quit the CLI""" - raise EOFError() - - def help_do_quit(self): - return ("quit", "quit", _("quit the application")) diff --git a/pylibs/logilab/common/compat.py b/pylibs/logilab/common/compat.py deleted file mode 100644 index 943b8175..00000000 --- a/pylibs/logilab/common/compat.py +++ /dev/null @@ -1,233 +0,0 @@ -# pylint: disable=E0601,W0622,W0611 -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Wrappers around some builtins introduced in python 2.3, 2.4 and -2.5, making them available in for earlier versions of python. - -See another compatibility snippets from other projects: - - :mod:`lib2to3.fixes` - :mod:`coverage.backward` - :mod:`unittest2.compatibility` -""" - -from __future__ import generators - -__docformat__ = "restructuredtext en" - -import os -import sys -from warnings import warn - -import __builtin__ as builtins # 2to3 will tranform '__builtin__' to 'builtins' - -if sys.version_info < (3, 0): - str_to_bytes = str - def str_encode(string, encoding): - if isinstance(string, unicode): - return string.encode(encoding) - return str(string) -else: - def str_to_bytes(string): - return str.encode(string) - # we have to ignore the encoding in py3k to be able to write a string into a - # TextIOWrapper or like object (which expect an unicode string) - def str_encode(string, encoding): - return str(string) - -# XXX shouldn't we remove this and just let 2to3 do his job ? -try: - callable = callable -except NameError:# callable removed from py3k - import collections - def callable(something): - return isinstance(something, collections.Callable) - del collections - -if sys.version_info < (3, 0): - raw_input = raw_input -else: - raw_input = input - -# Pythons 2 and 3 differ on where to get StringIO -if sys.version_info < (3, 0): - from cStringIO import StringIO - FileIO = file - BytesIO = StringIO - reload = reload -else: - from io import FileIO, BytesIO, StringIO - from imp import reload - -# Where do pickles come from? 
-try: - import cPickle as pickle -except ImportError: - import pickle - -from logilab.common.deprecation import deprecated - -from itertools import izip, chain, imap -if sys.version_info < (3, 0):# 2to3 will remove the imports - izip = deprecated('izip exists in itertools since py2.3')(izip) - imap = deprecated('imap exists in itertools since py2.3')(imap) -chain = deprecated('chain exists in itertools since py2.3')(chain) - -sum = deprecated('sum exists in builtins since py2.3')(sum) -enumerate = deprecated('enumerate exists in builtins since py2.3')(enumerate) -frozenset = deprecated('frozenset exists in builtins since py2.4')(frozenset) -reversed = deprecated('reversed exists in builtins since py2.4')(reversed) -sorted = deprecated('sorted exists in builtins since py2.4')(sorted) -max = deprecated('max exists in builtins since py2.4')(max) - - -# Python2.5 builtins -try: - any = any - all = all -except NameError: - def any(iterable): - """any(iterable) -> bool - - Return True if bool(x) is True for any x in the iterable. - """ - for elt in iterable: - if elt: - return True - return False - - def all(iterable): - """all(iterable) -> bool - - Return True if bool(x) is True for all values x in the iterable. - """ - for elt in iterable: - if not elt: - return False - return True - - -# Python2.5 subprocess added functions and exceptions -try: - from subprocess import Popen -except ImportError: - # gae or python < 2.3 - - class CalledProcessError(Exception): - """This exception is raised when a process run by check_call() returns - a non-zero exit status. The exit status will be stored in the - returncode attribute.""" - def __init__(self, returncode, cmd): - self.returncode = returncode - self.cmd = cmd - def __str__(self): - return "Command '%s' returned non-zero exit status %d" % (self.cmd, - self.returncode) - - def call(*popenargs, **kwargs): - """Run command with arguments. Wait for command to complete, then - return the returncode attribute. 
- - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ - # workaround: subprocess.Popen(cmd, stdout=sys.stdout) fails - # see http://bugs.python.org/issue1531862 - if "stdout" in kwargs: - fileno = kwargs.get("stdout").fileno() - del kwargs['stdout'] - return Popen(stdout=os.dup(fileno), *popenargs, **kwargs).wait() - return Popen(*popenargs, **kwargs).wait() - - def check_call(*popenargs, **kwargs): - """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - check_call(["ls", "-l"]) - """ - retcode = call(*popenargs, **kwargs) - cmd = kwargs.get("args") - if cmd is None: - cmd = popenargs[0] - if retcode: - raise CalledProcessError(retcode, cmd) - return retcode - -try: - from os.path import relpath -except ImportError: # python < 2.6 - from os.path import curdir, abspath, sep, commonprefix, pardir, join - def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_list = abspath(start).split(sep) - path_list = abspath(path).split(sep) - - # Work out how much of the filepath is shared by start and path. 
- i = len(commonprefix([start_list, path_list])) - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - - -# XXX don't know why tests don't pass if I don't do that : -_real_set, set = set, deprecated('set exists in builtins since py2.4')(set) -if (2, 5) <= sys.version_info[:2]: - InheritableSet = _real_set -else: - class InheritableSet(_real_set): - """hacked resolving inheritancy issue from old style class in 2.4""" - def __new__(cls, *args, **kwargs): - if args: - new_args = (args[0], ) - else: - new_args = () - obj = _real_set.__new__(cls, *new_args) - obj.__init__(*args, **kwargs) - return obj - -# XXX shouldn't we remove this and just let 2to3 do his job ? -# range or xrange? -try: - range = xrange -except NameError: - range = range - -# ConfigParser was renamed to the more-standard configparser -try: - import configparser -except ImportError: - import ConfigParser as configparser - -try: - import json -except ImportError: - try: - import simplejson as json - except ImportError: - json = None diff --git a/pylibs/logilab/common/configuration.py b/pylibs/logilab/common/configuration.py deleted file mode 100644 index 0eafa10a..00000000 --- a/pylibs/logilab/common/configuration.py +++ /dev/null @@ -1,1069 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Classes to handle advanced configuration in simple to complex applications. - -Allows to load the configuration from a file or from command line -options, to generate a sample configuration file or to display -program's usage. Fills the gap between optik/optparse and ConfigParser -by adding data types (which are also available as a standalone optik -extension in the `optik_ext` module). - - -Quick start: simplest usage ---------------------------- - -.. python :: - - >>> import sys - >>> from logilab.common.configuration import Configuration - >>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': ''}), - ... ('value', {'type': 'string', 'metavar': ''}), - ... ('multiple', {'type': 'csv', 'default': ('yop',), - ... 'metavar': '', - ... 'help': 'you can also document the option'}), - ... ('number', {'type': 'int', 'default':2, 'metavar':''}), - ... ] - >>> config = Configuration(options=options, name='My config') - >>> print config['dothis'] - True - >>> print config['value'] - None - >>> print config['multiple'] - ('yop',) - >>> print config['number'] - 2 - >>> print config.help() - Usage: [options] - - Options: - -h, --help show this help message and exit - --dothis= - --value= - --multiple= - you can also document the option [current: none] - --number= - - >>> f = open('myconfig.ini', 'w') - >>> f.write('''[MY CONFIG] - ... number = 3 - ... dothis = no - ... multiple = 1,2,3 - ... ''') - >>> f.close() - >>> config.load_file_configuration('myconfig.ini') - >>> print config['dothis'] - False - >>> print config['value'] - None - >>> print config['multiple'] - ['1', '2', '3'] - >>> print config['number'] - 3 - >>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6', - ... 
'nonoptionargument'] - >>> print config.load_command_line_configuration() - ['nonoptionargument'] - >>> print config['value'] - bacon - >>> config.generate_config() - # class for simple configurations which don't need the - # manager / providers model and prefer delegation to inheritance - # - # configuration values are accessible through a dict like interface - # - [MY CONFIG] - - dothis=no - - value=bacon - - # you can also document the option - multiple=4,5,6 - - number=3 - >>> -""" -__docformat__ = "restructuredtext en" - -__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn', - 'ConfigurationMixIn', 'Configuration', - 'OptionsManager2ConfigurationAdapter') - -import os -import sys -import re -from os.path import exists, expanduser -from copy import copy -from ConfigParser import ConfigParser, NoOptionError, NoSectionError, \ - DuplicateSectionError -from warnings import warn - -from logilab.common.compat import callable, raw_input, str_encode as _encode - -from logilab.common.textutils import normalize_text, unquote -from logilab.common import optik_ext as optparse - -OptionError = optparse.OptionError - -REQUIRED = [] - -class UnsupportedAction(Exception): - """raised by set_option when it doesn't know what to do for an action""" - - -def _get_encoding(encoding, stream): - encoding = encoding or getattr(stream, 'encoding', None) - if not encoding: - import locale - encoding = locale.getpreferredencoding() - return encoding - - -# validation functions ######################################################## - -def choice_validator(optdict, name, value): - """validate and return a converted value for option of type 'choice' - """ - if not value in optdict['choices']: - msg = "option %s: invalid value: %r, should be in %s" - raise optparse.OptionValueError(msg % (name, value, optdict['choices'])) - return value - -def multiple_choice_validator(optdict, name, value): - """validate and return a converted value for option of type 'choice' - """ - choices = 
optdict['choices'] - values = optparse.check_csv(None, name, value) - for value in values: - if not value in choices: - msg = "option %s: invalid value: %r, should be in %s" - raise optparse.OptionValueError(msg % (name, value, choices)) - return values - -def csv_validator(optdict, name, value): - """validate and return a converted value for option of type 'csv' - """ - return optparse.check_csv(None, name, value) - -def yn_validator(optdict, name, value): - """validate and return a converted value for option of type 'yn' - """ - return optparse.check_yn(None, name, value) - -def named_validator(optdict, name, value): - """validate and return a converted value for option of type 'named' - """ - return optparse.check_named(None, name, value) - -def file_validator(optdict, name, value): - """validate and return a filepath for option of type 'file'""" - return optparse.check_file(None, name, value) - -def color_validator(optdict, name, value): - """validate and return a valid color for option of type 'color'""" - return optparse.check_color(None, name, value) - -def password_validator(optdict, name, value): - """validate and return a string for option of type 'password'""" - return optparse.check_password(None, name, value) - -def date_validator(optdict, name, value): - """validate and return a mx DateTime object for option of type 'date'""" - return optparse.check_date(None, name, value) - -def time_validator(optdict, name, value): - """validate and return a time object for option of type 'time'""" - return optparse.check_time(None, name, value) - -def bytes_validator(optdict, name, value): - """validate and return an integer for option of type 'bytes'""" - return optparse.check_bytes(None, name, value) - - -VALIDATORS = {'string': unquote, - 'int': int, - 'float': float, - 'file': file_validator, - 'font': unquote, - 'color': color_validator, - 'regexp': re.compile, - 'csv': csv_validator, - 'yn': yn_validator, - 'bool': yn_validator, - 'named': named_validator, - 
'password': password_validator, - 'date': date_validator, - 'time': time_validator, - 'bytes': bytes_validator, - 'choice': choice_validator, - 'multiple_choice': multiple_choice_validator, - } - -def _call_validator(opttype, optdict, option, value): - if opttype not in VALIDATORS: - raise Exception('Unsupported type "%s"' % opttype) - try: - return VALIDATORS[opttype](optdict, option, value) - except TypeError: - try: - return VALIDATORS[opttype](value) - except optparse.OptionValueError: - raise - except: - raise optparse.OptionValueError('%s value (%r) should be of type %s' % - (option, value, opttype)) - -# user input functions ######################################################## - -def input_password(optdict, question='password:'): - from getpass import getpass - while True: - value = getpass(question) - value2 = getpass('confirm: ') - if value == value2: - return value - print 'password mismatch, try again' - -def input_string(optdict, question): - value = raw_input(question).strip() - return value or None - -def _make_input_function(opttype): - def input_validator(optdict, question): - while True: - value = raw_input(question) - if not value.strip(): - return None - try: - return _call_validator(opttype, optdict, None, value) - except optparse.OptionValueError, ex: - msg = str(ex).split(':', 1)[-1].strip() - print 'bad value: %s' % msg - return input_validator - -INPUT_FUNCTIONS = { - 'string': input_string, - 'password': input_password, - } - -for opttype in VALIDATORS.keys(): - INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype)) - -def expand_default(self, option): - """monkey patch OptionParser.expand_default since we have a particular - way to handle defaults to avoid overriding values in the configuration - file - """ - if self.parser is None or not self.default_tag: - return option.help - optname = option._long_opts[0][2:] - try: - provider = self.parser.options_manager._all_options[optname] - except KeyError: - value = None - else: 
- optdict = provider.get_option_def(optname) - optname = provider.option_name(optname, optdict) - value = getattr(provider.config, optname, optdict) - value = format_option_value(optdict, value) - if value is optparse.NO_DEFAULT or not value: - value = self.NO_DEFAULT_VALUE - return option.help.replace(self.default_tag, str(value)) - - -def convert(value, optdict, name=''): - """return a validated value for an option according to its type - - optional argument name is only used for error message formatting - """ - try: - _type = optdict['type'] - except KeyError: - # FIXME - return value - return _call_validator(_type, optdict, name, value) - -def comment(string): - """return string as a comment""" - lines = [line.strip() for line in string.splitlines()] - return '# ' + ('%s# ' % os.linesep).join(lines) - -def format_time(value): - if not value: - return '0' - if value != int(value): - return '%.2fs' % value - value = int(value) - nbmin, nbsec = divmod(value, 60) - if nbsec: - return '%ss' % value - nbhour, nbmin_ = divmod(nbmin, 60) - if nbmin_: - return '%smin' % nbmin - nbday, nbhour_ = divmod(nbhour, 24) - if nbhour_: - return '%sh' % nbhour - return '%sd' % nbday - -def format_bytes(value): - if not value: - return '0' - if value != int(value): - return '%.2fB' % value - value = int(value) - prevunit = 'B' - for unit in ('KB', 'MB', 'GB', 'TB'): - next, remain = divmod(value, 1024) - if remain: - return '%s%s' % (value, prevunit) - prevunit = unit - value = next - return '%s%s' % (value, unit) - -def format_option_value(optdict, value): - """return the user input's value from a 'compiled' value""" - if isinstance(value, (list, tuple)): - value = ','.join(value) - elif isinstance(value, dict): - value = ','.join(['%s:%s' % (k, v) for k, v in value.items()]) - elif hasattr(value, 'match'): # optdict.get('type') == 'regexp' - # compiled regexp - value = value.pattern - elif optdict.get('type') == 'yn': - value = value and 'yes' or 'no' - elif isinstance(value, 
(str, unicode)) and value.isspace(): - value = "'%s'" % value - elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)): - value = format_time(value) - elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'): - value = format_bytes(value) - return value - -def ini_format_section(stream, section, options, encoding=None, doc=None): - """format an options section using the INI format""" - encoding = _get_encoding(encoding, stream) - if doc: - print >> stream, _encode(comment(doc), encoding) - print >> stream, '[%s]' % section - ini_format(stream, options, encoding) - -def ini_format(stream, options, encoding): - """format options using the INI format""" - for optname, optdict, value in options: - value = format_option_value(optdict, value) - help = optdict.get('help') - if help: - help = normalize_text(help, line_len=79, indent='# ') - print >> stream - print >> stream, _encode(help, encoding) - else: - print >> stream - if value is None: - print >> stream, '#%s=' % optname - else: - value = _encode(value, encoding).strip() - print >> stream, '%s=%s' % (optname, value) - -format_section = ini_format_section - -def rest_format_section(stream, section, options, encoding=None, doc=None): - """format an options section using the INI format""" - encoding = _get_encoding(encoding, stream) - if section: - print >> stream, '%s\n%s' % (section, "'"*len(section)) - if doc: - print >> stream, _encode(normalize_text(doc, line_len=79, indent=''), - encoding) - print >> stream - for optname, optdict, value in options: - help = optdict.get('help') - print >> stream, ':%s:' % optname - if help: - help = normalize_text(help, line_len=79, indent=' ') - print >> stream, _encode(help, encoding) - if value: - value = _encode(format_option_value(optdict, value), encoding) - print >> stream, '' - print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``") - - -class OptionsManagerMixIn(object): - """MixIn to handle a configuration from both a 
configuration file and - command line options - """ - - def __init__(self, usage, config_file=None, version=None, quiet=0): - self.config_file = config_file - self.reset_parsers(usage, version=version) - # list of registered options providers - self.options_providers = [] - # dictionary associating option name to checker - self._all_options = {} - self._short_options = {} - self._nocallback_options = {} - self._mygroups = dict() - # verbosity - self.quiet = quiet - self._maxlevel = 0 - - def reset_parsers(self, usage='', version=None): - # configuration file parser - self.cfgfile_parser = ConfigParser() - # command line parser - self.cmdline_parser = optparse.OptionParser(usage=usage, version=version) - self.cmdline_parser.options_manager = self - self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS) - - def register_options_provider(self, provider, own_group=True): - """register an options provider""" - assert provider.priority <= 0, "provider's priority can't be >= 0" - for i in range(len(self.options_providers)): - if provider.priority > self.options_providers[i].priority: - self.options_providers.insert(i, provider) - break - else: - self.options_providers.append(provider) - non_group_spec_options = [option for option in provider.options - if 'group' not in option[1]] - groups = getattr(provider, 'option_groups', ()) - if own_group and non_group_spec_options: - self.add_option_group(provider.name.upper(), provider.__doc__, - non_group_spec_options, provider) - else: - for opt, optdict in non_group_spec_options: - self.add_optik_option(provider, self.cmdline_parser, opt, optdict) - for gname, gdoc in groups: - gname = gname.upper() - goptions = [option for option in provider.options - if option[1].get('group', '').upper() == gname] - self.add_option_group(gname, gdoc, goptions, provider) - - def add_option_group(self, group_name, doc, options, provider): - """add an option group including the listed options - """ - assert options - # add option 
group to the command line parser - if group_name in self._mygroups: - group = self._mygroups[group_name] - else: - group = optparse.OptionGroup(self.cmdline_parser, - title=group_name.capitalize()) - self.cmdline_parser.add_option_group(group) - group.level = provider.level - self._mygroups[group_name] = group - # add section to the config file - if group_name != "DEFAULT": - self.cfgfile_parser.add_section(group_name) - # add provider's specific options - for opt, optdict in options: - self.add_optik_option(provider, group, opt, optdict) - - def add_optik_option(self, provider, optikcontainer, opt, optdict): - if 'inputlevel' in optdict: - warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,' - ' use "level"' % opt, DeprecationWarning) - optdict['level'] = optdict.pop('inputlevel') - args, optdict = self.optik_option(provider, opt, optdict) - option = optikcontainer.add_option(*args, **optdict) - self._all_options[opt] = provider - self._maxlevel = max(self._maxlevel, option.level or 0) - - def optik_option(self, provider, opt, optdict): - """get our personal option definition and return a suitable form for - use with optik/optparse - """ - optdict = copy(optdict) - others = {} - if 'action' in optdict: - self._nocallback_options[provider] = opt - else: - optdict['action'] = 'callback' - optdict['callback'] = self.cb_set_provider_option - # default is handled here and *must not* be given to optik if you - # want the whole machinery to work - if 'default' in optdict: - if (optparse.OPTPARSE_FORMAT_DEFAULT and 'help' in optdict and - optdict.get('default') is not None and - not optdict['action'] in ('store_true', 'store_false')): - optdict['help'] += ' [current: %default]' - del optdict['default'] - args = ['--' + str(opt)] - if 'short' in optdict: - self._short_options[optdict['short']] = opt - args.append('-' + optdict['short']) - del optdict['short'] - # cleanup option definition dict before giving it to optik - for key in optdict.keys(): - if not 
key in self._optik_option_attrs: - optdict.pop(key) - return args, optdict - - def cb_set_provider_option(self, option, opt, value, parser): - """optik callback for option setting""" - if opt.startswith('--'): - # remove -- on long option - opt = opt[2:] - else: - # short option, get its long equivalent - opt = self._short_options[opt[1:]] - # trick since we can't set action='store_true' on options - if value is None: - value = 1 - self.global_set_option(opt, value) - - def global_set_option(self, opt, value): - """set option on the correct option provider""" - self._all_options[opt].set_option(opt, value) - - def generate_config(self, stream=None, skipsections=(), encoding=None): - """write a configuration file according to the current configuration - into the given stream or stdout - """ - options_by_section = {} - sections = [] - for provider in self.options_providers: - for section, options in provider.options_by_section(): - if section is None: - section = provider.name - if section in skipsections: - continue - options = [(n, d, v) for (n, d, v) in options - if d.get('type') is not None] - if not options: - continue - if not section in sections: - sections.append(section) - alloptions = options_by_section.setdefault(section, []) - alloptions += options - stream = stream or sys.stdout - encoding = _get_encoding(encoding, stream) - printed = False - for section in sections: - if printed: - print >> stream, '\n' - format_section(stream, section.upper(), options_by_section[section], - encoding) - printed = True - - def generate_manpage(self, pkginfo, section=1, stream=None): - """write a man page for the current configuration into the given - stream or stdout - """ - self._monkeypatch_expand_default() - try: - optparse.generate_manpage(self.cmdline_parser, pkginfo, - section, stream=stream or sys.stdout, - level=self._maxlevel) - finally: - self._unmonkeypatch_expand_default() - - # initialization methods ################################################## - - def 
load_provider_defaults(self): - """initialize configuration using default values""" - for provider in self.options_providers: - provider.load_defaults() - - def load_file_configuration(self, config_file=None): - """load the configuration from file""" - self.read_config_file(config_file) - self.load_config_file() - - def read_config_file(self, config_file=None): - """read the configuration file but do not load it (i.e. dispatching - values to each options provider) - """ - helplevel = 1 - while helplevel <= self._maxlevel: - opt = '-'.join(['long'] * helplevel) + '-help' - if opt in self._all_options: - break # already processed - def helpfunc(option, opt, val, p, level=helplevel): - print self.help(level) - sys.exit(0) - helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel) - optdict = {'action' : 'callback', 'callback' : helpfunc, - 'help' : helpmsg} - provider = self.options_providers[0] - self.add_optik_option(provider, self.cmdline_parser, opt, optdict) - provider.options += ( (opt, optdict), ) - helplevel += 1 - if config_file is None: - config_file = self.config_file - if config_file is not None: - config_file = expanduser(config_file) - if config_file and exists(config_file): - parser = self.cfgfile_parser - parser.read([config_file]) - # normalize sections'title - for sect, values in parser._sections.items(): - if not sect.isupper() and values: - parser._sections[sect.upper()] = values - elif not self.quiet: - msg = 'No config file found, using default configuration' - print >> sys.stderr, msg - return - - def input_config(self, onlysection=None, inputlevel=0, stream=None): - """interactively get configuration values by asking to the user and generate - a configuration file - """ - if onlysection is not None: - onlysection = onlysection.upper() - for provider in self.options_providers: - for section, option, optdict in provider.all_options(): - if onlysection is not None and section != onlysection: - continue - if not 'type' in optdict: - # ignore 
action without type (callback, store_true...) - continue - provider.input_option(option, optdict, inputlevel) - # now we can generate the configuration file - if stream is not None: - self.generate_config(stream) - - def load_config_file(self): - """dispatch values previously read from a configuration file to each - options provider) - """ - parser = self.cfgfile_parser - for provider in self.options_providers: - for section, option, optdict in provider.all_options(): - try: - value = parser.get(section, option) - provider.set_option(option, value, optdict=optdict) - except (NoSectionError, NoOptionError), ex: - continue - - def load_configuration(self, **kwargs): - """override configuration according to given parameters - """ - for opt, opt_value in kwargs.items(): - opt = opt.replace('_', '-') - provider = self._all_options[opt] - provider.set_option(opt, opt_value) - - def load_command_line_configuration(self, args=None): - """override configuration according to command line parameters - - return additional arguments - """ - self._monkeypatch_expand_default() - try: - if args is None: - args = sys.argv[1:] - else: - args = list(args) - (options, args) = self.cmdline_parser.parse_args(args=args) - for provider in self._nocallback_options.keys(): - config = provider.config - for attr in config.__dict__.keys(): - value = getattr(options, attr, None) - if value is None: - continue - setattr(config, attr, value) - return args - finally: - self._unmonkeypatch_expand_default() - - - # help methods ############################################################ - - def add_help_section(self, title, description, level=0): - """add a dummy option section for help purpose """ - group = optparse.OptionGroup(self.cmdline_parser, - title=title.capitalize(), - description=description) - group.level = level - self._maxlevel = max(self._maxlevel, level) - self.cmdline_parser.add_option_group(group) - - def _monkeypatch_expand_default(self): - # monkey patch optparse to deal with 
our default values - try: - self.__expand_default_backup = optparse.HelpFormatter.expand_default - optparse.HelpFormatter.expand_default = expand_default - except AttributeError: - # python < 2.4: nothing to be done - pass - def _unmonkeypatch_expand_default(self): - # remove monkey patch - if hasattr(optparse.HelpFormatter, 'expand_default'): - # unpatch optparse to avoid side effects - optparse.HelpFormatter.expand_default = self.__expand_default_backup - - def help(self, level=0): - """return the usage string for available options """ - self.cmdline_parser.formatter.output_level = level - self._monkeypatch_expand_default() - try: - return self.cmdline_parser.format_help() - finally: - self._unmonkeypatch_expand_default() - - -class Method(object): - """used to ease late binding of default method (so you can define options - on the class using default methods on the configuration instance) - """ - def __init__(self, methname): - self.method = methname - self._inst = None - - def bind(self, instance): - """bind the method to its instance""" - if self._inst is None: - self._inst = instance - - def __call__(self, *args, **kwargs): - assert self._inst, 'unbound method' - return getattr(self._inst, self.method)(*args, **kwargs) - - -class OptionsProviderMixIn(object): - """Mixin to provide options to an OptionsManager""" - - # those attributes should be overridden - priority = -1 - name = 'default' - options = () - level = 0 - - def __init__(self): - self.config = optparse.Values() - for option in self.options: - try: - option, optdict = option - except ValueError: - raise Exception('Bad option: %r' % option) - if isinstance(optdict.get('default'), Method): - optdict['default'].bind(self) - elif isinstance(optdict.get('callback'), Method): - optdict['callback'].bind(self) - self.load_defaults() - - def load_defaults(self): - """initialize the provider using default values""" - for opt, optdict in self.options: - action = optdict.get('action') - if action != 
'callback': - # callback action have no default - default = self.option_default(opt, optdict) - if default is REQUIRED: - continue - self.set_option(opt, default, action, optdict) - - def option_default(self, opt, optdict=None): - """return the default value for an option""" - if optdict is None: - optdict = self.get_option_def(opt) - default = optdict.get('default') - if callable(default): - default = default() - return default - - def option_name(self, opt, optdict=None): - """get the config attribute corresponding to opt - """ - if optdict is None: - optdict = self.get_option_def(opt) - return optdict.get('dest', opt.replace('-', '_')) - - def option_value(self, opt): - """get the current value for the given option""" - return getattr(self.config, self.option_name(opt), None) - - def set_option(self, opt, value, action=None, optdict=None): - """method called to set an option (registered in the options list) - """ - # print "************ setting option", opt," to value", value - if optdict is None: - optdict = self.get_option_def(opt) - if value is not None: - value = convert(value, optdict, opt) - if action is None: - action = optdict.get('action', 'store') - if optdict.get('type') == 'named': # XXX need specific handling - optname = self.option_name(opt, optdict) - currentvalue = getattr(self.config, optname, None) - if currentvalue: - currentvalue.update(value) - value = currentvalue - if action == 'store': - setattr(self.config, self.option_name(opt, optdict), value) - elif action in ('store_true', 'count'): - setattr(self.config, self.option_name(opt, optdict), 0) - elif action == 'store_false': - setattr(self.config, self.option_name(opt, optdict), 1) - elif action == 'append': - opt = self.option_name(opt, optdict) - _list = getattr(self.config, opt, None) - if _list is None: - if isinstance(value, (list, tuple)): - _list = value - elif value is not None: - _list = [] - _list.append(value) - setattr(self.config, opt, _list) - elif isinstance(_list, tuple): 
- setattr(self.config, opt, _list + (value,)) - else: - _list.append(value) - elif action == 'callback': - optdict['callback'](None, opt, value, None) - else: - raise UnsupportedAction(action) - - def input_option(self, option, optdict, inputlevel=99): - default = self.option_default(option, optdict) - if default is REQUIRED: - defaultstr = '(required): ' - elif optdict.get('level', 0) > inputlevel: - return - elif optdict['type'] == 'password' or default is None: - defaultstr = ': ' - else: - defaultstr = '(default: %s): ' % format_option_value(optdict, default) - print ':%s:' % option - print optdict.get('help') or option - inputfunc = INPUT_FUNCTIONS[optdict['type']] - value = inputfunc(optdict, defaultstr) - while default is REQUIRED and not value: - print 'please specify a value' - value = inputfunc(optdict, '%s: ' % option) - if value is None and default is not None: - value = default - self.set_option(option, value, optdict=optdict) - - def get_option_def(self, opt): - """return the dictionary defining an option given it's name""" - assert self.options - for option in self.options: - if option[0] == opt: - return option[1] - raise OptionError('no such option %s in section %r' - % (opt, self.name), opt) - - - def all_options(self): - """return an iterator on available options for this provider - option are actually described by a 3-uple: - (section, option name, option dictionary) - """ - for section, options in self.options_by_section(): - if section is None: - if self.name is None: - continue - section = self.name.upper() - for option, optiondict, value in options: - yield section, option, optiondict - - def options_by_section(self): - """return an iterator on options grouped by section - - (section, [list of (optname, optdict, optvalue)]) - """ - sections = {} - for optname, optdict in self.options: - sections.setdefault(optdict.get('group'), []).append( - (optname, optdict, self.option_value(optname))) - if None in sections: - yield None, 
sections.pop(None) - for section, options in sections.items(): - yield section.upper(), options - - def options_and_values(self, options=None): - if options is None: - options = self.options - for optname, optdict in options: - yield (optname, optdict, self.option_value(optname)) - - -class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): - """basic mixin for simple configurations which don't need the - manager / providers model - """ - def __init__(self, *args, **kwargs): - if not args: - kwargs.setdefault('usage', '') - kwargs.setdefault('quiet', 1) - OptionsManagerMixIn.__init__(self, *args, **kwargs) - OptionsProviderMixIn.__init__(self) - if not getattr(self, 'option_groups', None): - self.option_groups = [] - for option, optdict in self.options: - try: - gdef = (optdict['group'].upper(), '') - except KeyError: - continue - if not gdef in self.option_groups: - self.option_groups.append(gdef) - self.register_options_provider(self, own_group=0) - - def register_options(self, options): - """add some options to the configuration""" - options_by_group = {} - for optname, optdict in options: - options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict)) - for group, options in options_by_group.items(): - self.add_option_group(group, None, options, self) - self.options += tuple(options) - - def load_defaults(self): - OptionsProviderMixIn.load_defaults(self) - - def __iter__(self): - return iter(self.config.__dict__.iteritems()) - - def __getitem__(self, key): - try: - return getattr(self.config, self.option_name(key)) - except (optparse.OptionValueError, AttributeError): - raise KeyError(key) - - def __setitem__(self, key, value): - self.set_option(key, value) - - def get(self, key, default=None): - try: - return getattr(self.config, self.option_name(key)) - except (OptionError, AttributeError): - return default - - -class Configuration(ConfigurationMixIn): - """class for simple configurations which don't need the 
- manager / providers model and prefer delegation to inheritance - - configuration values are accessible through a dict like interface - """ - - def __init__(self, config_file=None, options=None, name=None, - usage=None, doc=None, version=None): - if options is not None: - self.options = options - if name is not None: - self.name = name - if doc is not None: - self.__doc__ = doc - super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version) - - -class OptionsManager2ConfigurationAdapter(object): - """Adapt an option manager to behave like a - `logilab.common.configuration.Configuration` instance - """ - def __init__(self, provider): - self.config = provider - - def __getattr__(self, key): - return getattr(self.config, key) - - def __getitem__(self, key): - provider = self.config._all_options[key] - try: - return getattr(provider.config, provider.option_name(key)) - except AttributeError: - raise KeyError(key) - - def __setitem__(self, key, value): - self.config.global_set_option(self.config.option_name(key), value) - - def get(self, key, default=None): - provider = self.config._all_options[key] - try: - return getattr(provider.config, provider.option_name(key)) - except AttributeError: - return default - - -def read_old_config(newconfig, changes, configfile): - """initialize newconfig from a deprecated configuration file - - possible changes: - * ('renamed', oldname, newname) - * ('moved', option, oldgroup, newgroup) - * ('typechanged', option, oldtype, newvalue) - """ - # build an index of changes - changesindex = {} - for action in changes: - if action[0] == 'moved': - option, oldgroup, newgroup = action[1:] - changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup)) - continue - if action[0] == 'renamed': - oldname, newname = action[1:] - changesindex.setdefault(newname, []).append((action[0], oldname)) - continue - if action[0] == 'typechanged': - option, oldtype, newvalue = action[1:] - 
changesindex.setdefault(option, []).append((action[0], oldtype, newvalue)) - continue - if action[1] in ('added', 'removed'): - continue # nothing to do here - raise Exception('unknown change %s' % action[0]) - # build a config object able to read the old config - options = [] - for optname, optdef in newconfig.options: - for action in changesindex.pop(optname, ()): - if action[0] == 'moved': - oldgroup, newgroup = action[1:] - optdef = optdef.copy() - optdef['group'] = oldgroup - elif action[0] == 'renamed': - optname = action[1] - elif action[0] == 'typechanged': - oldtype = action[1] - optdef = optdef.copy() - optdef['type'] = oldtype - options.append((optname, optdef)) - if changesindex: - raise Exception('unapplied changes: %s' % changesindex) - oldconfig = Configuration(options=options, name=newconfig.name) - # read the old config - oldconfig.load_file_configuration(configfile) - # apply values reverting changes - changes.reverse() - done = set() - for action in changes: - if action[0] == 'renamed': - oldname, newname = action[1:] - newconfig[newname] = oldconfig[oldname] - done.add(newname) - elif action[0] == 'typechanged': - optname, oldtype, newvalue = action[1:] - newconfig[optname] = newvalue - done.add(optname) - for optname, optdef in newconfig.options: - if optdef.get('type') and not optname in done: - newconfig.set_option(optname, oldconfig[optname], optdict=optdef) - - -def merge_options(options): - """preprocess options to remove duplicate""" - alloptions = {} - options = list(options) - for i in range(len(options)-1, -1, -1): - optname, optdict = options[i] - if optname in alloptions: - options.pop(i) - alloptions[optname].update(optdict) - else: - alloptions[optname] = optdict - return tuple(options) diff --git a/pylibs/logilab/common/contexts.py b/pylibs/logilab/common/contexts.py deleted file mode 100644 index d78c3274..00000000 --- a/pylibs/logilab/common/contexts.py +++ /dev/null @@ -1,5 +0,0 @@ -from warnings import warn 
-warn('logilab.common.contexts module is deprecated, use logilab.common.shellutils instead', - DeprecationWarning, stacklevel=1) - -from logilab.common.shellutils import tempfile, pushd diff --git a/pylibs/logilab/common/corbautils.py b/pylibs/logilab/common/corbautils.py deleted file mode 100644 index 8dfb2ba3..00000000 --- a/pylibs/logilab/common/corbautils.py +++ /dev/null @@ -1,117 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""A set of utility function to ease the use of OmniORBpy. - - - - -""" -__docformat__ = "restructuredtext en" - -from omniORB import CORBA, PortableServer -import CosNaming - -orb = None - -def get_orb(): - """ - returns a reference to the ORB. - The first call to the method initialized the ORB - This method is mainly used internally in the module. - """ - - global orb - if orb is None: - import sys - orb = CORBA.ORB_init(sys.argv, CORBA.ORB_ID) - return orb - -def get_root_context(): - """ - returns a reference to the NameService object. - This method is mainly used internally in the module. 
- """ - - orb = get_orb() - nss = orb.resolve_initial_references("NameService") - rootContext = nss._narrow(CosNaming.NamingContext) - assert rootContext is not None, "Failed to narrow root naming context" - return rootContext - -def register_object_name(object, namepath): - """ - Registers a object in the NamingService. - The name path is a list of 2-uples (id,kind) giving the path. - - For instance if the path of an object is [('foo',''),('bar','')], - it is possible to get a reference to the object using the URL - 'corbaname::hostname#foo/bar'. - [('logilab','rootmodule'),('chatbot','application'),('chatter','server')] - is mapped to - 'corbaname::hostname#logilab.rootmodule/chatbot.application/chatter.server' - - The get_object_reference() function can be used to resolve such a URL. - """ - context = get_root_context() - for id, kind in namepath[:-1]: - name = [CosNaming.NameComponent(id, kind)] - try: - context = context.bind_new_context(name) - except CosNaming.NamingContext.AlreadyBound, ex: - context = context.resolve(name)._narrow(CosNaming.NamingContext) - assert context is not None, \ - 'test context exists but is not a NamingContext' - - id, kind = namepath[-1] - name = [CosNaming.NameComponent(id, kind)] - try: - context.bind(name, object._this()) - except CosNaming.NamingContext.AlreadyBound, ex: - context.rebind(name, object._this()) - -def activate_POA(): - """ - This methods activates the Portable Object Adapter. - You need to call it to enable the reception of messages in your code, - on both the client and the server. - """ - orb = get_orb() - poa = orb.resolve_initial_references('RootPOA') - poaManager = poa._get_the_POAManager() - poaManager.activate() - -def run_orb(): - """ - Enters the ORB mainloop on the server. - You should not call this method on the client. - """ - get_orb().run() - -def get_object_reference(url): - """ - Resolves a corbaname URL to an object proxy. 
- See register_object_name() for examples URLs - """ - return get_orb().string_to_object(url) - -def get_object_string(host, namepath): - """given an host name and a name path as described in register_object_name, - return a corba string identifier - """ - strname = '/'.join(['.'.join(path_elt) for path_elt in namepath]) - return 'corbaname::%s#%s' % (host, strname) diff --git a/pylibs/logilab/common/daemon.py b/pylibs/logilab/common/daemon.py deleted file mode 100644 index 2eedca56..00000000 --- a/pylibs/logilab/common/daemon.py +++ /dev/null @@ -1,200 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""A daemonize function (for Unices) and daemon mix-in class""" - -__docformat__ = "restructuredtext en" - -import os -import errno -import signal -import sys -import time -import warnings - -def setugid(user): - """Change process user and group ID - - Argument is a numeric user id or a user name""" - try: - from pwd import getpwuid - passwd = getpwuid(int(user)) - except ValueError: - from pwd import getpwnam - passwd = getpwnam(user) - - if hasattr(os, 'initgroups'): # python >= 2.7 - os.initgroups(passwd.pw_name, passwd.pw_gid) - else: - import ctypes - if ctypes.CDLL(None).initgroups(passwd.pw_name, passwd.pw_gid) < 0: - err = ctypes.c_int.in_dll(ctypes.pythonapi,"errno").value - raise OSError(err, os.strerror(err), 'initgroups') - os.setgid(passwd.pw_gid) - os.setuid(passwd.pw_uid) - - -def daemonize(pidfile=None, uid=None, umask=077): - """daemonize a Unix process. Set paranoid umask by default. - - Return 1 in the original process, 2 in the first fork, and None for the - second fork (eg daemon process). - """ - # http://www.faqs.org/faqs/unix-faq/programmer/faq/ - # - # fork so the parent can exit - if os.fork(): # launch child and... - return 1 - # disconnect from tty and create a new session - os.setsid() - # fork again so the parent, (the session group leader), can exit. - # as a non-session group leader, we can never regain a controlling - # terminal. - if os.fork(): # launch child again. 
- return 2 - # move to the root to avoit mount pb - os.chdir('/') - # set umask if specified - if umask is not None: - os.umask(umask) - # redirect standard descriptors - null = os.open('/dev/null', os.O_RDWR) - for i in range(3): - try: - os.dup2(null, i) - except OSError, e: - if e.errno != errno.EBADF: - raise - os.close(null) - # filter warnings - warnings.filterwarnings('ignore') - # write pid in a file - if pidfile: - # ensure the directory where the pid-file should be set exists (for - # instance /var/run/cubicweb may be deleted on computer restart) - piddir = os.path.dirname(pidfile) - if not os.path.exists(piddir): - os.makedirs(piddir) - f = file(pidfile, 'w') - f.write(str(os.getpid())) - f.close() - # change process uid - if uid: - setugid(uid) - return None - - -class DaemonMixIn: - """Mixin to make a daemon from watchers/queriers. - """ - - def __init__(self, configmod) : - self.delay = configmod.DELAY - self.name = str(self.__class__).split('.')[-1] - self._pid_file = os.path.join('/tmp', '%s.pid'%self.name) - if os.path.exists(self._pid_file): - raise Exception('''Another instance of %s must be running. -If it i not the case, remove the file %s''' % (self.name, self._pid_file)) - self._alive = 1 - self._sleeping = 0 - self.config = configmod - - def _daemonize(self): - if not self.config.NODETACH: - if daemonize(self._pid_file) is None: - # put signal handler - signal.signal(signal.SIGTERM, self.signal_handler) - signal.signal(signal.SIGHUP, self.signal_handler) - else: - return -1 - - def run(self): - """ optionally go in daemon mode and - do what concrete class has to do and pauses for delay between runs - If self.delay is negative, do a pause before starting - """ - if self._daemonize() == -1: - return - if self.delay < 0: - self.delay = -self.delay - time.sleep(self.delay) - while True: - try: - self._run() - except Exception, ex: - # display for info, sleep, and hope the problem will be solved - # later. 
- self.config.exception('Internal error: %s', ex) - if not self._alive: - break - try: - self._sleeping = 1 - time.sleep(self.delay) - self._sleeping = 0 - except SystemExit: - break - self.config.info('%s instance exited', self.name) - # remove pid file - os.remove(self._pid_file) - - def signal_handler(self, sig_num, stack_frame): - if sig_num == signal.SIGTERM: - if self._sleeping: - # we are sleeping so we can exit without fear - self.config.debug('exit on SIGTERM') - sys.exit(0) - else: - self.config.debug('exit on SIGTERM (on next turn)') - self._alive = 0 - elif sig_num == signal.SIGHUP: - self.config.info('reloading configuration on SIGHUP') - reload(self.config) - - def _run(self): - """should be overridden in the mixed class""" - raise NotImplementedError() - - -import logging -from logilab.common.logging_ext import set_log_methods -set_log_methods(DaemonMixIn, logging.getLogger('lgc.daemon')) - -## command line utilities ###################################################### - -L_OPTIONS = ["help", "log=", "delay=", 'no-detach'] -S_OPTIONS = 'hl:d:n' - -def print_help(modconfig): - print """ --help or -h - displays this message - --log - log treshold (7 record everything, 0 record only emergency.) - Defaults to %s - --delay - the number of seconds between two runs. - Defaults to %s""" % (modconfig.LOG_TRESHOLD, modconfig.DELAY) - -def handle_option(modconfig, opt_name, opt_value, help_meth): - if opt_name in ('-h', '--help'): - help_meth() - sys.exit(0) - elif opt_name in ('-l', '--log'): - modconfig.LOG_TRESHOLD = int(opt_value) - elif opt_name in ('-d', '--delay'): - modconfig.DELAY = int(opt_value) - elif opt_name in ('-n', '--no-detach'): - modconfig.NODETACH = 1 diff --git a/pylibs/logilab/common/date.py b/pylibs/logilab/common/date.py deleted file mode 100644 index b069a6fd..00000000 --- a/pylibs/logilab/common/date.py +++ /dev/null @@ -1,327 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Date manipulation helper functions.""" -from __future__ import division - -__docformat__ = "restructuredtext en" - -import math -import re -from locale import getpreferredencoding -from datetime import date, time, datetime, timedelta -from time import strptime as time_strptime -from calendar import monthrange, timegm - -try: - from mx.DateTime import RelativeDateTime, Date, DateTimeType -except ImportError: - endOfMonth = None - DateTimeType = datetime -else: - endOfMonth = RelativeDateTime(months=1, day=-1) - -# NOTE: should we implement a compatibility layer between date representations -# as we have in lgc.db ? 
- -FRENCH_FIXED_HOLIDAYS = { - 'jour_an': '%s-01-01', - 'fete_travail': '%s-05-01', - 'armistice1945': '%s-05-08', - 'fete_nat': '%s-07-14', - 'assomption': '%s-08-15', - 'toussaint': '%s-11-01', - 'armistice1918': '%s-11-11', - 'noel': '%s-12-25', - } - -FRENCH_MOBILE_HOLIDAYS = { - 'paques2004': '2004-04-12', - 'ascension2004': '2004-05-20', - 'pentecote2004': '2004-05-31', - - 'paques2005': '2005-03-28', - 'ascension2005': '2005-05-05', - 'pentecote2005': '2005-05-16', - - 'paques2006': '2006-04-17', - 'ascension2006': '2006-05-25', - 'pentecote2006': '2006-06-05', - - 'paques2007': '2007-04-09', - 'ascension2007': '2007-05-17', - 'pentecote2007': '2007-05-28', - - 'paques2008': '2008-03-24', - 'ascension2008': '2008-05-01', - 'pentecote2008': '2008-05-12', - - 'paques2009': '2009-04-13', - 'ascension2009': '2009-05-21', - 'pentecote2009': '2009-06-01', - - 'paques2010': '2010-04-05', - 'ascension2010': '2010-05-13', - 'pentecote2010': '2010-05-24', - - 'paques2011': '2011-04-25', - 'ascension2011': '2011-06-02', - 'pentecote2011': '2011-06-13', - - 'paques2012': '2012-04-09', - 'ascension2012': '2012-05-17', - 'pentecote2012': '2012-05-28', - } - -# XXX this implementation cries for multimethod dispatching - -def get_step(dateobj, nbdays=1): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(dateobj, date): - return ONEDAY * nbdays - return nbdays # mx.DateTime is ok with integers - -def datefactory(year, month, day, sampledate): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(sampledate, datetime): - return datetime(year, month, day) - if isinstance(sampledate, date): - return date(year, month, day) - return Date(year, month, day) - -def weekday(dateobj): - # assume date is either a python datetime or a mx.DateTime object - if isinstance(dateobj, date): - return dateobj.weekday() - return dateobj.day_of_week - -def str2date(datestr, sampledate): - # NOTE: datetime.strptime is not an option 
until we drop py2.4 compat - year, month, day = [int(chunk) for chunk in datestr.split('-')] - return datefactory(year, month, day, sampledate) - -def days_between(start, end): - if isinstance(start, date): - delta = end - start - # datetime.timedelta.days is always an integer (floored) - if delta.seconds: - return delta.days + 1 - return delta.days - else: - return int(math.ceil((end - start).days)) - -def get_national_holidays(begin, end): - """return french national days off between begin and end""" - begin = datefactory(begin.year, begin.month, begin.day, begin) - end = datefactory(end.year, end.month, end.day, end) - holidays = [str2date(datestr, begin) - for datestr in FRENCH_MOBILE_HOLIDAYS.values()] - for year in xrange(begin.year, end.year+1): - for datestr in FRENCH_FIXED_HOLIDAYS.values(): - date = str2date(datestr % year, begin) - if date not in holidays: - holidays.append(date) - return [day for day in holidays if begin <= day < end] - -def add_days_worked(start, days): - """adds date but try to only take days worked into account""" - step = get_step(start) - weeks, plus = divmod(days, 5) - end = start + ((weeks * 7) + plus) * step - if weekday(end) >= 5: # saturday or sunday - end += (2 * step) - end += len([x for x in get_national_holidays(start, end + step) - if weekday(x) < 5]) * step - if weekday(end) >= 5: # saturday or sunday - end += (2 * step) - return end - -def nb_open_days(start, end): - assert start <= end - step = get_step(start) - days = days_between(start, end) - weeks, plus = divmod(days, 7) - if weekday(start) > weekday(end): - plus -= 2 - elif weekday(end) == 6: - plus -= 1 - open_days = weeks * 5 + plus - nb_week_holidays = len([x for x in get_national_holidays(start, end+step) - if weekday(x) < 5 and x < end]) - open_days -= nb_week_holidays - if open_days < 0: - return 0 - return open_days - -def date_range(begin, end, incday=None, incmonth=None): - """yields each date between begin and end - - :param begin: the start date - 
:param end: the end date - :param incr: the step to use to iterate over dates. Default is - one day. - :param include: None (means no exclusion) or a function taking a - date as parameter, and returning True if the date - should be included. - - When using mx datetime, you should *NOT* use incmonth argument, use instead - oneDay, oneHour, oneMinute, oneSecond, oneWeek or endOfMonth (to enumerate - months) as `incday` argument - """ - assert not (incday and incmonth) - begin = todate(begin) - end = todate(end) - if incmonth: - while begin < end: - begin = next_month(begin, incmonth) - yield begin - else: - incr = get_step(begin, incday or 1) - while begin < end: - yield begin - begin += incr - -# makes py datetime usable ##################################################### - -ONEDAY = timedelta(days=1) -ONEWEEK = timedelta(days=7) - -try: - strptime = datetime.strptime -except AttributeError: # py < 2.5 - from time import strptime as time_strptime - def strptime(value, format): - return datetime(*time_strptime(value, format)[:6]) - -def strptime_time(value, format='%H:%M'): - return time(*time_strptime(value, format)[3:6]) - -def todate(somedate): - """return a date from a date (leaving unchanged) or a datetime""" - if isinstance(somedate, datetime): - return date(somedate.year, somedate.month, somedate.day) - assert isinstance(somedate, (date, DateTimeType)), repr(somedate) - return somedate - -def totime(somedate): - """return a time from a time (leaving unchanged), date or datetime""" - # XXX mx compat - if not isinstance(somedate, time): - return time(somedate.hour, somedate.minute, somedate.second) - assert isinstance(somedate, (time)), repr(somedate) - return somedate - -def todatetime(somedate): - """return a date from a date (leaving unchanged) or a datetime""" - # take care, datetime is a subclass of date - if isinstance(somedate, datetime): - return somedate - assert isinstance(somedate, (date, DateTimeType)), repr(somedate) - return 
datetime(somedate.year, somedate.month, somedate.day) - -def datetime2ticks(somedate): - return timegm(somedate.timetuple()) * 1000 - -def ticks2datetime(ticks): - miliseconds, microseconds = divmod(ticks, 1000) - try: - return datetime.fromtimestamp(miliseconds) - except (ValueError, OverflowError): - epoch = datetime.fromtimestamp(0) - nb_days, seconds = divmod(int(miliseconds), 86400) - delta = timedelta(nb_days, seconds=seconds, microseconds=microseconds) - try: - return epoch + delta - except (ValueError, OverflowError): - raise - -def days_in_month(somedate): - return monthrange(somedate.year, somedate.month)[1] - -def days_in_year(somedate): - feb = date(somedate.year, 2, 1) - if days_in_month(feb) == 29: - return 366 - else: - return 365 - -def previous_month(somedate, nbmonth=1): - while nbmonth: - somedate = first_day(somedate) - ONEDAY - nbmonth -= 1 - return somedate - -def next_month(somedate, nbmonth=1): - while nbmonth: - somedate = last_day(somedate) + ONEDAY - nbmonth -= 1 - return somedate - -def first_day(somedate): - return date(somedate.year, somedate.month, 1) - -def last_day(somedate): - return date(somedate.year, somedate.month, days_in_month(somedate)) - -def ustrftime(somedate, fmt='%Y-%m-%d'): - """like strftime, but returns a unicode string instead of an encoded - string which' may be problematic with localized date. 
- - encoding is guessed by locale.getpreferredencoding() - """ - encoding = getpreferredencoding(do_setlocale=False) or 'UTF-8' - try: - return unicode(somedate.strftime(str(fmt)), encoding) - except ValueError, exc: - if somedate.year >= 1900: - raise - # datetime is not happy with dates before 1900 - # we try to work around this, assuming a simple - # format string - fields = {'Y': somedate.year, - 'm': somedate.month, - 'd': somedate.day, - } - if isinstance(somedate, datetime): - fields.update({'H': somedate.hour, - 'M': somedate.minute, - 'S': somedate.second}) - fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt) - return unicode(fmt) % fields - -def utcdatetime(dt): - if dt.tzinfo is None: - return dt - return datetime(*dt.utctimetuple()[:7]) - -def utctime(dt): - if dt.tzinfo is None: - return dt - return (dt + dt.utcoffset() + dt.dst()).replace(tzinfo=None) - -def datetime_to_seconds(date): - """return the number of seconds since the begining of the day for that date - """ - return date.second+60*date.minute + 3600*date.hour - -def timedelta_to_days(delta): - """return the time delta as a number of seconds""" - return delta.days + delta.seconds / (3600*24) - -def timedelta_to_seconds(delta): - """return the time delta as a fraction of days""" - return delta.days*(3600*24) + delta.seconds diff --git a/pylibs/logilab/common/dbf.py b/pylibs/logilab/common/dbf.py deleted file mode 100644 index 8def2d2e..00000000 --- a/pylibs/logilab/common/dbf.py +++ /dev/null @@ -1,229 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""This is a DBF reader which reads Visual Fox Pro DBF format with Memo field - -Usage: - ->>> rec = readDbf('test.dbf') ->>> for line in rec: ->>> print line['name'] - - -:date: 13/07/2007 - -http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html -page says code is "available as is without any warranty or support". -""" - -import struct -import os, os.path -import sys -import csv -import tempfile -import ConfigParser - -class Dbase: - def __init__(self): - self.fdb = None - self.fmemo = None - self.db_data = None - self.memo_data = None - self.fields = None - self.num_records = 0 - self.header = None - self.memo_file = '' - self.memo_header = None - self.memo_block_size = 0 - self.memo_header_len = 0 - - def _drop_after_NULL(self, txt): - for i in range(0, len(txt)): - if ord(struct.unpack('c', txt[i])[0])==0: - return txt[:i] - return txt - - def _reverse_endian(self, num): - if not len(num): - return 0 - val = struct.unpack('L', val[0]) - val = struct.unpack('>L', val) - return val[0] - - def _assign_ids(self, lst, ids): - result = {} - idx = 0 - for item in lst: - id = ids[idx] - result[id] = item - idx += 1 - return result - - def open(self, db_name): - filesize = os.path.getsize(db_name) - if filesize <= 68: - raise IOError, 'The file is not large enough to be a dbf file' - - self.fdb = open(db_name, 'rb') - - self.memo_file = '' - if os.path.isfile(db_name[0:-1] + 't'): - self.memo_file = db_name[0:-1] + 't' - elif os.path.isfile(db_name[0:-3] + 'fpt'): - self.memo_file = db_name[0:-3] + 'fpt' - - if self.memo_file: - #Read memo file - self.fmemo = open(self.memo_file, 
'rb') - self.memo_data = self.fmemo.read() - self.memo_header = self._assign_ids(struct.unpack('>6x1H', self.memo_data[:8]), ['Block size']) - block_size = self.memo_header['Block size'] - if not block_size: - block_size = 512 - self.memo_block_size = block_size - self.memo_header_len = block_size - memo_size = os.path.getsize(self.memo_file) - - #Start reading data file - data = self.fdb.read(32) - self.header = self._assign_ids(struct.unpack(' self.num_records: - raise Exception, 'Unable to extract data outside the range' - - offset = self.header['Record Size'] * rec_no - data = self.db_data[offset:offset+self.row_len] - record = self._assign_ids(struct.unpack(self.row_format, data), self.row_ids) - - if self.memo_file: - for key in self.fields: - field = self.fields[key] - f_type = field['Field Type'] - f_name = field['Field Name'] - c_data = record[f_name] - - if f_type=='M' or f_type=='G' or f_type=='B' or f_type=='P': - c_data = self._reverse_endian(c_data) - if c_data: - record[f_name] = self.read_memo(c_data-1).strip() - else: - record[f_name] = c_data.strip() - return record - - def read_memo_record(self, num, in_length): - """ - Read the record of given number. The second parameter is the length of - the record to read. 
It can be undefined, meaning read the whole record, - and it can be negative, meaning at most the length - """ - if in_length < 0: - in_length = -self.memo_block_size - - offset = self.memo_header_len + num * self.memo_block_size - self.fmemo.seek(offset) - if in_length<0: - in_length = -in_length - if in_length==0: - return '' - return self.fmemo.read(in_length) - - def read_memo(self, num): - result = '' - buffer = self.read_memo_record(num, -1) - if len(buffer)<=0: - return '' - length = struct.unpack('>L', buffer[4:4+4])[0] + 8 - - block_size = self.memo_block_size - if length < block_size: - return buffer[8:length] - rest_length = length - block_size - rest_data = self.read_memo_record(num+1, rest_length) - if len(rest_data)<=0: - return '' - return buffer[8:] + rest_data - -def readDbf(filename): - """ - Read the DBF file specified by the filename and - return the records as a list of dictionary. - - :param: filename File name of the DBF - :return: List of rows - """ - db = Dbase() - db.open(filename) - num = db.get_numrecords() - rec = [] - for i in range(0, num): - record = db.get_record_with_names(i) - rec.append(record) - db.close() - return rec - -if __name__=='__main__': - rec = readDbf('dbf/sptable.dbf') - for line in rec: - print '%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()) diff --git a/pylibs/logilab/common/debugger.py b/pylibs/logilab/common/debugger.py deleted file mode 100644 index 75563227..00000000 --- a/pylibs/logilab/common/debugger.py +++ /dev/null @@ -1,210 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Customized version of pdb's default debugger. - -- sets up a history file -- uses ipython if available to colorize lines of code -- overrides list command to search for current block instead - of using 5 lines of context - - - - -""" -__docformat__ = "restructuredtext en" - -try: - import readline -except ImportError: - readline = None -import os -import os.path as osp -import sys -from pdb import Pdb -from cStringIO import StringIO -import inspect - -try: - from IPython import PyColorize -except ImportError: - def colorize(source, *args): - """fallback colorize function""" - return source - def colorize_source(source, *args): - return source -else: - def colorize(source, start_lineno, curlineno): - """colorize and annotate source with linenos - (as in pdb's list command) - """ - parser = PyColorize.Parser() - output = StringIO() - parser.format(source, output) - annotated = [] - for index, line in enumerate(output.getvalue().splitlines()): - lineno = index + start_lineno - if lineno == curlineno: - annotated.append('%4s\t->\t%s' % (lineno, line)) - else: - annotated.append('%4s\t\t%s' % (lineno, line)) - return '\n'.join(annotated) - - def colorize_source(source): - """colorize given source""" - parser = PyColorize.Parser() - output = StringIO() - parser.format(source, output) - return output.getvalue() - - -def getsource(obj): - """Return the text of the source code for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a single string. 
An - IOError is raised if the source code cannot be retrieved.""" - lines, lnum = inspect.getsourcelines(obj) - return ''.join(lines), lnum - - -################################################################ -class Debugger(Pdb): - """custom debugger - - - sets up a history file - - uses ipython if available to colorize lines of code - - overrides list command to search for current block instead - of using 5 lines of context - """ - def __init__(self, tcbk=None): - Pdb.__init__(self) - self.reset() - if tcbk: - while tcbk.tb_next is not None: - tcbk = tcbk.tb_next - self._tcbk = tcbk - self._histfile = os.path.expanduser("~/.pdbhist") - - def setup_history_file(self): - """if readline is available, read pdb history file - """ - if readline is not None: - try: - # XXX try..except shouldn't be necessary - # read_history_file() can accept None - readline.read_history_file(self._histfile) - except IOError: - pass - - def start(self): - """starts the interactive mode""" - self.interaction(self._tcbk.tb_frame, self._tcbk) - - def setup(self, frame, tcbk): - """setup hook: set up history file""" - self.setup_history_file() - Pdb.setup(self, frame, tcbk) - - def set_quit(self): - """quit hook: save commands in the history file""" - if readline is not None: - readline.write_history_file(self._histfile) - Pdb.set_quit(self) - - def complete_p(self, text, line, begin_idx, end_idx): - """provide variable names completion for the ``p`` command""" - namespace = dict(self.curframe.f_globals) - namespace.update(self.curframe.f_locals) - if '.' in text: - return self.attr_matches(text, namespace) - return [varname for varname in namespace if varname.startswith(text)] - - - def attr_matches(self, text, namespace): - """implementation coming from rlcompleter.Completer.attr_matches - Compute matches when text contains a dot. 
- - Assuming the text is of the form NAME.NAME....[NAME], and is - evaluatable in self.namespace, it will be evaluated and its attributes - (as revealed by dir()) are used as possible completions. (For class - instances, class members are also considered.) - - WARNING: this can still invoke arbitrary C code, if an object - with a __getattr__ hook is evaluated. - - """ - import re - m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text) - if not m: - return - expr, attr = m.group(1, 3) - object = eval(expr, namespace) - words = dir(object) - if hasattr(object, '__class__'): - words.append('__class__') - words = words + self.get_class_members(object.__class__) - matches = [] - n = len(attr) - for word in words: - if word[:n] == attr and word != "__builtins__": - matches.append("%s.%s" % (expr, word)) - return matches - - def get_class_members(self, klass): - """implementation coming from rlcompleter.get_class_members""" - ret = dir(klass) - if hasattr(klass, '__bases__'): - for base in klass.__bases__: - ret = ret + self.get_class_members(base) - return ret - - ## specific / overridden commands - def do_list(self, arg): - """overrides default list command to display the surrounding block - instead of 5 lines of context - """ - self.lastcmd = 'list' - if not arg: - try: - source, start_lineno = getsource(self.curframe) - print colorize(''.join(source), start_lineno, - self.curframe.f_lineno) - except KeyboardInterrupt: - pass - except IOError: - Pdb.do_list(self, arg) - else: - Pdb.do_list(self, arg) - do_l = do_list - - def do_open(self, arg): - """opens source file corresponding to the current stack level""" - filename = self.curframe.f_code.co_filename - lineno = self.curframe.f_lineno - cmd = 'emacsclient --no-wait +%s %s' % (lineno, filename) - os.system(cmd) - - do_o = do_open - -def pm(): - """use our custom debugger""" - dbg = Debugger(sys.last_traceback) - dbg.start() - -def set_trace(): - Debugger().set_trace(sys._getframe().f_back) diff --git 
a/pylibs/logilab/common/decorators.py b/pylibs/logilab/common/decorators.py deleted file mode 100644 index 7bb08fc9..00000000 --- a/pylibs/logilab/common/decorators.py +++ /dev/null @@ -1,243 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -""" A few useful function/method decorators. 
""" -__docformat__ = "restructuredtext en" - -import types -import sys, re -from time import clock, time - -# XXX rewrite so we can use the decorator syntax when keyarg has to be specified - -def _is_generator_function(callableobj): - return callableobj.func_code.co_flags & 0x20 - -class cached_decorator(object): - def __init__(self, cacheattr=None, keyarg=None): - self.cacheattr = cacheattr - self.keyarg = keyarg - def __call__(self, callableobj=None): - assert not _is_generator_function(callableobj), \ - 'cannot cache generator function: %s' % callableobj - if callableobj.func_code.co_argcount == 1 or self.keyarg == 0: - cache = _SingleValueCache(callableobj, self.cacheattr) - elif self.keyarg: - cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr) - else: - cache = _MultiValuesCache(callableobj, self.cacheattr) - return cache.closure() - -class _SingleValueCache(object): - def __init__(self, callableobj, cacheattr=None): - self.callable = callableobj - if cacheattr is None: - self.cacheattr = '_%s_cache_' % callableobj.__name__ - else: - assert cacheattr != callableobj.__name__ - self.cacheattr = cacheattr - - def __call__(__me, self, *args): - try: - return self.__dict__[__me.cacheattr] - except KeyError: - value = __me.callable(self, *args) - setattr(self, __me.cacheattr, value) - return value - - def closure(self): - def wrapped(*args, **kwargs): - return self.__call__(*args, **kwargs) - wrapped.cache_obj = self - try: - wrapped.__doc__ = self.callable.__doc__ - wrapped.__name__ = self.callable.__name__ - wrapped.func_name = self.callable.func_name - except: - pass - return wrapped - - def clear(self, holder): - holder.__dict__.pop(self.cacheattr, None) - - -class _MultiValuesCache(_SingleValueCache): - def _get_cache(self, holder): - try: - _cache = holder.__dict__[self.cacheattr] - except KeyError: - _cache = {} - setattr(holder, self.cacheattr, _cache) - return _cache - - def __call__(__me, self, *args, **kwargs): - _cache = 
__me._get_cache(self) - try: - return _cache[args] - except KeyError: - _cache[args] = __me.callable(self, *args) - return _cache[args] - -class _MultiValuesKeyArgCache(_MultiValuesCache): - def __init__(self, callableobj, keyarg, cacheattr=None): - super(_MultiValuesKeyArgCache, self).__init__(callableobj, cacheattr) - self.keyarg = keyarg - - def __call__(__me, self, *args, **kwargs): - _cache = __me._get_cache(self) - key = args[__me.keyarg-1] - try: - return _cache[key] - except KeyError: - _cache[key] = __me.callable(self, *args, **kwargs) - return _cache[key] - - -def cached(callableobj=None, keyarg=None, **kwargs): - """Simple decorator to cache result of method call.""" - kwargs['keyarg'] = keyarg - decorator = cached_decorator(**kwargs) - if callableobj is None: - return decorator - else: - return decorator(callableobj) - -def get_cache_impl(obj, funcname): - cls = obj.__class__ - member = getattr(cls, funcname) - if isinstance(member, property): - member = member.fget - return member.cache_obj - -def clear_cache(obj, funcname): - """Clear a cache handled by the :func:`cached` decorator. If 'x' class has - @cached on its method `foo`, type - - >>> clear_cache(x, 'foo') - - to purge this method's cache on the instance. - """ - get_cache_impl(obj, funcname).clear(obj) - -def copy_cache(obj, funcname, cacheobj): - """Copy cache for from cacheobj to obj.""" - cacheattr = get_cache_impl(obj, funcname).cacheattr - try: - setattr(obj, cacheattr, cacheobj.__dict__[cacheattr]) - except KeyError: - pass - - -class wproperty(object): - """Simple descriptor expecting to take a modifier function as first argument - and looking for a _ to retrieve the attribute. 
- """ - def __init__(self, setfunc): - self.setfunc = setfunc - self.attrname = '_%s' % setfunc.__name__ - - def __set__(self, obj, value): - self.setfunc(obj, value) - - def __get__(self, obj, cls): - assert obj is not None - return getattr(obj, self.attrname) - - -class classproperty(object): - """this is a simple property-like class but for class attributes. - """ - def __init__(self, get): - self.get = get - def __get__(self, inst, cls): - return self.get(cls) - - -class iclassmethod(object): - '''Descriptor for method which should be available as class method if called - on the class or instance method if called on an instance. - ''' - def __init__(self, func): - self.func = func - def __get__(self, instance, objtype): - if instance is None: - return types.MethodType(self.func, objtype, objtype.__class__) - return types.MethodType(self.func, instance, objtype) - def __set__(self, instance, value): - raise AttributeError("can't set attribute") - - -def timed(f): - def wrap(*args, **kwargs): - t = time() - c = clock() - res = f(*args, **kwargs) - print '%s clock: %.9f / time: %.9f' % (f.__name__, - clock() - c, time() - t) - return res - return wrap - - -def locked(acquire, release): - """Decorator taking two methods to acquire/release a lock as argument, - returning a decorator function which will call the inner method after - having called acquire(self) et will call release(self) afterwards. - """ - def decorator(f): - def wrapper(self, *args, **kwargs): - acquire(self) - try: - return f(self, *args, **kwargs) - finally: - release(self) - return wrapper - return decorator - - -def monkeypatch(klass, methodname=None): - """Decorator extending class with the decorated callable - >>> class A: - ... pass - >>> @monkeypatch(A) - ... def meth(self): - ... return 12 - ... - >>> a = A() - >>> a.meth() - 12 - >>> @monkeypatch(A, 'foo') - ... def meth(self): - ... return 12 - ... 
- >>> a.foo() - 12 - """ - def decorator(func): - try: - name = methodname or func.__name__ - except AttributeError: - raise AttributeError('%s has no __name__ attribute: ' - 'you should provide an explicit `methodname`' - % func) - if callable(func): - setattr(klass, name, types.MethodType(func, None, klass)) - else: - # likely a property - # this is quite borderline but usage already in the wild ... - setattr(klass, name, func) - return func - return decorator diff --git a/pylibs/logilab/common/deprecation.py b/pylibs/logilab/common/deprecation.py deleted file mode 100644 index db0829ae..00000000 --- a/pylibs/logilab/common/deprecation.py +++ /dev/null @@ -1,112 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""Deprecation utilities.""" - -__docformat__ = "restructuredtext en" - -import sys -from warnings import warn - -class class_deprecated(type): - """metaclass to print a warning on instantiation of a deprecated class""" - - def __call__(cls, *args, **kwargs): - msg = getattr(cls, "__deprecation_warning__", - "%(cls)s is deprecated") % {'cls': cls.__name__} - warn(msg, DeprecationWarning, stacklevel=2) - return type.__call__(cls, *args, **kwargs) - - -def class_renamed(old_name, new_class, message=None): - """automatically creates a class which fires a DeprecationWarning - when instantiated. - - >>> Set = class_renamed('Set', set, 'Set is now replaced by set') - >>> s = Set() - sample.py:57: DeprecationWarning: Set is now replaced by set - s = Set() - >>> - """ - clsdict = {} - if message is None: - message = '%s is deprecated, use %s' % (old_name, new_class.__name__) - clsdict['__deprecation_warning__'] = message - try: - # new-style class - return class_deprecated(old_name, (new_class,), clsdict) - except (NameError, TypeError): - # old-style class - class DeprecatedClass(new_class): - """FIXME: There might be a better way to handle old/new-style class - """ - def __init__(self, *args, **kwargs): - warn(message, DeprecationWarning, stacklevel=2) - new_class.__init__(self, *args, **kwargs) - return DeprecatedClass - - -def class_moved(new_class, old_name=None, message=None): - """nice wrapper around class_renamed when a class has been moved into - another module - """ - if old_name is None: - old_name = new_class.__name__ - if message is None: - message = 'class %s is now available as %s.%s' % ( - old_name, new_class.__module__, new_class.__name__) - return class_renamed(old_name, new_class, message) - -def deprecated(reason=None, stacklevel=2): - """Decorator that raises a DeprecationWarning to print a message - when the decorated function is called. 
- """ - def deprecated_decorator(func): - message = reason or 'The function "%s" is deprecated' - if '%s' in message: - message = message % func.func_name - def wrapped(*args, **kwargs): - warn(message, DeprecationWarning, stacklevel=stacklevel) - return func(*args, **kwargs) - try: - wrapped.__name__ = func.__name__ - except TypeError: # readonly attribute in 2.3 - pass - wrapped.__doc__ = func.__doc__ - return wrapped - return deprecated_decorator - -def moved(modpath, objname): - """use to tell that a callable has been moved to a new module. - - It returns a callable wrapper, so that when its called a warning is printed - telling where the object can be found, import is done (and not before) and - the actual object is called. - - NOTE: the usage is somewhat limited on classes since it will fail if the - wrapper is use in a class ancestors list, use the `class_moved` function - instead (which has no lazy import feature though). - """ - def callnew(*args, **kwargs): - from logilab.common.modutils import load_module_from_name - message = "object %s has been moved to module %s" % (objname, modpath) - warn(message, DeprecationWarning, stacklevel=2) - m = load_module_from_name(modpath) - return getattr(m, objname)(*args, **kwargs) - return callnew - - diff --git a/pylibs/logilab/common/fileutils.py b/pylibs/logilab/common/fileutils.py deleted file mode 100644 index 4ac92702..00000000 --- a/pylibs/logilab/common/fileutils.py +++ /dev/null @@ -1,402 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""File and file-path manipulation utilities. - -:group path manipulation: first_level_directory, relative_path, is_binary,\ -get_by_ext, remove_dead_links -:group file manipulation: norm_read, norm_open, lines, stream_lines, lines,\ -write_open_mode, ensure_fs_mode, export -:sort: path manipulation, file manipulation -""" -__docformat__ = "restructuredtext en" - -import sys -import shutil -import mimetypes -from os.path import isabs, isdir, islink, split, exists, normpath, join -from os.path import abspath -from os import sep, mkdir, remove, listdir, stat, chmod, walk -from stat import ST_MODE, S_IWRITE -from cStringIO import StringIO - -from logilab.common import STD_BLACKLIST as BASE_BLACKLIST, IGNORED_EXTENSIONS -from logilab.common.shellutils import find -from logilab.common.deprecation import deprecated -from logilab.common.compat import FileIO, any - -def first_level_directory(path): - """Return the first level directory of a path. - - >>> first_level_directory('home/syt/work') - 'home' - >>> first_level_directory('/home/syt/work') - '/' - >>> first_level_directory('work') - 'work' - >>> - - :type path: str - :param path: the path for which we want the first level directory - - :rtype: str - :return: the first level directory appearing in `path` - """ - head, tail = split(path) - while head and tail: - head, tail = split(head) - if tail: - return tail - # path was absolute, head is the fs root - return head - -def abspath_listdir(path): - """Lists path's content using absolute paths. 
- - >>> os.listdir('/home') - ['adim', 'alf', 'arthur', 'auc'] - >>> abspath_listdir('/home') - ['/home/adim', '/home/alf', '/home/arthur', '/home/auc'] - """ - path = abspath(path) - return [join(path, filename) for filename in listdir(path)] - - -def is_binary(filename): - """Return true if filename may be a binary file, according to it's - extension. - - :type filename: str - :param filename: the name of the file - - :rtype: bool - :return: - true if the file is a binary file (actually if it's mime type - isn't beginning by text/) - """ - try: - return not mimetypes.guess_type(filename)[0].startswith('text') - except AttributeError: - return 1 - - -def write_open_mode(filename): - """Return the write mode that should used to open file. - - :type filename: str - :param filename: the name of the file - - :rtype: str - :return: the mode that should be use to open the file ('w' or 'wb') - """ - if is_binary(filename): - return 'wb' - return 'w' - - -def ensure_fs_mode(filepath, desired_mode=S_IWRITE): - """Check that the given file has the given mode(s) set, else try to - set it. - - :type filepath: str - :param filepath: path of the file - - :type desired_mode: int - :param desired_mode: - ORed flags describing the desired mode. Use constants from the - `stat` module for file permission's modes - """ - mode = stat(filepath)[ST_MODE] - if not mode & desired_mode: - chmod(filepath, mode | desired_mode) - - -# XXX (syt) unused? kill? -class ProtectedFile(FileIO): - """A special file-object class that automatically does a 'chmod +w' when - needed. - - XXX: for now, the way it is done allows 'normal file-objects' to be - created during the ProtectedFile object lifetime. - One way to circumvent this would be to chmod / unchmod on each - write operation. 
- - One other way would be to : - - - catch the IOError in the __init__ - - - if IOError, then create a StringIO object - - - each write operation writes in this StringIO object - - - on close()/del(), write/append the StringIO content to the file and - do the chmod only once - """ - def __init__(self, filepath, mode): - self.original_mode = stat(filepath)[ST_MODE] - self.mode_changed = False - if mode in ('w', 'a', 'wb', 'ab'): - if not self.original_mode & S_IWRITE: - chmod(filepath, self.original_mode | S_IWRITE) - self.mode_changed = True - FileIO.__init__(self, filepath, mode) - - def _restore_mode(self): - """restores the original mode if needed""" - if self.mode_changed: - chmod(self.name, self.original_mode) - # Don't re-chmod in case of several restore - self.mode_changed = False - - def close(self): - """restore mode before closing""" - self._restore_mode() - FileIO.close(self) - - def __del__(self): - if not self.closed: - self.close() - - -class UnresolvableError(Exception): - """Exception raised by relative path when it's unable to compute relative - path between two paths. - """ - -def relative_path(from_file, to_file): - """Try to get a relative path from `from_file` to `to_file` - (path will be absolute if to_file is an absolute file). This function - is useful to create link in `from_file` to `to_file`. This typical use - case is used in this function description. - - If both files are relative, they're expected to be relative to the same - directory. 
- - >>> relative_path( from_file='toto/index.html', to_file='index.html') - '../index.html' - >>> relative_path( from_file='index.html', to_file='toto/index.html') - 'toto/index.html' - >>> relative_path( from_file='tutu/index.html', to_file='toto/index.html') - '../toto/index.html' - >>> relative_path( from_file='toto/index.html', to_file='/index.html') - '/index.html' - >>> relative_path( from_file='/toto/index.html', to_file='/index.html') - '../index.html' - >>> relative_path( from_file='/toto/index.html', to_file='/toto/summary.html') - 'summary.html' - >>> relative_path( from_file='index.html', to_file='index.html') - '' - >>> relative_path( from_file='/index.html', to_file='toto/index.html') - Traceback (most recent call last): - File "", line 1, in ? - File "", line 37, in relative_path - UnresolvableError - >>> relative_path( from_file='/index.html', to_file='/index.html') - '' - >>> - - :type from_file: str - :param from_file: source file (where links will be inserted) - - :type to_file: str - :param to_file: target file (on which links point) - - :raise UnresolvableError: if it has been unable to guess a correct path - - :rtype: str - :return: the relative path of `to_file` from `from_file` - """ - from_file = normpath(from_file) - to_file = normpath(to_file) - if from_file == to_file: - return '' - if isabs(to_file): - if not isabs(from_file): - return to_file - elif isabs(from_file): - raise UnresolvableError() - from_parts = from_file.split(sep) - to_parts = to_file.split(sep) - idem = 1 - result = [] - while len(from_parts) > 1: - dirname = from_parts.pop(0) - if idem and len(to_parts) > 1 and dirname == to_parts[0]: - to_parts.pop(0) - else: - idem = 0 - result.append('..') - result += to_parts - return sep.join(result) - - -def norm_read(path): - """Return the content of the file with normalized line feeds. 
- - :type path: str - :param path: path to the file to read - - :rtype: str - :return: the content of the file with normalized line feeds - """ - return open(path, 'U').read() -norm_read = deprecated("use \"open(path, 'U').read()\"")(norm_read) - -def norm_open(path): - """Return a stream for a file with content with normalized line feeds. - - :type path: str - :param path: path to the file to open - - :rtype: file or StringIO - :return: the opened file with normalized line feeds - """ - return open(path, 'U') -norm_open = deprecated("use \"open(path, 'U')\"")(norm_open) - -def lines(path, comments=None): - """Return a list of non empty lines in the file located at `path`. - - :type path: str - :param path: path to the file - - :type comments: str or None - :param comments: - optional string which can be used to comment a line in the file - (i.e. lines starting with this string won't be returned) - - :rtype: list - :return: - a list of stripped line in the file, without empty and commented - lines - - :warning: at some point this function will probably return an iterator - """ - stream = open(path, 'U') - result = stream_lines(stream, comments) - stream.close() - return result - - -def stream_lines(stream, comments=None): - """Return a list of non empty lines in the given `stream`. - - :type stream: object implementing 'xreadlines' or 'readlines' - :param stream: file like object - - :type comments: str or None - :param comments: - optional string which can be used to comment a line in the file - (i.e. 
lines starting with this string won't be returned) - - :rtype: list - :return: - a list of stripped line in the file, without empty and commented - lines - - :warning: at some point this function will probably return an iterator - """ - try: - readlines = stream.xreadlines - except AttributeError: - readlines = stream.readlines - result = [] - for line in readlines(): - line = line.strip() - if line and (comments is None or not line.startswith(comments)): - result.append(line) - return result - - -def export(from_dir, to_dir, - blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, - verbose=0): - """Make a mirror of `from_dir` in `to_dir`, omitting directories and - files listed in the black list or ending with one of the given - extensions. - - :type from_dir: str - :param from_dir: directory to export - - :type to_dir: str - :param to_dir: destination directory - - :type blacklist: list or tuple - :param blacklist: - list of files or directories to ignore, default to the content of - `BASE_BLACKLIST` - - :type ignore_ext: list or tuple - :param ignore_ext: - list of extensions to ignore, default to the content of - `IGNORED_EXTENSIONS` - - :type verbose: bool - :param verbose: - flag indicating whether information about exported files should be - printed to stderr, default to False - """ - try: - mkdir(to_dir) - except OSError: - pass # FIXME we should use "exists" if the point is about existing dir - # else (permission problems?) shouldn't return / raise ? 
- for directory, dirnames, filenames in walk(from_dir): - for norecurs in blacklist: - try: - dirnames.remove(norecurs) - except ValueError: - continue - for dirname in dirnames: - src = join(directory, dirname) - dest = to_dir + src[len(from_dir):] - if isdir(src): - if not exists(dest): - mkdir(dest) - for filename in filenames: - # don't include binary files - # endswith does not accept tuple in 2.4 - if any([filename.endswith(ext) for ext in ignore_ext]): - continue - src = join(directory, filename) - dest = to_dir + src[len(from_dir):] - if verbose: - print >> sys.stderr, src, '->', dest - if exists(dest): - remove(dest) - shutil.copy2(src, dest) - - -def remove_dead_links(directory, verbose=0): - """Recursively traverse directory and remove all dead links. - - :type directory: str - :param directory: directory to cleanup - - :type verbose: bool - :param verbose: - flag indicating whether information about deleted links should be - printed to stderr, default to False - """ - for dirpath, dirname, filenames in walk(directory): - for filename in dirnames + filenames: - src = join(dirpath, filename) - if islink(src) and not exists(src): - if verbose: - print 'remove dead link', src - remove(src) - diff --git a/pylibs/logilab/common/graph.py b/pylibs/logilab/common/graph.py deleted file mode 100644 index 75a2ee7a..00000000 --- a/pylibs/logilab/common/graph.py +++ /dev/null @@ -1,273 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Graph manipulation utilities. - -(dot generation adapted from pypy/translator/tool/make_dot.py) -""" - -__docformat__ = "restructuredtext en" - -__metaclass__ = type - -import os.path as osp -import os -import sys -import tempfile -from logilab.common.compat import str_encode - -def escape(value): - """Make usable in a dot file.""" - lines = [line.replace('"', '\\"') for line in value.split('\n')] - data = '\\l'.join(lines) - return '\\n' + data - -def target_info_from_filename(filename): - """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png').""" - basename = osp.basename(filename) - storedir = osp.dirname(osp.abspath(filename)) - target = filename.split('.')[-1] - return storedir, basename, target - - -class DotBackend: - """Dot File backend.""" - def __init__(self, graphname, rankdir=None, size=None, ratio=None, - charset='utf-8', renderer='dot', additionnal_param={}): - self.graphname = graphname - self.renderer = renderer - self.lines = [] - self._source = None - self.emit("digraph %s {" % normalize_node_id(graphname)) - if rankdir: - self.emit('rankdir=%s' % rankdir) - if ratio: - self.emit('ratio=%s' % ratio) - if size: - self.emit('size="%s"' % size) - if charset: - assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \ - 'unsupported charset %s' % charset - self.emit('charset="%s"' % charset) - for param in additionnal_param.iteritems(): - self.emit('='.join(param)) - - def get_source(self): - """returns self._source""" - if self._source is None: - self.emit("}\n") - self._source = '\n'.join(self.lines) - del self.lines - return self._source - - source = 
property(get_source) - - def generate(self, outputfile=None, dotfile=None, mapfile=None): - """Generates a graph file. - - :param outputfile: filename and path [defaults to graphname.png] - :param dotfile: filename and path [defaults to graphname.dot] - - :rtype: str - :return: a path to the generated file - """ - import subprocess # introduced in py 2.4 - name = self.graphname - if not dotfile: - # if 'outputfile' is a dot file use it as 'dotfile' - if outputfile and outputfile.endswith(".dot"): - dotfile = outputfile - else: - dotfile = '%s.dot' % name - if outputfile is not None: - storedir, basename, target = target_info_from_filename(outputfile) - if target != "dot": - pdot, dot_sourcepath = tempfile.mkstemp(".dot", name) - os.close(pdot) - else: - dot_sourcepath = osp.join(storedir, dotfile) - else: - target = 'png' - pdot, dot_sourcepath = tempfile.mkstemp(".dot", name) - ppng, outputfile = tempfile.mkstemp(".png", name) - os.close(pdot) - os.close(ppng) - pdot = open(dot_sourcepath, 'w') - pdot.write(str_encode(self.source, 'utf8')) - pdot.close() - if target != 'dot': - if sys.platform == 'win32': - use_shell = True - else: - use_shell = False - if mapfile: - subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile], - shell=use_shell) - else: - subprocess.call([self.renderer, '-T', target, - dot_sourcepath, '-o', outputfile], - shell=use_shell) - os.unlink(dot_sourcepath) - return outputfile - - def emit(self, line): - """Adds to final output.""" - self.lines.append(line) - - def emit_edge(self, name1, name2, **props): - """emit an edge from to . - edge properties: see http://www.graphviz.org/doc/info/attrs.html - """ - attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] - n_from, n_to = normalize_node_id(name1), normalize_node_id(name2) - self.emit('%s -> %s [%s];' % (n_from, n_to, ", ".join(attrs)) ) - - def emit_node(self, name, **props): - """emit a node with given properties. 
- node properties: see http://www.graphviz.org/doc/info/attrs.html - """ - attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()] - self.emit('%s [%s];' % (normalize_node_id(name), ", ".join(attrs))) - -def normalize_node_id(nid): - """Returns a suitable DOT node id for `nid`.""" - return '"%s"' % nid - -class GraphGenerator: - def __init__(self, backend): - # the backend is responsible to output the graph in a particular format - self.backend = backend - - # XXX doesn't like space in outpufile / mapfile - def generate(self, visitor, propshdlr, outputfile=None, mapfile=None): - # the visitor - # the property handler is used to get node and edge properties - # according to the graph and to the backend - self.propshdlr = propshdlr - for nodeid, node in visitor.nodes(): - props = propshdlr.node_properties(node) - self.backend.emit_node(nodeid, **props) - for subjnode, objnode, edge in visitor.edges(): - props = propshdlr.edge_properties(edge, subjnode, objnode) - self.backend.emit_edge(subjnode, objnode, **props) - return self.backend.generate(outputfile=outputfile, mapfile=mapfile) - - -class UnorderableGraph(Exception): - pass - -def ordered_nodes(graph): - """takes a dependency graph dict as arguments and return an ordered tuple of - nodes starting with nodes without dependencies and up to the outermost node. - - If there is some cycle in the graph, :exc:`UnorderableGraph` will be raised. - - Also the given graph dict will be emptied. 
- """ - # check graph consistency - cycles = get_cycles(graph) - if cycles: - cycles = '\n'.join([' -> '.join(cycle) for cycle in cycles]) - raise UnorderableGraph('cycles in graph: %s' % cycles) - vertices = set(graph) - to_vertices = set() - for edges in graph.values(): - to_vertices |= set(edges) - missing_vertices = to_vertices - vertices - if missing_vertices: - raise UnorderableGraph('missing vertices: %s' % ', '.join(missing_vertices)) - # order vertices - order = [] - order_set = set() - old_len = None - while graph: - if old_len == len(graph): - raise UnorderableGraph('unknown problem with %s' % graph) - old_len = len(graph) - deps_ok = [] - for node, node_deps in graph.items(): - for dep in node_deps: - if dep not in order_set: - break - else: - deps_ok.append(node) - order.append(deps_ok) - order_set |= set(deps_ok) - for node in deps_ok: - del graph[node] - result = [] - for grp in reversed(order): - result.extend(sorted(grp)) - return tuple(result) - - -def get_cycles(graph_dict, vertices=None): - '''given a dictionary representing an ordered graph (i.e. 
key are vertices - and values is a list of destination vertices representing edges), return a - list of detected cycles - ''' - if not graph_dict: - return () - result = [] - if vertices is None: - vertices = graph_dict.keys() - for vertice in vertices: - _get_cycles(graph_dict, vertice, [], result) - return result - -def _get_cycles(graph_dict, vertice=None, path=None, result=None): - """recursive function doing the real work for get_cycles""" - if vertice in path: - cycle = [vertice] - for node in path[::-1]: - if node == vertice: - break - cycle.insert(0, node) - # make a canonical representation - start_from = min(cycle) - index = cycle.index(start_from) - cycle = cycle[index:] + cycle[0:index] - # append it to result if not already in - if not cycle in result: - result.append(cycle) - return - path.append(vertice) - try: - for node in graph_dict[vertice]: - _get_cycles(graph_dict, node, path, result) - except KeyError: - pass - path.pop() - -def has_path(graph_dict, fromnode, tonode, path=None): - """generic function taking a simple graph definition as a dictionary, with - node has key associated to a list of nodes directly reachable from it. - - Return None if no path exists to go from `fromnode` to `tonode`, else the - first path found (as a list including the destination node at last) - """ - if path is None: - path = [] - elif fromnode in path: - return None - path.append(fromnode) - for destnode in graph_dict[fromnode]: - if destnode == tonode or has_path(graph_dict, destnode, tonode, path): - return path[1:] + [tonode] - path.pop() - return None - diff --git a/pylibs/logilab/common/hg.py b/pylibs/logilab/common/hg.py deleted file mode 100644 index edf2d3be..00000000 --- a/pylibs/logilab/common/hg.py +++ /dev/null @@ -1,130 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""mercurial utilities (mercurial should be installed)""" - -__docformat__ = "restructuredtext en" - -import os -import sys -import os.path as osp - -try: - from mercurial.error import RepoError - from mercurial.__version__ import version as hg_version -except ImportError: - from mercurial.repo import RepoError - from mercurial.version import get_version - hg_version = get_version() - -from mercurial.hg import repository as Repository -from mercurial.ui import ui as Ui -from mercurial.node import short -try: - # mercurial >= 1.2 (?) - from mercurial.cmdutil import walkchangerevs -except ImportError, ex: - from mercurial.commands import walkchangerevs -try: - # mercurial >= 1.1 (.1?) - from mercurial.util import cachefunc -except ImportError, ex: - def cachefunc(func): - return func -try: - # mercurial >= 1.3.1 - from mercurial import encoding - _encoding = encoding.encoding -except ImportError: - try: - from mercurial.util import _encoding - except ImportError: - import locale - # stay compatible with mercurial 0.9.1 (etch debian release) - # (borrowed from mercurial.util 1.1.2) - try: - _encoding = os.environ.get("HGENCODING") - if sys.platform == 'darwin' and not _encoding: - # On darwin, getpreferredencoding ignores the locale environment and - # always returns mac-roman. 
We override this if the environment is - # not C (has been customized by the user). - locale.setlocale(locale.LC_CTYPE, '') - _encoding = locale.getlocale()[1] - if not _encoding: - _encoding = locale.getpreferredencoding() or 'ascii' - except locale.Error: - _encoding = 'ascii' -try: - # demandimport causes problems when activated, ensure it isn't - # XXX put this in apycot where the pb has been noticed? - from mercurial import demandimport - demandimport.disable() -except: - pass - -Ui.warn = lambda *args, **kwargs: 0 # make it quiet - -def find_repository(path): - """returns 's mercurial repository - - None if is not under hg control - """ - path = osp.realpath(osp.abspath(path)) - while not osp.isdir(osp.join(path, ".hg")): - oldpath = path - path = osp.dirname(path) - if path == oldpath: - return None - return path - - -def get_repository(path): - """Simple function that open a hg repository""" - repopath = find_repository(path) - if repopath is None: - raise RuntimeError('no repository found in %s' % osp.abspath(path)) - return Repository(Ui(), path=repopath) - -def incoming(wdrepo, masterrepo): - try: - return wdrepo.findincoming(masterrepo) - except AttributeError: - from mercurial import hg, discovery - revs, checkout = hg.addbranchrevs(wdrepo, masterrepo, ('', []), None) - common, incoming, rheads = discovery.findcommonincoming( - wdrepo, masterrepo, heads=revs) - if not masterrepo.local(): - from mercurial import bundlerepo, changegroup - if revs is None and masterrepo.capable('changegroupsubset'): - revs = rheads - if revs is None: - cg = masterrepo.changegroup(incoming, "incoming") - else: - cg = masterrepo.changegroupsubset(incoming, revs, 'incoming') - fname = changegroup.writebundle(cg, None, "HG10UN") - # use the created uncompressed bundlerepo - masterrepo = bundlerepo.bundlerepository(wdrepo.ui, wdrepo.root, fname) - return masterrepo.changelog.nodesbetween(incoming, revs)[0] - -def outgoing(wdrepo, masterrepo): - try: - return 
wdrepo.findoutgoing(masterrepo) - except AttributeError: - from mercurial import hg, discovery - revs, checkout = hg.addbranchrevs(wdrepo, wdrepo, ('', []), None) - o = discovery.findoutgoing(wdrepo, masterrepo) - return wdrepo.changelog.nodesbetween(o, revs)[0] diff --git a/pylibs/logilab/common/interface.py b/pylibs/logilab/common/interface.py deleted file mode 100644 index 3ea4ab7e..00000000 --- a/pylibs/logilab/common/interface.py +++ /dev/null @@ -1,71 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Bases class for interfaces to provide 'light' interface handling. - - TODO: - _ implements a check method which check that an object implements the - interface - _ Attribute objects - - This module requires at least python 2.2 -""" -__docformat__ = "restructuredtext en" - - -class Interface(object): - """Base class for interfaces.""" - def is_implemented_by(cls, instance): - return implements(instance, cls) - is_implemented_by = classmethod(is_implemented_by) - - -def implements(obj, interface): - """Return true if the give object (maybe an instance or class) implements - the interface. 
- """ - kimplements = getattr(obj, '__implements__', ()) - if not isinstance(kimplements, (list, tuple)): - kimplements = (kimplements,) - for implementedinterface in kimplements: - if issubclass(implementedinterface, interface): - return True - return False - - -def extend(klass, interface, _recurs=False): - """Add interface to klass'__implements__ if not already implemented in. - - If klass is subclassed, ensure subclasses __implements__ it as well. - - NOTE: klass should be e new class. - """ - if not implements(klass, interface): - try: - kimplements = klass.__implements__ - kimplementsklass = type(kimplements) - kimplements = list(kimplements) - except AttributeError: - kimplementsklass = tuple - kimplements = [] - kimplements.append(interface) - klass.__implements__ = kimplementsklass(kimplements) - for subklass in klass.__subclasses__(): - extend(subklass, interface, _recurs=True) - elif _recurs: - for subklass in klass.__subclasses__(): - extend(subklass, interface, _recurs=True) diff --git a/pylibs/logilab/common/logging_ext.py b/pylibs/logilab/common/logging_ext.py deleted file mode 100644 index 1b7a1e60..00000000 --- a/pylibs/logilab/common/logging_ext.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Extends the logging module from the standard library.""" - -__docformat__ = "restructuredtext en" - -import os -import sys -import logging - -from logilab.common.textutils import colorize_ansi - - -def set_log_methods(cls, logger): - """bind standard logger's methods as methods on the class""" - cls.__logger = logger - for attr in ('debug', 'info', 'warning', 'error', 'critical', 'exception'): - setattr(cls, attr, getattr(logger, attr)) - - -def xxx_cyan(record): - if 'XXX' in record.message: - return 'cyan' - -class ColorFormatter(logging.Formatter): - """ - A color Formatter for the logging standard module. - - By default, colorize CRITICAL and ERROR in red, WARNING in orange, INFO in - green and DEBUG in yellow. - - self.colors is customizable via the 'color' constructor argument (dictionary). - - self.colorfilters is a list of functions that get the LogRecord - and return a color name or None. - """ - - def __init__(self, fmt=None, datefmt=None, colors=None): - logging.Formatter.__init__(self, fmt, datefmt) - self.colorfilters = [] - self.colors = {'CRITICAL': 'red', - 'ERROR': 'red', - 'WARNING': 'magenta', - 'INFO': 'green', - 'DEBUG': 'yellow', - } - if colors is not None: - assert isinstance(colors, dict) - self.colors.update(colors) - - def format(self, record): - msg = logging.Formatter.format(self, record) - if record.levelname in self.colors: - color = self.colors[record.levelname] - return colorize_ansi(msg, color) - else: - for cf in self.colorfilters: - color = cf(record) - if color: - return colorize_ansi(msg, color) - return msg - -def set_color_formatter(logger=None, **kw): - """ - Install a color formatter on the 'logger'. If not given, it will - defaults to the default logger. - - Any additional keyword will be passed as-is to the ColorFormatter - constructor. 
- """ - if logger is None: - logger = logging.getLogger() - if not logger.handlers: - logging.basicConfig() - format_msg = logger.handlers[0].formatter._fmt - fmt = ColorFormatter(format_msg, **kw) - fmt.colorfilters.append(xxx_cyan) - logger.handlers[0].setFormatter(fmt) - - -LOG_FORMAT = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s' -LOG_DATE_FORMAT = '%Y-%m-%d %H:%M:%S' - -def get_handler(debug=False, syslog=False, logfile=None, rotation_parameters=None): - """get an apropriate handler according to given parameters""" - if os.environ.get('APYCOT_ROOT'): - handler = logging.StreamHandler(sys.stdout) - if debug: - handler = logging.StreamHandler() - elif logfile is None: - if syslog: - from logging import handlers - handler = handlers.SysLogHandler() - else: - handler = logging.StreamHandler() - else: - try: - if rotation_parameters is None: - handler = logging.FileHandler(logfile) - else: - from logging.handlers import TimedRotatingFileHandler - handler = TimedRotatingFileHandler( - logfile, **rotation_parameters) - except IOError: - handler = logging.StreamHandler() - return handler - -def get_threshold(debug=False, logthreshold=None): - if logthreshold is None: - if debug: - logthreshold = logging.DEBUG - else: - logthreshold = logging.ERROR - elif isinstance(logthreshold, basestring): - logthreshold = getattr(logging, THRESHOLD_MAP.get(logthreshold, - logthreshold)) - return logthreshold - -def get_formatter(logformat=LOG_FORMAT, logdateformat=LOG_DATE_FORMAT): - isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty() - if isatty and sys.platform != 'win32': - fmt = ColorFormatter(logformat, logdateformat) - def col_fact(record): - if 'XXX' in record.message: - return 'cyan' - if 'kick' in record.message: - return 'red' - fmt.colorfilters.append(col_fact) - else: - fmt = logging.Formatter(logformat, logdateformat) - return fmt - -def init_log(debug=False, syslog=False, logthreshold=None, logfile=None, - logformat=LOG_FORMAT, 
logdateformat=LOG_DATE_FORMAT, fmt=None, - rotation_parameters=None, handler=None): - """init the log service""" - logger = logging.getLogger() - if handler is None: - handler = get_handler(debug, syslog, logfile, rotation_parameters) - # only addHandler and removeHandler method while I would like a setHandler - # method, so do it this way :$ - logger.handlers = [handler] - logthreshold = get_threshold(debug, logthreshold) - logger.setLevel(logthreshold) - if fmt is None: - if debug: - fmt = get_formatter(logformat=logformat, logdateformat=logdateformat) - else: - fmt = logging.Formatter(logformat, logdateformat) - handler.setFormatter(fmt) - return handler - -# map logilab.common.logger thresholds to logging thresholds -THRESHOLD_MAP = {'LOG_DEBUG': 'DEBUG', - 'LOG_INFO': 'INFO', - 'LOG_NOTICE': 'INFO', - 'LOG_WARN': 'WARNING', - 'LOG_WARNING': 'WARNING', - 'LOG_ERR': 'ERROR', - 'LOG_ERROR': 'ERROR', - 'LOG_CRIT': 'CRITICAL', - } diff --git a/pylibs/logilab/common/optik_ext.py b/pylibs/logilab/common/optik_ext.py deleted file mode 100644 index 39bbe18d..00000000 --- a/pylibs/logilab/common/optik_ext.py +++ /dev/null @@ -1,397 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""Add an abstraction level to transparently import optik classes from optparse -(python >= 2.3) or the optik package. - -It also defines three new types for optik/optparse command line parser : - - * regexp - argument of this type will be converted using re.compile - * csv - argument of this type will be converted using split(',') - * yn - argument of this type will be true if 'y' or 'yes', false if 'n' or 'no' - * named - argument of this type are in the form = or : - * password - argument of this type wont be converted but this is used by other tools - such as interactive prompt for configuration to double check value and - use an invisible field - * multiple_choice - same as default "choice" type but multiple choices allowed - * file - argument of this type wont be converted but checked that the given file exists - * color - argument of this type wont be converted but checked its either a - named color or a color specified using hexadecimal notation (preceded by a #) - * time - argument of this type will be converted to a float value in seconds - according to time units (ms, s, min, h, d) - * bytes - argument of this type will be converted to a float value in bytes - according to byte units (b, kb, mb, gb, tb) -""" -__docformat__ = "restructuredtext en" - -import re -import sys -import time -from copy import copy -from os.path import exists - -# python >= 2.3 -from optparse import OptionParser as BaseParser, Option as BaseOption, \ - OptionGroup, OptionContainer, OptionValueError, OptionError, \ - Values, HelpFormatter, NO_DEFAULT, SUPPRESS_HELP - -try: - from mx import DateTime - HAS_MX_DATETIME = True -except ImportError: - HAS_MX_DATETIME = False - - -OPTPARSE_FORMAT_DEFAULT = sys.version_info >= (2, 4) - -from logilab.common.textutils import splitstrip - -def check_regexp(option, opt, value): - """check a regexp value by trying to compile it - return the compiled regexp - """ - if hasattr(value, 'pattern'): - return value - try: - return re.compile(value) 
- except ValueError: - raise OptionValueError( - "option %s: invalid regexp value: %r" % (opt, value)) - -def check_csv(option, opt, value): - """check a csv value by trying to split it - return the list of separated values - """ - if isinstance(value, (list, tuple)): - return value - try: - return splitstrip(value) - except ValueError: - raise OptionValueError( - "option %s: invalid csv value: %r" % (opt, value)) - -def check_yn(option, opt, value): - """check a yn value - return true for yes and false for no - """ - if isinstance(value, int): - return bool(value) - if value in ('y', 'yes'): - return True - if value in ('n', 'no'): - return False - msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)" - raise OptionValueError(msg % (opt, value)) - -def check_named(option, opt, value): - """check a named value - return a dictionary containing (name, value) associations - """ - if isinstance(value, dict): - return value - values = [] - for value in check_csv(option, opt, value): - if value.find('=') != -1: - values.append(value.split('=', 1)) - elif value.find(':') != -1: - values.append(value.split(':', 1)) - if values: - return dict(values) - msg = "option %s: invalid named value %r, should be = or \ -:" - raise OptionValueError(msg % (opt, value)) - -def check_password(option, opt, value): - """check a password value (can't be empty) - """ - # no actual checking, monkey patch if you want more - return value - -def check_file(option, opt, value): - """check a file value - return the filepath - """ - if exists(value): - return value - msg = "option %s: file %r does not exist" - raise OptionValueError(msg % (opt, value)) - -# XXX use python datetime -def check_date(option, opt, value): - """check a file value - return the filepath - """ - try: - return DateTime.strptime(value, "%Y/%m/%d") - except DateTime.Error : - raise OptionValueError( - "expected format of %s is yyyy/mm/dd" % opt) - -def check_color(option, opt, value): - """check a color value 
and returns it - /!\ does *not* check color labels (like 'red', 'green'), only - checks hexadecimal forms - """ - # Case (1) : color label, we trust the end-user - if re.match('[a-z0-9 ]+$', value, re.I): - return value - # Case (2) : only accepts hexadecimal forms - if re.match('#[a-f0-9]{6}', value, re.I): - return value - # Else : not a color label neither a valid hexadecimal form => error - msg = "option %s: invalid color : %r, should be either hexadecimal \ - value or predefined color" - raise OptionValueError(msg % (opt, value)) - -def check_time(option, opt, value): - from logilab.common.textutils import TIME_UNITS, apply_units - if isinstance(value, (int, long, float)): - return value - return apply_units(value, TIME_UNITS) - -def check_bytes(option, opt, value): - from logilab.common.textutils import BYTE_UNITS, apply_units - if hasattr(value, '__int__'): - return value - return apply_units(value, BYTE_UNITS) - -import types - -class Option(BaseOption): - """override optik.Option to add some new option types - """ - TYPES = BaseOption.TYPES + ('regexp', 'csv', 'yn', 'named', 'password', - 'multiple_choice', 'file', 'color', - 'time', 'bytes') - ATTRS = BaseOption.ATTRS + ['hide', 'level'] - TYPE_CHECKER = copy(BaseOption.TYPE_CHECKER) - TYPE_CHECKER['regexp'] = check_regexp - TYPE_CHECKER['csv'] = check_csv - TYPE_CHECKER['yn'] = check_yn - TYPE_CHECKER['named'] = check_named - TYPE_CHECKER['multiple_choice'] = check_csv - TYPE_CHECKER['file'] = check_file - TYPE_CHECKER['color'] = check_color - TYPE_CHECKER['password'] = check_password - TYPE_CHECKER['time'] = check_time - TYPE_CHECKER['bytes'] = check_bytes - if HAS_MX_DATETIME: - TYPES += ('date',) - TYPE_CHECKER['date'] = check_date - - def __init__(self, *opts, **attrs): - BaseOption.__init__(self, *opts, **attrs) - if hasattr(self, "hide") and self.hide: - self.help = SUPPRESS_HELP - - def _check_choice(self): - """FIXME: need to override this due to optik misdesign""" - if self.type in ("choice", 
"multiple_choice"): - if self.choices is None: - raise OptionError( - "must supply a list of choices for type 'choice'", self) - elif type(self.choices) not in (types.TupleType, types.ListType): - raise OptionError( - "choices must be a list of strings ('%s' supplied)" - % str(type(self.choices)).split("'")[1], self) - elif self.choices is not None: - raise OptionError( - "must not supply choices for type %r" % self.type, self) - BaseOption.CHECK_METHODS[2] = _check_choice - - - def process(self, opt, value, values, parser): - # First, convert the value(s) to the right type. Howl if any - # value(s) are bogus. - try: - value = self.convert_value(opt, value) - except AttributeError: # py < 2.4 - value = self.check_value(opt, value) - if self.type == 'named': - existant = getattr(values, self.dest) - if existant: - existant.update(value) - value = existant - # And then take whatever action is expected of us. - # This is a separate method to make life easier for - # subclasses to add new actions. 
- return self.take_action( - self.action, self.dest, opt, value, values, parser) - - -class OptionParser(BaseParser): - """override optik.OptionParser to use our Option class - """ - def __init__(self, option_class=Option, *args, **kwargs): - BaseParser.__init__(self, option_class=Option, *args, **kwargs) - - def format_option_help(self, formatter=None): - if formatter is None: - formatter = self.formatter - outputlevel = getattr(formatter, 'output_level', 0) - formatter.store_option_strings(self) - result = [] - result.append(formatter.format_heading("Options")) - formatter.indent() - if self.option_list: - result.append(OptionContainer.format_option_help(self, formatter)) - result.append("\n") - for group in self.option_groups: - if group.level <= outputlevel and ( - group.description or level_options(group, outputlevel)): - result.append(group.format_help(formatter)) - result.append("\n") - formatter.dedent() - # Drop the last "\n", or the header if no options or option groups: - return "".join(result[:-1]) - - -OptionGroup.level = 0 - -def level_options(group, outputlevel): - return [option for option in group.option_list - if (getattr(option, 'level', 0) or 0) <= outputlevel - and not option.help is SUPPRESS_HELP] - -def format_option_help(self, formatter): - result = [] - outputlevel = getattr(formatter, 'output_level', 0) or 0 - for option in level_options(self, outputlevel): - result.append(formatter.format_option(option)) - return "".join(result) -OptionContainer.format_option_help = format_option_help - - -class ManHelpFormatter(HelpFormatter): - """Format help using man pages ROFF format""" - - def __init__ (self, - indent_increment=0, - max_help_position=24, - width=79, - short_first=0): - HelpFormatter.__init__ ( - self, indent_increment, max_help_position, width, short_first) - - def format_heading(self, heading): - return '.SH %s\n' % heading.upper() - - def format_description(self, description): - return description - - def format_option(self, 
option): - try: - optstring = option.option_strings - except AttributeError: - optstring = self.format_option_strings(option) - if option.help: - help_text = self.expand_default(option) - help = ' '.join([l.strip() for l in help_text.splitlines()]) - else: - help = '' - return '''.IP "%s" -%s -''' % (optstring, help) - - def format_head(self, optparser, pkginfo, section=1): - long_desc = "" - try: - pgm = optparser._get_prog_name() - except AttributeError: - # py >= 2.4.X (dunno which X exactly, at least 2) - pgm = optparser.get_prog_name() - short_desc = self.format_short_description(pgm, pkginfo.description) - if hasattr(pkginfo, "long_desc"): - long_desc = self.format_long_description(pgm, pkginfo.long_desc) - return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section), - short_desc, self.format_synopsis(pgm), - long_desc) - - def format_title(self, pgm, section): - date = '-'.join([str(num) for num in time.localtime()[:3]]) - return '.TH %s %s "%s" %s' % (pgm, section, date, pgm) - - def format_short_description(self, pgm, short_desc): - return '''.SH NAME -.B %s -\- %s -''' % (pgm, short_desc.strip()) - - def format_synopsis(self, pgm): - return '''.SH SYNOPSIS -.B %s -[ -.I OPTIONS -] [ -.I -] -''' % pgm - - def format_long_description(self, pgm, long_desc): - long_desc = '\n'.join([line.lstrip() - for line in long_desc.splitlines()]) - long_desc = long_desc.replace('\n.\n', '\n\n') - if long_desc.lower().startswith(pgm): - long_desc = long_desc[len(pgm):] - return '''.SH DESCRIPTION -.B %s -%s -''' % (pgm, long_desc.strip()) - - def format_tail(self, pkginfo): - tail = '''.SH SEE ALSO -/usr/share/doc/pythonX.Y-%s/ - -.SH BUGS -Please report bugs on the project\'s mailing list: -%s - -.SH AUTHOR -%s <%s> -''' % (getattr(pkginfo, 'debian_name', pkginfo.modname), - pkginfo.mailinglist, pkginfo.author, pkginfo.author_email) - - if hasattr(pkginfo, "copyright"): - tail += ''' -.SH COPYRIGHT -%s -''' % pkginfo.copyright - - return tail - -def 
generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0): - """generate a man page from an optik parser""" - formatter = ManHelpFormatter() - formatter.output_level = level - formatter.parser = optparser - print >> stream, formatter.format_head(optparser, pkginfo, section) - print >> stream, optparser.format_option_help(formatter) - print >> stream, formatter.format_tail(pkginfo) - - -__all__ = ('OptionParser', 'Option', 'OptionGroup', 'OptionValueError', - 'Values') diff --git a/pylibs/logilab/common/optparser.py b/pylibs/logilab/common/optparser.py deleted file mode 100644 index 0263dab6..00000000 --- a/pylibs/logilab/common/optparser.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Extend OptionParser with commands. - -Example: - ->>> parser = OptionParser() ->>> parser.usage = '%prog COMMAND [options] ...' 
->>> parser.add_command('build', 'mymod.build') ->>> parser.add_command('clean', run_clean, add_opt_clean) ->>> run, options, args = parser.parse_command(sys.argv[1:]) ->>> return run(options, args[1:]) - -With mymod.build that defines two functions run and add_options -""" -__docformat__ = "restructuredtext en" - -from warnings import warn -warn('lgc.optparser module is deprecated, use lgc.clcommands instead', DeprecationWarning, - stacklevel=2) - -import sys -import optparse - -class OptionParser(optparse.OptionParser): - - def __init__(self, *args, **kwargs): - optparse.OptionParser.__init__(self, *args, **kwargs) - self._commands = {} - self.min_args, self.max_args = 0, 1 - - def add_command(self, name, mod_or_funcs, help=''): - """name of the command, name of module or tuple of functions - (run, add_options) - """ - assert isinstance(mod_or_funcs, str) or isinstance(mod_or_funcs, tuple), \ - "mod_or_funcs has to be a module name or a tuple of functions" - self._commands[name] = (mod_or_funcs, help) - - def print_main_help(self): - optparse.OptionParser.print_help(self) - print '\ncommands:' - for cmdname, (_, help) in self._commands.items(): - print '% 10s - %s' % (cmdname, help) - - def parse_command(self, args): - if len(args) == 0: - self.print_main_help() - sys.exit(1) - cmd = args[0] - args = args[1:] - if cmd not in self._commands: - if cmd in ('-h', '--help'): - self.print_main_help() - sys.exit(0) - elif self.version is not None and cmd == "--version": - self.print_version() - sys.exit(0) - self.error('unknown command') - self.prog = '%s %s' % (self.prog, cmd) - mod_or_f, help = self._commands[cmd] - # optparse inserts self.description between usage and options help - self.description = help - if isinstance(mod_or_f, str): - exec 'from %s import run, add_options' % mod_or_f - else: - run, add_options = mod_or_f - add_options(self) - (options, args) = self.parse_args(args) - if not (self.min_args <= len(args) <= self.max_args): - self.error('incorrect 
number of arguments') - return run, options, args - - diff --git a/pylibs/logilab/common/pdf_ext.py b/pylibs/logilab/common/pdf_ext.py deleted file mode 100644 index 71c483b2..00000000 --- a/pylibs/logilab/common/pdf_ext.py +++ /dev/null @@ -1,111 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Manipulate pdf and fdf files (pdftk recommended). - -Notes regarding pdftk, pdf forms and fdf files (form definition file) -fields names can be extracted with: - - pdftk orig.pdf generate_fdf output truc.fdf - -to merge fdf and pdf: - - pdftk orig.pdf fill_form test.fdf output result.pdf [flatten] - -without flatten, one could further edit the resulting form. -with flatten, everything is turned into text. 
- - - - -""" -__docformat__ = "restructuredtext en" -# XXX seems very unix specific -# TODO: check availability of pdftk at import - - -import os - -HEAD="""%FDF-1.2 -%\xE2\xE3\xCF\xD3 -1 0 obj -<< -/FDF -<< -/Fields [ -""" - -TAIL="""] ->> ->> -endobj -trailer - -<< -/Root 1 0 R ->> -%%EOF -""" - -def output_field( f ): - return "\xfe\xff" + "".join( [ "\x00"+c for c in f ] ) - -def extract_keys(lines): - keys = [] - for line in lines: - if line.startswith('/V'): - pass #print 'value',line - elif line.startswith('/T'): - key = line[7:-2] - key = ''.join(key.split('\x00')) - keys.append( key ) - return keys - -def write_field(out, key, value): - out.write("<<\n") - if value: - out.write("/V (%s)\n" %value) - else: - out.write("/V /\n") - out.write("/T (%s)\n" % output_field(key) ) - out.write(">> \n") - -def write_fields(out, fields): - out.write(HEAD) - for (key, value, comment) in fields: - write_field(out, key, value) - write_field(out, key+"a", value) # pour copie-carbone sur autres pages - out.write(TAIL) - -def extract_keys_from_pdf(filename): - # what about using 'pdftk filename dump_data_fields' and parsing the output ? - os.system('pdftk %s generate_fdf output /tmp/toto.fdf' % filename) - lines = file('/tmp/toto.fdf').readlines() - return extract_keys(lines) - - -def fill_pdf(infile, outfile, fields): - write_fields(file('/tmp/toto.fdf', 'w'), fields) - os.system('pdftk %s fill_form /tmp/toto.fdf output %s flatten' % (infile, outfile)) - -def testfill_pdf(infile, outfile): - keys = extract_keys_from_pdf(infile) - fields = [] - for key in keys: - fields.append( (key, key, '') ) - fill_pdf(infile, outfile, fields) - diff --git a/pylibs/logilab/common/proc.py b/pylibs/logilab/common/proc.py deleted file mode 100644 index c27356c6..00000000 --- a/pylibs/logilab/common/proc.py +++ /dev/null @@ -1,277 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""module providing: -* process information (linux specific: rely on /proc) -* a class for resource control (memory / time / cpu time) - -This module doesn't work on windows platforms (only tested on linux) - -:organization: Logilab - - - -""" -__docformat__ = "restructuredtext en" - -import os -import stat -from resource import getrlimit, setrlimit, RLIMIT_CPU, RLIMIT_AS -from signal import signal, SIGXCPU, SIGKILL, SIGUSR2, SIGUSR1 -from threading import Timer, currentThread, Thread, Event -from time import time - -from logilab.common.tree import Node - -class NoSuchProcess(Exception): pass - -def proc_exists(pid): - """check the a pid is registered in /proc - raise NoSuchProcess exception if not - """ - if not os.path.exists('/proc/%s' % pid): - raise NoSuchProcess() - -PPID = 3 -UTIME = 13 -STIME = 14 -CUTIME = 15 -CSTIME = 16 -VSIZE = 22 - -class ProcInfo(Node): - """provide access to process information found in /proc""" - - def __init__(self, pid): - self.pid = int(pid) - Node.__init__(self, self.pid) - proc_exists(self.pid) - self.file = '/proc/%s/stat' % self.pid - self.ppid = int(self.status()[PPID]) - - def memory_usage(self): - """return the memory usage of the process in Ko""" - try : - return 
int(self.status()[VSIZE]) - except IOError: - return 0 - - def lineage_memory_usage(self): - return self.memory_usage() + sum([child.lineage_memory_usage() - for child in self.children]) - - def time(self, children=0): - """return the number of jiffies that this process has been scheduled - in user and kernel mode""" - status = self.status() - time = int(status[UTIME]) + int(status[STIME]) - if children: - time += int(status[CUTIME]) + int(status[CSTIME]) - return time - - def status(self): - """return the list of fields found in /proc//stat""" - return open(self.file).read().split() - - def name(self): - """return the process name found in /proc//stat - """ - return self.status()[1].strip('()') - - def age(self): - """return the age of the process - """ - return os.stat(self.file)[stat.ST_MTIME] - -class ProcInfoLoader: - """manage process information""" - - def __init__(self): - self._loaded = {} - - def list_pids(self): - """return a list of existent process ids""" - for subdir in os.listdir('/proc'): - if subdir.isdigit(): - yield int(subdir) - - def load(self, pid): - """get a ProcInfo object for a given pid""" - pid = int(pid) - try: - return self._loaded[pid] - except KeyError: - procinfo = ProcInfo(pid) - procinfo.manager = self - self._loaded[pid] = procinfo - return procinfo - - - def load_all(self): - """load all processes information""" - for pid in self.list_pids(): - try: - procinfo = self.load(pid) - if procinfo.parent is None and procinfo.ppid: - pprocinfo = self.load(procinfo.ppid) - pprocinfo.append(procinfo) - except NoSuchProcess: - pass - - -try: - class ResourceError(BaseException): - """Error raise when resource limit is reached""" - limit = "Unknown Resource Limit" -except NameError: - class ResourceError(Exception): - """Error raise when resource limit is reached""" - limit = "Unknown Resource Limit" - - -class XCPUError(ResourceError): - """Error raised when CPU Time limit is reached""" - limit = "CPU Time" - -class 
LineageMemoryError(ResourceError): - """Error raised when the total amount of memory used by a process and - it's child is reached""" - limit = "Lineage total Memory" - -class TimeoutError(ResourceError): - """Error raised when the process is running for to much time""" - limit = "Real Time" - -# Can't use subclass because the StandardError MemoryError raised -RESOURCE_LIMIT_EXCEPTION = (ResourceError, MemoryError) - - -class MemorySentinel(Thread): - """A class checking a process don't use too much memory in a separated - daemonic thread - """ - def __init__(self, interval, memory_limit, gpid=os.getpid()): - Thread.__init__(self, target=self._run, name="Test.Sentinel") - self.memory_limit = memory_limit - self._stop = Event() - self.interval = interval - self.setDaemon(True) - self.gpid = gpid - - def stop(self): - """stop ap""" - self._stop.set() - - def _run(self): - pil = ProcInfoLoader() - while not self._stop.isSet(): - if self.memory_limit <= pil.load(self.gpid).lineage_memory_usage(): - os.killpg(self.gpid, SIGUSR1) - self._stop.wait(self.interval) - - -class ResourceController: - - def __init__(self, max_cpu_time=None, max_time=None, max_memory=None, - max_reprieve=60): - if SIGXCPU == -1: - raise RuntimeError("Unsupported platform") - self.max_time = max_time - self.max_memory = max_memory - self.max_cpu_time = max_cpu_time - self._reprieve = max_reprieve - self._timer = None - self._msentinel = None - self._old_max_memory = None - self._old_usr1_hdlr = None - self._old_max_cpu_time = None - self._old_usr2_hdlr = None - self._old_sigxcpu_hdlr = None - self._limit_set = 0 - self._abort_try = 0 - self._start_time = None - self._elapse_time = 0 - - def _hangle_sig_timeout(self, sig, frame): - raise TimeoutError() - - def _hangle_sig_memory(self, sig, frame): - if self._abort_try < self._reprieve: - self._abort_try += 1 - raise LineageMemoryError("Memory limit reached") - else: - os.killpg(os.getpid(), SIGKILL) - - def _handle_sigxcpu(self, sig, frame): - if 
self._abort_try < self._reprieve: - self._abort_try += 1 - raise XCPUError("Soft CPU time limit reached") - else: - os.killpg(os.getpid(), SIGKILL) - - def _time_out(self): - if self._abort_try < self._reprieve: - self._abort_try += 1 - os.killpg(os.getpid(), SIGUSR2) - if self._limit_set > 0: - self._timer = Timer(1, self._time_out) - self._timer.start() - else: - os.killpg(os.getpid(), SIGKILL) - - def setup_limit(self): - """set up the process limit""" - assert currentThread().getName() == 'MainThread' - os.setpgrp() - if self._limit_set <= 0: - if self.max_time is not None: - self._old_usr2_hdlr = signal(SIGUSR2, self._hangle_sig_timeout) - self._timer = Timer(max(1, int(self.max_time) - self._elapse_time), - self._time_out) - self._start_time = int(time()) - self._timer.start() - if self.max_cpu_time is not None: - self._old_max_cpu_time = getrlimit(RLIMIT_CPU) - cpu_limit = (int(self.max_cpu_time), self._old_max_cpu_time[1]) - self._old_sigxcpu_hdlr = signal(SIGXCPU, self._handle_sigxcpu) - setrlimit(RLIMIT_CPU, cpu_limit) - if self.max_memory is not None: - self._msentinel = MemorySentinel(1, int(self.max_memory) ) - self._old_max_memory = getrlimit(RLIMIT_AS) - self._old_usr1_hdlr = signal(SIGUSR1, self._hangle_sig_memory) - as_limit = (int(self.max_memory), self._old_max_memory[1]) - setrlimit(RLIMIT_AS, as_limit) - self._msentinel.start() - self._limit_set += 1 - - def clean_limit(self): - """reinstall the old process limit""" - if self._limit_set > 0: - if self.max_time is not None: - self._timer.cancel() - self._elapse_time += int(time())-self._start_time - self._timer = None - signal(SIGUSR2, self._old_usr2_hdlr) - if self.max_cpu_time is not None: - setrlimit(RLIMIT_CPU, self._old_max_cpu_time) - signal(SIGXCPU, self._old_sigxcpu_hdlr) - if self.max_memory is not None: - self._msentinel.stop() - self._msentinel = None - setrlimit(RLIMIT_AS, self._old_max_memory) - signal(SIGUSR1, self._old_usr1_hdlr) - self._limit_set -= 1 diff --git 
a/pylibs/logilab/common/pyro_ext.py b/pylibs/logilab/common/pyro_ext.py deleted file mode 100644 index 0f4d2790..00000000 --- a/pylibs/logilab/common/pyro_ext.py +++ /dev/null @@ -1,180 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Python Remote Object utilities - -Main functions available: - -* `register_object` to expose arbitrary object through pyro using delegation - approach and register it in the nameserver. -* `ns_unregister` unregister an object identifier from the nameserver. -* `ns_get_proxy` get a pyro proxy from a nameserver object identifier. 
-""" - -__docformat__ = "restructuredtext en" - -import logging -import tempfile - -from Pyro import core, naming, errors, util, config - -_LOGGER = logging.getLogger('pyro') -_MARKER = object() - -config.PYRO_STORAGE = tempfile.gettempdir() - -def ns_group_and_id(idstr, defaultnsgroup=_MARKER): - try: - nsgroup, nsid = idstr.rsplit('.', 1) - except ValueError: - if defaultnsgroup is _MARKER: - nsgroup = config.PYRO_NS_DEFAULTGROUP - else: - nsgroup = defaultnsgroup - nsid = idstr - if nsgroup is not None and not nsgroup.startswith(':'): - nsgroup = ':' + nsgroup - return nsgroup, nsid - -def host_and_port(hoststr): - if not hoststr: - return None, None - try: - hoststr, port = hoststr.split(':') - except ValueError: - port = None - else: - port = int(port) - return hoststr, port - -_DAEMONS = {} -_PYRO_OBJS = {} -def _get_daemon(daemonhost, start=True): - if not daemonhost in _DAEMONS: - if not start: - raise Exception('no daemon for %s' % daemonhost) - if not _DAEMONS: - core.initServer(banner=0) - host, port = host_and_port(daemonhost) - daemon = core.Daemon(host=host, port=port) - _DAEMONS[daemonhost] = daemon - return _DAEMONS[daemonhost] - - -def locate_ns(nshost): - """locate and return the pyro name server to the daemon""" - core.initClient(banner=False) - return naming.NameServerLocator().getNS(*host_and_port(nshost)) - - -def register_object(object, nsid, defaultnsgroup=_MARKER, - daemonhost=None, nshost=None, use_pyrons=True): - """expose the object as a pyro object and register it in the name-server - - if use_pyrons is False, then the object is exposed, but no - attempt to register it to a pyro nameserver is made. 
- - return the pyro daemon object - """ - nsgroup, nsid = ns_group_and_id(nsid, defaultnsgroup) - daemon = _get_daemon(daemonhost) - if use_pyrons: - nsd = locate_ns(nshost) - # make sure our namespace group exists - try: - nsd.createGroup(nsgroup) - except errors.NamingError: - pass - daemon.useNameServer(nsd) - # use Delegation approach - impl = core.ObjBase() - impl.delegateTo(object) - qnsid = '%s.%s' % (nsgroup, nsid) - uri = daemon.connect(impl, qnsid) - _PYRO_OBJS[qnsid] = str(uri) - _LOGGER.info('registered %s a pyro object using group %s and id %s', - object, nsgroup, nsid) - return daemon - -def get_object_uri(qnsid): - return _PYRO_OBJS[qnsid] - -def ns_unregister(nsid, defaultnsgroup=_MARKER, nshost=None): - """unregister the object with the given nsid from the pyro name server""" - nsgroup, nsid = ns_group_and_id(nsid, defaultnsgroup) - try: - nsd = locate_ns(nshost) - except errors.PyroError, ex: - # name server not responding - _LOGGER.error('can\'t locate pyro name server: %s', ex) - else: - try: - nsd.unregister('%s.%s' % (nsgroup, nsid)) - _LOGGER.info('%s unregistered from pyro name server', nsid) - except errors.NamingError: - _LOGGER.warning('%s not registered in pyro name server', nsid) - - -def ns_reregister(nsid, defaultnsgroup=_MARKER, nshost=None): - """reregister a pyro object into the name server. You only have to specify - the name-server id of the object (though you MUST have gone through - `register_object` for the given object previously). - - This is especially useful for long running server while the name server may - have been restarted, and its records lost. 
- """ - nsgroup, nsid = ns_group_and_id(nsid, defaultnsgroup) - qnsid = '%s.%s' % (nsgroup, nsid) - nsd = locate_ns(nshost) - try: - nsd.unregister(qnsid) - except errors.NamingError: - # make sure our namespace group exists - try: - nsd.createGroup(nsgroup) - except errors.NamingError: - pass - nsd.register(qnsid, _PYRO_OBJS[qnsid]) - -def ns_get_proxy(nsid, defaultnsgroup=_MARKER, nshost=None): - """ - if nshost is None, the nameserver is found by a broadcast. - """ - # resolve the Pyro object - nsgroup, nsid = ns_group_and_id(nsid, defaultnsgroup) - try: - nsd = locate_ns(nshost) - pyrouri = nsd.resolve('%s.%s' % (nsgroup, nsid)) - except errors.ProtocolError, ex: - raise errors.PyroError( - 'Could not connect to the Pyro name server (host: %s)' % nshost) - except errors.NamingError: - raise errors.PyroError( - 'Could not get proxy for %s (not registered in Pyro), ' - 'you may have to restart your server-side application' % nsid) - return core.getProxyForURI(pyrouri) - -def get_proxy(pyro_uri): - """get a proxy for the passed pyro uri without using a nameserver - """ - return core.getProxyForURI(pyro_uri) - -def set_pyro_log_threshold(level): - pyrologger = logging.getLogger('Pyro.%s' % str(id(util.Log))) - # remove handlers so only the root handler is used - pyrologger.handlers = [] - pyrologger.setLevel(level) diff --git a/pylibs/logilab/common/pytest.py b/pylibs/logilab/common/pytest.py deleted file mode 100644 index 2d6ccf95..00000000 --- a/pylibs/logilab/common/pytest.py +++ /dev/null @@ -1,1177 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""pytest is a tool that eases test running and debugging. - -To be able to use pytest, you should either write tests using -the logilab.common.testlib's framework or the unittest module of the -Python's standard library. - -You can customize pytest's behaviour by defining a ``pytestconf.py`` file -somewhere in your test directory. In this file, you can add options or -change the way tests are run. - -To add command line options, you must define a ``update_parser`` function in -your ``pytestconf.py`` file. The function must accept a single parameter -that will be the OptionParser's instance to customize. - -If you wish to customize the tester, you'll have to define a class named -``CustomPyTester``. This class should extend the default `PyTester` class -defined in the pytest module. Take a look at the `PyTester` and `DjangoTester` -classes for more information about what can be done. 
- -For instance, if you wish to add a custom -l option to specify a loglevel, you -could define the following ``pytestconf.py`` file :: - - import logging - from logilab.common.pytest import PyTester - - def update_parser(parser): - parser.add_option('-l', '--loglevel', dest='loglevel', action='store', - choices=('debug', 'info', 'warning', 'error', 'critical'), - default='critical', help="the default log level possible choices are " - "('debug', 'info', 'warning', 'error', 'critical')") - return parser - - - class CustomPyTester(PyTester): - def __init__(self, cvg, options): - super(CustomPyTester, self).__init__(cvg, options) - loglevel = options.loglevel.upper() - logger = logging.getLogger('erudi') - logger.setLevel(logging.getLevelName(loglevel)) - - -In your TestCase class you can then get the value of a specific option with -the ``optval`` method:: - - class MyTestCase(TestCase): - def test_foo(self): - loglevel = self.optval('loglevel') - # ... - - -You can also tag your tag your test for fine filtering - -With those tag:: - - from logilab.common.testlib import tag, TestCase - - class Exemple(TestCase): - - @tag('rouge', 'carre') - def toto(self): - pass - - @tag('carre', 'vert') - def tata(self): - pass - - @tag('rouge') - def titi(test): - pass - -you can filter the function with a simple python expression - - * ``toto`` and ``titi`` match ``rouge`` - * ``toto``, ``tata`` and ``titi``, match ``rouge or carre`` - * ``tata`` and ``titi`` match``rouge ^ carre`` - * ``titi`` match ``rouge and not carre`` -""" -__docformat__ = "restructuredtext en" - -PYTEST_DOC = """%prog [OPTIONS] [testfile [testpattern]] - -examples: - -pytest path/to/mytests.py -pytest path/to/mytests.py TheseTests -pytest path/to/mytests.py TheseTests.test_thisone -pytest path/to/mytests.py -m '(not long and database) or regr' - -pytest one (will run both test_thisone and test_thatone) -pytest path/to/mytests.py -s not (will skip test_notthisone) - -pytest --coverage test_foo.py - (only 
if logilab.devtools is available) -""" - -ENABLE_DBC = False -FILE_RESTART = ".pytest.restart" - -import os, sys, re -import os.path as osp -from time import time, clock -import warnings -import types - -from logilab.common.fileutils import abspath_listdir -from logilab.common import textutils -from logilab.common import testlib, STD_BLACKLIST -# use the same unittest module as testlib -from logilab.common.testlib import unittest, start_interactive_mode -from logilab.common.compat import any -import doctest - -import unittest as unittest_legacy -if not getattr(unittest_legacy, "__package__", None): - try: - import unittest2.suite as unittest_suite - except ImportError: - sys.exit("You have to install python-unittest2 to use this module") -else: - import unittest.suite as unittest_suite - -try: - import django - from logilab.common.modutils import modpath_from_file, load_module_from_modpath - DJANGO_FOUND = True -except ImportError: - DJANGO_FOUND = False - -CONF_FILE = 'pytestconf.py' - -## coverage hacks, do not read this, do not read this, do not read this - -# hey, but this is an aspect, right ?!!! 
-class TraceController(object): - nesting = 0 - - def pause_tracing(cls): - if not cls.nesting: - cls.tracefunc = staticmethod(getattr(sys, '__settrace__', sys.settrace)) - cls.oldtracer = getattr(sys, '__tracer__', None) - sys.__notrace__ = True - cls.tracefunc(None) - cls.nesting += 1 - pause_tracing = classmethod(pause_tracing) - - def resume_tracing(cls): - cls.nesting -= 1 - assert cls.nesting >= 0 - if not cls.nesting: - cls.tracefunc(cls.oldtracer) - delattr(sys, '__notrace__') - resume_tracing = classmethod(resume_tracing) - - -pause_tracing = TraceController.pause_tracing -resume_tracing = TraceController.resume_tracing - - -def nocoverage(func): - if hasattr(func, 'uncovered'): - return func - func.uncovered = True - def not_covered(*args, **kwargs): - pause_tracing() - try: - return func(*args, **kwargs) - finally: - resume_tracing() - not_covered.uncovered = True - return not_covered - - -## end of coverage hacks - - -TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$") -def this_is_a_testfile(filename): - """returns True if `filename` seems to be a test file""" - return TESTFILE_RE.match(osp.basename(filename)) - -TESTDIR_RE = re.compile("^(unit)?tests?$") -def this_is_a_testdir(dirpath): - """returns True if `filename` seems to be a test directory""" - return TESTDIR_RE.match(osp.basename(dirpath)) - - -def load_pytest_conf(path, parser): - """loads a ``pytestconf.py`` file and update default parser - and / or tester. 
- """ - namespace = {} - execfile(path, namespace) - if 'update_parser' in namespace: - namespace['update_parser'](parser) - return namespace.get('CustomPyTester', PyTester) - - -def project_root(parser, projdir=os.getcwd()): - """try to find project's root and add it to sys.path""" - previousdir = curdir = osp.abspath(projdir) - testercls = PyTester - conf_file_path = osp.join(curdir, CONF_FILE) - if osp.isfile(conf_file_path): - testercls = load_pytest_conf(conf_file_path, parser) - while this_is_a_testdir(curdir) or \ - osp.isfile(osp.join(curdir, '__init__.py')): - newdir = osp.normpath(osp.join(curdir, os.pardir)) - if newdir == curdir: - break - previousdir = curdir - curdir = newdir - conf_file_path = osp.join(curdir, CONF_FILE) - if osp.isfile(conf_file_path): - testercls = load_pytest_conf(conf_file_path, parser) - return previousdir, testercls - - -class GlobalTestReport(object): - """this class holds global test statistics""" - def __init__(self): - self.ran = 0 - self.skipped = 0 - self.failures = 0 - self.errors = 0 - self.ttime = 0 - self.ctime = 0 - self.modulescount = 0 - self.errmodules = [] - - def feed(self, filename, testresult, ttime, ctime): - """integrates new test information into internal statistics""" - ran = testresult.testsRun - self.ran += ran - self.skipped += len(getattr(testresult, 'skipped', ())) - self.failures += len(testresult.failures) - self.errors += len(testresult.errors) - self.ttime += ttime - self.ctime += ctime - self.modulescount += 1 - if not testresult.wasSuccessful(): - problems = len(testresult.failures) + len(testresult.errors) - self.errmodules.append((filename[:-3], problems, ran)) - - def failed_to_test_module(self, filename): - """called when the test module could not be imported by unittest - """ - self.errors += 1 - self.modulescount += 1 - self.ran += 1 - self.errmodules.append((filename[:-3], 1, 1)) - - def skip_module(self, filename): - self.modulescount += 1 - self.ran += 1 - 
self.errmodules.append((filename[:-3], 0, 0)) - - def __str__(self): - """this is just presentation stuff""" - line1 = ['Ran %s test cases in %.2fs (%.2fs CPU)' - % (self.ran, self.ttime, self.ctime)] - if self.errors: - line1.append('%s errors' % self.errors) - if self.failures: - line1.append('%s failures' % self.failures) - if self.skipped: - line1.append('%s skipped' % self.skipped) - modulesok = self.modulescount - len(self.errmodules) - if self.errors or self.failures: - line2 = '%s modules OK (%s failed)' % (modulesok, - len(self.errmodules)) - descr = ', '.join(['%s [%s/%s]' % info for info in self.errmodules]) - line3 = '\nfailures: %s' % descr - elif modulesok: - line2 = 'All %s modules OK' % modulesok - line3 = '' - else: - return '' - return '%s\n%s%s' % (', '.join(line1), line2, line3) - - - -def remove_local_modules_from_sys(testdir): - """remove all modules from cache that come from `testdir` - - This is used to avoid strange side-effects when using the - testall() mode of pytest. 
- For instance, if we run pytest on this tree:: - - A/test/test_utils.py - B/test/test_utils.py - - we **have** to clean sys.modules to make sure the correct test_utils - module is ran in B - """ - for modname, mod in sys.modules.items(): - if mod is None: - continue - if not hasattr(mod, '__file__'): - # this is the case of some built-in modules like sys, imp, marshal - continue - modfile = mod.__file__ - # if modfile is not an absolute path, it was probably loaded locally - # during the tests - if not osp.isabs(modfile) or modfile.startswith(testdir): - del sys.modules[modname] - - - -class PyTester(object): - """encapsulates testrun logic""" - - def __init__(self, cvg, options): - self.report = GlobalTestReport() - self.cvg = cvg - self.options = options - self.firstwrite = True - self._errcode = None - - def show_report(self): - """prints the report and returns appropriate exitcode""" - # everything has been ran, print report - print "*" * 79 - print self.report - - def get_errcode(self): - # errcode set explicitly - if self._errcode is not None: - return self._errcode - return self.report.failures + self.report.errors - - def set_errcode(self, errcode): - self._errcode = errcode - errcode = property(get_errcode, set_errcode) - - def testall(self, exitfirst=False): - """walks through current working directory, finds something - which can be considered as a testdir and runs every test there - """ - here = os.getcwd() - for dirname, dirs, _ in os.walk(here): - for skipped in STD_BLACKLIST: - if skipped in dirs: - dirs.remove(skipped) - basename = osp.basename(dirname) - if this_is_a_testdir(basename): - print "going into", dirname - # we found a testdir, let's explore it ! 
- if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - if self.report.ran == 0: - print "no test dir found testing here:", here - # if no test was found during the visit, consider - # the local directory as a test directory even if - # it doesn't have a traditional test directory name - self.testonedir(here) - - def testonedir(self, testdir, exitfirst=False): - """finds each testfile in the `testdir` and runs it - - return true when all tests has been executed, false if exitfirst and - some test has failed. - """ - for filename in abspath_listdir(testdir): - if this_is_a_testfile(filename): - if self.options.exitfirst and not self.options.restart: - # overwrite restart file - try: - restartfile = open(FILE_RESTART, "w") - restartfile.close() - except Exception, e: - print >> sys.__stderr__, "Error while overwriting \ -succeeded test file :", osp.join(os.getcwd(), FILE_RESTART) - raise e - # run test and collect information - prog = self.testfile(filename, batchmode=True) - if exitfirst and (prog is None or not prog.result.wasSuccessful()): - return False - self.firstwrite = True - # clean local modules - remove_local_modules_from_sys(testdir) - return True - - def testfile(self, filename, batchmode=False): - """runs every test in `filename` - - :param filename: an absolute path pointing to a unittest file - """ - here = os.getcwd() - dirname = osp.dirname(filename) - if dirname: - os.chdir(dirname) - # overwrite restart file if it has not been done already - if self.options.exitfirst and not self.options.restart and self.firstwrite: - try: - restartfile = open(FILE_RESTART, "w") - restartfile.close() - except Exception, e: - print >> sys.__stderr__, "Error while overwriting \ -succeeded test file :", osp.join(os.getcwd(), FILE_RESTART) - raise e - modname = osp.basename(filename)[:-3] - try: - print >> sys.stderr, (' %s ' % osp.basename(filename)).center(70, '=') - except TypeError: # < py 2.4 bw compat - print >> sys.stderr, (' %s ' % 
osp.basename(filename)).center(70) - try: - tstart, cstart = time(), clock() - try: - testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg, - options=self.options, outstream=sys.stderr) - except KeyboardInterrupt: - raise - except SystemExit, exc: - self.errcode = exc.code - raise - except testlib.SkipTest: - print "Module skipped:", filename - self.report.skip_module(filename) - return None - except Exception: - self.report.failed_to_test_module(filename) - print >> sys.stderr, 'unhandled exception occurred while testing', modname - import traceback - traceback.print_exc(file=sys.stderr) - return None - - tend, cend = time(), clock() - ttime, ctime = (tend - tstart), (cend - cstart) - self.report.feed(filename, testprog.result, ttime, ctime) - return testprog - finally: - if dirname: - os.chdir(here) - - - -class DjangoTester(PyTester): - - def load_django_settings(self, dirname): - """try to find project's setting and load it""" - curdir = osp.abspath(dirname) - previousdir = curdir - while not osp.isfile(osp.join(curdir, 'settings.py')) and \ - osp.isfile(osp.join(curdir, '__init__.py')): - newdir = osp.normpath(osp.join(curdir, os.pardir)) - if newdir == curdir: - raise AssertionError('could not find settings.py') - previousdir = curdir - curdir = newdir - # late django initialization - settings = load_module_from_modpath(modpath_from_file(osp.join(curdir, 'settings.py'))) - from django.core.management import setup_environ - setup_environ(settings) - settings.DEBUG = False - self.settings = settings - # add settings dir to pythonpath since it's the project's root - if curdir not in sys.path: - sys.path.insert(1, curdir) - - def before_testfile(self): - # Those imports must be done **after** setup_environ was called - from django.test.utils import setup_test_environment - from django.test.utils import create_test_db - setup_test_environment() - create_test_db(verbosity=0) - self.dbname = self.settings.TEST_DATABASE_NAME - - def 
after_testfile(self): - # Those imports must be done **after** setup_environ was called - from django.test.utils import teardown_test_environment - from django.test.utils import destroy_test_db - teardown_test_environment() - print 'destroying', self.dbname - destroy_test_db(self.dbname, verbosity=0) - - def testall(self, exitfirst=False): - """walks through current working directory, finds something - which can be considered as a testdir and runs every test there - """ - for dirname, dirs, files in os.walk(os.getcwd()): - for skipped in ('CVS', '.svn', '.hg'): - if skipped in dirs: - dirs.remove(skipped) - if 'tests.py' in files: - if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - else: - basename = osp.basename(dirname) - if basename in ('test', 'tests'): - print "going into", dirname - # we found a testdir, let's explore it ! - if not self.testonedir(dirname, exitfirst): - break - dirs[:] = [] - - def testonedir(self, testdir, exitfirst=False): - """finds each testfile in the `testdir` and runs it - - return true when all tests has been executed, false if exitfirst and - some test has failed. 
- """ - # special django behaviour : if tests are splitted in several files, - # remove the main tests.py file and tests each test file separately - testfiles = [fpath for fpath in abspath_listdir(testdir) - if this_is_a_testfile(fpath)] - if len(testfiles) > 1: - try: - testfiles.remove(osp.join(testdir, 'tests.py')) - except ValueError: - pass - for filename in testfiles: - # run test and collect information - prog = self.testfile(filename, batchmode=True) - if exitfirst and (prog is None or not prog.result.wasSuccessful()): - return False - # clean local modules - remove_local_modules_from_sys(testdir) - return True - - def testfile(self, filename, batchmode=False): - """runs every test in `filename` - - :param filename: an absolute path pointing to a unittest file - """ - here = os.getcwd() - dirname = osp.dirname(filename) - if dirname: - os.chdir(dirname) - self.load_django_settings(dirname) - modname = osp.basename(filename)[:-3] - print >>sys.stderr, (' %s ' % osp.basename(filename)).center(70, '=') - try: - try: - tstart, cstart = time(), clock() - self.before_testfile() - testprog = SkipAwareTestProgram(modname, batchmode=batchmode, cvg=self.cvg) - tend, cend = time(), clock() - ttime, ctime = (tend - tstart), (cend - cstart) - self.report.feed(filename, testprog.result, ttime, ctime) - return testprog - except SystemExit: - raise - except Exception, exc: - import traceback - traceback.print_exc() - self.report.failed_to_test_module(filename) - print 'unhandled exception occurred while testing', modname - print 'error: %s' % exc - return None - finally: - self.after_testfile() - if dirname: - os.chdir(here) - - -def make_parser(): - """creates the OptionParser instance - """ - from optparse import OptionParser - parser = OptionParser(usage=PYTEST_DOC) - - parser.newargs = [] - def rebuild_cmdline(option, opt, value, parser): - """carry the option to unittest_main""" - parser.newargs.append(opt) - - def rebuild_and_store(option, opt, value, parser): - 
"""carry the option to unittest_main and store - the value on current parser - """ - parser.newargs.append(opt) - setattr(parser.values, option.dest, True) - - def capture_and_rebuild(option, opt, value, parser): - warnings.simplefilter('ignore', DeprecationWarning) - rebuild_cmdline(option, opt, value, parser) - - # pytest options - parser.add_option('-t', dest='testdir', default=None, - help="directory where the tests will be found") - parser.add_option('-d', dest='dbc', default=False, - action="store_true", help="enable design-by-contract") - # unittest_main options provided and passed through pytest - parser.add_option('-v', '--verbose', callback=rebuild_cmdline, - action="callback", help="Verbose output") - parser.add_option('-i', '--pdb', callback=rebuild_and_store, - dest="pdb", action="callback", - help="Enable test failure inspection (conflicts with --coverage)") - parser.add_option('-x', '--exitfirst', callback=rebuild_and_store, - dest="exitfirst", default=False, - action="callback", help="Exit on first failure " - "(only make sense when pytest run one test file)") - parser.add_option('-R', '--restart', callback=rebuild_and_store, - dest="restart", default=False, - action="callback", - help="Restart tests from where it failed (implies exitfirst) " - "(only make sense if tests previously ran with exitfirst only)") - parser.add_option('--color', callback=rebuild_cmdline, - action="callback", - help="colorize tracebacks") - parser.add_option('-s', '--skip', - # XXX: I wish I could use the callback action but it - # doesn't seem to be able to get the value - # associated to the option - action="store", dest="skipped", default=None, - help="test names matching this name will be skipped " - "to skip several patterns, use commas") - parser.add_option('-q', '--quiet', callback=rebuild_cmdline, - action="callback", help="Minimal output") - parser.add_option('-P', '--profile', default=None, dest='profile', - help="Profile execution and store data in the given 
file") - parser.add_option('-m', '--match', default=None, dest='tags_pattern', - help="only execute test whose tag match the current pattern") - - try: - from logilab.devtools.lib.coverage import Coverage - parser.add_option('--coverage', dest="coverage", default=False, - action="store_true", - help="run tests with pycoverage (conflicts with --pdb)") - except ImportError: - pass - - if DJANGO_FOUND: - parser.add_option('-J', '--django', dest='django', default=False, - action="store_true", - help='use pytest for django test cases') - return parser - - -def parseargs(parser): - """Parse the command line and return (options processed), (options to pass to - unittest_main()), (explicitfile or None). - """ - # parse the command line - options, args = parser.parse_args() - if options.pdb and getattr(options, 'coverage', False): - parser.error("'pdb' and 'coverage' options are exclusive") - filenames = [arg for arg in args if arg.endswith('.py')] - if filenames: - if len(filenames) > 1: - parser.error("only one filename is acceptable") - explicitfile = filenames[0] - args.remove(explicitfile) - else: - explicitfile = None - # someone wants DBC - testlib.ENABLE_DBC = options.dbc - newargs = parser.newargs - if options.skipped: - newargs.extend(['--skip', options.skipped]) - # restart implies exitfirst - if options.restart: - options.exitfirst = True - # append additional args to the new sys.argv and let unittest_main - # do the rest - newargs += args - return options, explicitfile - - - -def run(): - parser = make_parser() - rootdir, testercls = project_root(parser) - options, explicitfile = parseargs(parser) - # mock a new command line - sys.argv[1:] = parser.newargs - covermode = getattr(options, 'coverage', None) - cvg = None - if not '' in sys.path: - sys.path.insert(0, '') - if covermode: - # control_import_coverage(rootdir) - from logilab.devtools.lib.coverage import Coverage - cvg = Coverage([rootdir]) - cvg.erase() - cvg.start() - if DJANGO_FOUND and 
options.django: - tester = DjangoTester(cvg, options) - else: - tester = testercls(cvg, options) - if explicitfile: - cmd, args = tester.testfile, (explicitfile,) - elif options.testdir: - cmd, args = tester.testonedir, (options.testdir, options.exitfirst) - else: - cmd, args = tester.testall, (options.exitfirst,) - try: - try: - if options.profile: - import hotshot - prof = hotshot.Profile(options.profile) - prof.runcall(cmd, *args) - prof.close() - print 'profile data saved in', options.profile - else: - cmd(*args) - except SystemExit: - raise - except: - import traceback - traceback.print_exc() - finally: - if covermode: - cvg.stop() - cvg.save() - tester.show_report() - if covermode: - print 'coverage information stored, use it with pycoverage -ra' - sys.exit(tester.errcode) - -class SkipAwareTestProgram(unittest.TestProgram): - # XXX: don't try to stay close to unittest.py, use optparse - USAGE = """\ -Usage: %(progName)s [options] [test] [...] - -Options: - -h, --help Show this message - -v, --verbose Verbose output - -i, --pdb Enable test failure inspection - -x, --exitfirst Exit on first failure - -s, --skip skip test matching this pattern (no regexp for now) - -q, --quiet Minimal output - --color colorize tracebacks - - -m, --match Run only test whose tag match this pattern - - -P, --profile FILE: Run the tests using cProfile and saving results - in FILE - -Examples: - %(progName)s - run default set of tests - %(progName)s MyTestSuite - run suite 'MyTestSuite' - %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething - %(progName)s MyTestCase - run all 'test*' test methods - in MyTestCase -""" - def __init__(self, module='__main__', defaultTest=None, batchmode=False, - cvg=None, options=None, outstream=sys.stderr): - self.batchmode = batchmode - self.cvg = cvg - self.options = options - self.outstream = outstream - super(SkipAwareTestProgram, self).__init__( - module=module, defaultTest=defaultTest, - testLoader=NonStrictTestLoader()) - - def 
parseArgs(self, argv): - self.pdbmode = False - self.exitfirst = False - self.skipped_patterns = [] - self.test_pattern = None - self.tags_pattern = None - self.colorize = False - self.profile_name = None - import getopt - try: - options, args = getopt.getopt(argv[1:], 'hHvixrqcp:s:m:P:', - ['help', 'verbose', 'quiet', 'pdb', - 'exitfirst', 'restart', - 'skip=', 'color', 'match=', 'profile=']) - for opt, value in options: - if opt in ('-h', '-H', '--help'): - self.usageExit() - if opt in ('-i', '--pdb'): - self.pdbmode = True - if opt in ('-x', '--exitfirst'): - self.exitfirst = True - if opt in ('-r', '--restart'): - self.restart = True - self.exitfirst = True - if opt in ('-q', '--quiet'): - self.verbosity = 0 - if opt in ('-v', '--verbose'): - self.verbosity = 2 - if opt in ('-s', '--skip'): - self.skipped_patterns = [pat.strip() for pat in - value.split(', ')] - if opt == '--color': - self.colorize = True - if opt in ('-m', '--match'): - #self.tags_pattern = value - self.options["tag_pattern"] = value - if opt in ('-P', '--profile'): - self.profile_name = value - self.testLoader.skipped_patterns = self.skipped_patterns - if len(args) == 0 and self.defaultTest is None: - suitefunc = getattr(self.module, 'suite', None) - if isinstance(suitefunc, (types.FunctionType, - types.MethodType)): - self.test = self.module.suite() - else: - self.test = self.testLoader.loadTestsFromModule(self.module) - return - if len(args) > 0: - self.test_pattern = args[0] - self.testNames = args - else: - self.testNames = (self.defaultTest, ) - self.createTests() - except getopt.error, msg: - self.usageExit(msg) - - def runTests(self): - if self.profile_name: - import cProfile - cProfile.runctx('self._runTests()', globals(), locals(), self.profile_name ) - else: - return self._runTests() - - def _runTests(self): - self.testRunner = SkipAwareTextTestRunner(verbosity=self.verbosity, - stream=self.outstream, - exitfirst=self.exitfirst, - pdbmode=self.pdbmode, - cvg=self.cvg, - 
test_pattern=self.test_pattern, - skipped_patterns=self.skipped_patterns, - colorize=self.colorize, - batchmode=self.batchmode, - options=self.options) - - def removeSucceededTests(obj, succTests): - """ Recursive function that removes succTests from - a TestSuite or TestCase - """ - if isinstance(obj, unittest.TestSuite): - removeSucceededTests(obj._tests, succTests) - if isinstance(obj, list): - for el in obj[:]: - if isinstance(el, unittest.TestSuite): - removeSucceededTests(el, succTests) - elif isinstance(el, unittest.TestCase): - descr = '.'.join((el.__class__.__module__, - el.__class__.__name__, - el._testMethodName)) - if descr in succTests: - obj.remove(el) - # take care, self.options may be None - if getattr(self.options, 'restart', False): - # retrieve succeeded tests from FILE_RESTART - try: - restartfile = open(FILE_RESTART, 'r') - try: - succeededtests = list(elem.rstrip('\n\r') for elem in - restartfile.readlines()) - removeSucceededTests(self.test, succeededtests) - finally: - restartfile.close() - except Exception, ex: - raise Exception("Error while reading succeeded tests into %s: %s" - % (osp.join(os.getcwd(), FILE_RESTART), ex)) - - result = self.testRunner.run(self.test) - # help garbage collection: we want TestSuite, which hold refs to every - # executed TestCase, to be gc'ed - del self.test - if getattr(result, "debuggers", None) and \ - getattr(self, "pdbmode", None): - start_interactive_mode(result) - if not getattr(self, "batchmode", None): - sys.exit(not result.wasSuccessful()) - self.result = result - - -class SkipAwareTextTestRunner(unittest.TextTestRunner): - - def __init__(self, stream=sys.stderr, verbosity=1, - exitfirst=False, pdbmode=False, cvg=None, test_pattern=None, - skipped_patterns=(), colorize=False, batchmode=False, - options=None): - super(SkipAwareTextTestRunner, self).__init__(stream=stream, - verbosity=verbosity) - self.exitfirst = exitfirst - self.pdbmode = pdbmode - self.cvg = cvg - self.test_pattern = test_pattern - 
self.skipped_patterns = skipped_patterns - self.colorize = colorize - self.batchmode = batchmode - self.options = options - - def _this_is_skipped(self, testedname): - return any([(pat in testedname) for pat in self.skipped_patterns]) - - def _runcondition(self, test, skipgenerator=True): - if isinstance(test, testlib.InnerTest): - testname = test.name - else: - if isinstance(test, testlib.TestCase): - meth = test._get_test_method() - func = meth.im_func - testname = '%s.%s' % (meth.im_class.__name__, func.__name__) - elif isinstance(test, types.FunctionType): - func = test - testname = func.__name__ - elif isinstance(test, types.MethodType): - func = test.im_func - testname = '%s.%s' % (test.im_class.__name__, func.__name__) - else: - return True # Not sure when this happens - if testlib.is_generator(test) and skipgenerator: - return self.does_match_tags(test) # Let inner tests decide at run time - if self._this_is_skipped(testname): - return False # this was explicitly skipped - if self.test_pattern is not None: - try: - classpattern, testpattern = self.test_pattern.split('.') - klass, name = testname.split('.') - if classpattern not in klass or testpattern not in name: - return False - except ValueError: - if self.test_pattern not in testname: - return False - - return self.does_match_tags(test) - - def does_match_tags(self, test): - if self.options is not None: - tags_pattern = getattr(self.options, 'tags_pattern', None) - if tags_pattern is not None: - tags = getattr(test, 'tags', testlib.Tags()) - if tags.inherit and isinstance(test, types.MethodType): - tags = tags | getattr(test.im_class, 'tags', testlib.Tags()) - return tags.match(tags_pattern) - return True # no pattern - - def _makeResult(self): - return testlib.SkipAwareTestResult(self.stream, self.descriptions, - self.verbosity, self.exitfirst, - self.pdbmode, self.cvg, self.colorize) - - def run(self, test): - "Run the given test case or test suite." 
- result = self._makeResult() - startTime = time() - test(result, runcondition=self._runcondition, options=self.options) - stopTime = time() - timeTaken = stopTime - startTime - result.printErrors() - if not self.batchmode: - self.stream.writeln(result.separator2) - run = result.testsRun - self.stream.writeln("Ran %d test%s in %.3fs" % - (run, run != 1 and "s" or "", timeTaken)) - self.stream.writeln() - if not result.wasSuccessful(): - if self.colorize: - self.stream.write(textutils.colorize_ansi("FAILED", color='red')) - else: - self.stream.write("FAILED") - else: - if self.colorize: - self.stream.write(textutils.colorize_ansi("OK", color='green')) - else: - self.stream.write("OK") - failed, errored, skipped = map(len, (result.failures, - result.errors, - result.skipped)) - - det_results = [] - for name, value in (("failures", result.failures), - ("errors",result.errors), - ("skipped", result.skipped)): - if value: - det_results.append("%s=%i" % (name, len(value))) - if det_results: - self.stream.write(" (") - self.stream.write(', '.join(det_results)) - self.stream.write(")") - self.stream.writeln("") - return result - -class NonStrictTestLoader(unittest.TestLoader): - """ - Overrides default testloader to be able to omit classname when - specifying tests to run on command line. - - For example, if the file test_foo.py contains :: - - class FooTC(TestCase): - def test_foo1(self): # ... - def test_foo2(self): # ... - def test_bar1(self): # ... - - class BarTC(TestCase): - def test_bar2(self): # ... 
- - 'python test_foo.py' will run the 3 tests in FooTC - 'python test_foo.py FooTC' will run the 3 tests in FooTC - 'python test_foo.py test_foo' will run test_foo1 and test_foo2 - 'python test_foo.py test_foo1' will run test_foo1 - 'python test_foo.py test_bar' will run FooTC.test_bar1 and BarTC.test_bar2 - """ - - def __init__(self): - self.skipped_patterns = () - - # some magic here to accept empty list by extending - # and to provide callable capability - def loadTestsFromNames(self, names, module=None): - suites = [] - for name in names: - suites.extend(self.loadTestsFromName(name, module)) - return self.suiteClass(suites) - - def _collect_tests(self, module): - tests = {} - for obj in vars(module).values(): - if (issubclass(type(obj), (types.ClassType, type)) and - issubclass(obj, unittest.TestCase)): - classname = obj.__name__ - if classname[0] == '_' or self._this_is_skipped(classname): - continue - methodnames = [] - # obj is a TestCase class - for attrname in dir(obj): - if attrname.startswith(self.testMethodPrefix): - attr = getattr(obj, attrname) - if callable(attr): - methodnames.append(attrname) - # keep track of class (obj) for convenience - tests[classname] = (obj, methodnames) - return tests - - def loadTestsFromSuite(self, module, suitename): - try: - suite = getattr(module, suitename)() - except AttributeError: - return [] - assert hasattr(suite, '_tests'), \ - "%s.%s is not a valid TestSuite" % (module.__name__, suitename) - # python2.3 does not implement __iter__ on suites, we need to return - # _tests explicitly - return suite._tests - - def loadTestsFromName(self, name, module=None): - parts = name.split('.') - if module is None or len(parts) > 2: - # let the base class do its job here - return [super(NonStrictTestLoader, self).loadTestsFromName(name)] - tests = self._collect_tests(module) - collected = [] - if len(parts) == 1: - pattern = parts[0] - if callable(getattr(module, pattern, None) - ) and pattern not in tests: - # consider it as a 
suite - return self.loadTestsFromSuite(module, pattern) - if pattern in tests: - # case python unittest_foo.py MyTestTC - klass, methodnames = tests[pattern] - for methodname in methodnames: - collected = [klass(methodname) - for methodname in methodnames] - else: - # case python unittest_foo.py something - for klass, methodnames in tests.values(): - # skip methodname if matched by skipped_patterns - for skip_pattern in self.skipped_patterns: - methodnames = [methodname - for methodname in methodnames - if skip_pattern not in methodname] - collected += [klass(methodname) - for methodname in methodnames - if pattern in methodname] - elif len(parts) == 2: - # case "MyClass.test_1" - classname, pattern = parts - klass, methodnames = tests.get(classname, (None, [])) - for methodname in methodnames: - collected = [klass(methodname) for methodname in methodnames - if pattern in methodname] - return collected - - def _this_is_skipped(self, testedname): - return any([(pat in testedname) for pat in self.skipped_patterns]) - - def getTestCaseNames(self, testCaseClass): - """Return a sorted sequence of method names found within testCaseClass - """ - is_skipped = self._this_is_skipped - classname = testCaseClass.__name__ - if classname[0] == '_' or is_skipped(classname): - return [] - testnames = super(NonStrictTestLoader, self).getTestCaseNames( - testCaseClass) - return [testname for testname in testnames if not is_skipped(testname)] - -def _ts_run(self, result, runcondition=None, options=None): - self._wrapped_run(result,runcondition=runcondition, options=options) - self._tearDownPreviousClass(None, result) - self._handleModuleTearDown(result) - return result - -def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None): - for test in self: - if result.shouldStop: - break - if unittest_suite._isnotsuite(test): - self._tearDownPreviousClass(test, result) - self._handleModuleFixture(test, result) - self._handleClassSetUp(test, result) - 
result._previousTestClass = test.__class__ - if (getattr(test.__class__, '_classSetupFailed', False) or - getattr(result, '_moduleSetUpFailed', False)): - continue - - if hasattr(test, '_wrapped_run'): - try: - test._wrapped_run(result, debug, runcondition=runcondition, options=options) - except TypeError: - test._wrapped_run(result, debug) - elif not debug: - try: - test(result, runcondition, options) - except TypeError: - test(result) - else: - test.debug() - - -def enable_dbc(*args): - """ - Without arguments, return True if contracts can be enabled and should be - enabled (see option -d), return False otherwise. - - With arguments, return False if contracts can't or shouldn't be enabled, - otherwise weave ContractAspect with items passed as arguments. - """ - if not ENABLE_DBC: - return False - try: - from logilab.aspects.weaver import weaver - from logilab.aspects.lib.contracts import ContractAspect - except ImportError: - sys.stderr.write( - 'Warning: logilab.aspects is not available. Contracts disabled.') - return False - for arg in args: - weaver.weave_module(arg, ContractAspect) - return True - - -# monkeypatch unittest and doctest (ouch !) 
-unittest._TextTestResult = testlib.SkipAwareTestResult -unittest.TextTestRunner = SkipAwareTextTestRunner -unittest.TestLoader = NonStrictTestLoader -unittest.TestProgram = SkipAwareTestProgram - -if sys.version_info >= (2, 4): - doctest.DocTestCase.__bases__ = (testlib.TestCase,) - # XXX check python2.6 compatibility - #doctest.DocTestCase._cleanups = [] - #doctest.DocTestCase._out = [] -else: - unittest.FunctionTestCase.__bases__ = (testlib.TestCase,) -unittest.TestSuite.run = _ts_run -unittest.TestSuite._wrapped_run = _ts_wrapped_run diff --git a/pylibs/logilab/common/setup.py b/pylibs/logilab/common/setup.py deleted file mode 100644 index da44fe0c..00000000 --- a/pylibs/logilab/common/setup.py +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# pylint: disable=W0404,W0622,W0704,W0613 -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Generic Setup script, takes package info from __pkginfo__.py file. 
-""" -__docformat__ = "restructuredtext en" - -import os -import sys -import shutil -from os.path import isdir, exists, join - -try: - if os.environ.get('NO_SETUPTOOLS'): - raise ImportError() - from setuptools import setup - from setuptools.command import install_lib - USE_SETUPTOOLS = 1 -except ImportError: - from distutils.core import setup - from distutils.command import install_lib - USE_SETUPTOOLS = 0 - -try: - # python3 - from distutils.command.build_py import build_py_2to3 as build_py -except ImportError: - # python2.x - from distutils.command.build_py import build_py - -sys.modules.pop('__pkginfo__', None) -# import optional features -__pkginfo__ = __import__("__pkginfo__") -# import required features -from __pkginfo__ import modname, version, license, description, \ - web, author, author_email - -distname = getattr(__pkginfo__, 'distname', modname) -scripts = getattr(__pkginfo__, 'scripts', []) -data_files = getattr(__pkginfo__, 'data_files', None) -subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) -include_dirs = getattr(__pkginfo__, 'include_dirs', []) -ext_modules = getattr(__pkginfo__, 'ext_modules', None) -install_requires = getattr(__pkginfo__, 'install_requires', None) -dependency_links = getattr(__pkginfo__, 'dependency_links', []) - -STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build') - -IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~') - -if exists('README'): - long_description = open('README').read() -else: - long_description = '' - -def ensure_scripts(linux_scripts): - """Creates the proper script names required for each platform - (taken from 4Suite) - """ - from distutils import util - if util.get_platform()[:3] == 'win': - scripts_ = [script + '.bat' for script in linux_scripts] - else: - scripts_ = linux_scripts - return scripts_ - -def get_packages(directory, prefix): - """return a list of subpackages for the given directory""" - result = [] - for package in os.listdir(directory): - absfile = join(directory, 
package) - if isdir(absfile): - if exists(join(absfile, '__init__.py')) or \ - package in ('test', 'tests'): - if prefix: - result.append('%s.%s' % (prefix, package)) - else: - result.append(package) - result += get_packages(absfile, result[-1]) - return result - -EMPTY_FILE = '''"""generated file, don't modify or your data will be lost""" -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - pass -''' - -class MyInstallLib(install_lib.install_lib): - """extend install_lib command to handle package __init__.py and - include_dirs variable if necessary - """ - def run(self): - """overridden from install_lib class""" - install_lib.install_lib.run(self) - # create Products.__init__.py if needed - if subpackage_of: - product_init = join(self.install_dir, subpackage_of, '__init__.py') - if not exists(product_init): - self.announce('creating %s' % product_init) - stream = open(product_init, 'w') - stream.write(EMPTY_FILE) - stream.close() - # manually install included directories if any - if include_dirs: - if subpackage_of: - base = join(subpackage_of, modname) - else: - base = modname - for directory in include_dirs: - dest = join(self.install_dir, base, directory) - shutil.rmtree(dest, ignore_errors=True) - shutil.copytree(directory, dest) - -def install(**kwargs): - """setup entry point""" - if USE_SETUPTOOLS: - if '--force-manifest' in sys.argv: - sys.argv.remove('--force-manifest') - # install-layout option was introduced in 2.5.3-1~exp1 - elif sys.version_info < (2, 5, 4) and '--install-layout=deb' in sys.argv: - sys.argv.remove('--install-layout=deb') - if subpackage_of: - package = subpackage_of + '.' 
+ modname - kwargs['package_dir'] = {package : '.'} - packages = [package] + get_packages(os.getcwd(), package) - if USE_SETUPTOOLS: - kwargs['namespace_packages'] = [subpackage_of] - else: - kwargs['package_dir'] = {modname : '.'} - packages = [modname] + get_packages(os.getcwd(), modname) - if USE_SETUPTOOLS and install_requires: - kwargs['install_requires'] = install_requires - kwargs['dependency_links'] = dependency_links - kwargs['packages'] = packages - return setup(name = distname, - version = version, - license = license, - description = description, - long_description = long_description, - author = author, - author_email = author_email, - url = web, - scripts = ensure_scripts(scripts), - data_files = data_files, - ext_modules = ext_modules, - cmdclass = {'install_lib': MyInstallLib, - 'build_py': build_py}, - **kwargs - ) - -if __name__ == '__main__' : - install() diff --git a/pylibs/logilab/common/shellutils.py b/pylibs/logilab/common/shellutils.py deleted file mode 100644 index c7139135..00000000 --- a/pylibs/logilab/common/shellutils.py +++ /dev/null @@ -1,443 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""shell/term utilities, useful to write some python scripts instead of shell -scripts. -""" -__docformat__ = "restructuredtext en" - -import os -import glob -import shutil -import stat -import sys -import tempfile -import time -import fnmatch -import errno -import string -import random -from os.path import exists, isdir, islink, basename, join - -from logilab.common import STD_BLACKLIST, _handle_blacklist -from logilab.common.compat import raw_input -from logilab.common.compat import str_to_bytes - -try: - from logilab.common.proc import ProcInfo, NoSuchProcess -except ImportError: - # windows platform - class NoSuchProcess(Exception): pass - - def ProcInfo(pid): - raise NoSuchProcess() - - -class tempdir(object): - - def __enter__(self): - self.path = tempfile.mkdtemp() - return self.path - - def __exit__(self, exctype, value, traceback): - # rmtree in all cases - shutil.rmtree(self.path) - return traceback is None - - -class pushd(object): - def __init__(self, directory): - self.directory = directory - - def __enter__(self): - self.cwd = os.getcwd() - os.chdir(self.directory) - return self.directory - - def __exit__(self, exctype, value, traceback): - os.chdir(self.cwd) - - -def chown(path, login=None, group=None): - """Same as `os.chown` function but accepting user login or group name as - argument. If login or group is omitted, it's left unchanged. - - Note: you must own the file to chown it (or be root). Otherwise OSError is raised. - """ - if login is None: - uid = -1 - else: - try: - uid = int(login) - except ValueError: - import pwd # Platforms: Unix - uid = pwd.getpwnam(login).pw_uid - if group is None: - gid = -1 - else: - try: - gid = int(group) - except ValueError: - import grp - gid = grp.getgrnam(group).gr_gid - os.chown(path, uid, gid) - -def mv(source, destination, _action=shutil.move): - """A shell-like mv, supporting wildcards. 
- """ - sources = glob.glob(source) - if len(sources) > 1: - assert isdir(destination) - for filename in sources: - _action(filename, join(destination, basename(filename))) - else: - try: - source = sources[0] - except IndexError: - raise OSError('No file matching %s' % source) - if isdir(destination) and exists(destination): - destination = join(destination, basename(source)) - try: - _action(source, destination) - except OSError, ex: - raise OSError('Unable to move %r to %r (%s)' % ( - source, destination, ex)) - -def rm(*files): - """A shell-like rm, supporting wildcards. - """ - for wfile in files: - for filename in glob.glob(wfile): - if islink(filename): - os.remove(filename) - elif isdir(filename): - shutil.rmtree(filename) - else: - os.remove(filename) - -def cp(source, destination): - """A shell-like cp, supporting wildcards. - """ - mv(source, destination, _action=shutil.copy) - -def find(directory, exts, exclude=False, blacklist=STD_BLACKLIST): - """Recursively find files ending with the given extensions from the directory. 
- - :type directory: str - :param directory: - directory where the search should start - - :type exts: basestring or list or tuple - :param exts: - extensions or lists or extensions to search - - :type exclude: boolean - :param exts: - if this argument is True, returning files NOT ending with the given - extensions - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` - - :rtype: list - :return: - the list of all matching files - """ - if isinstance(exts, basestring): - exts = (exts,) - if exclude: - def match(filename, exts): - for ext in exts: - if filename.endswith(ext): - return False - return True - else: - def match(filename, exts): - for ext in exts: - if filename.endswith(ext): - return True - return False - files = [] - for dirpath, dirnames, filenames in os.walk(directory): - _handle_blacklist(blacklist, dirnames, filenames) - # don't append files if the directory is blacklisted - dirname = basename(dirpath) - if dirname in blacklist: - continue - files.extend([join(dirpath, f) for f in filenames if match(f, exts)]) - return files - - -def globfind(directory, pattern, blacklist=STD_BLACKLIST): - """Recursively finds files matching glob `pattern` under `directory`. - - This is an alternative to `logilab.common.shellutils.find`. - - :type directory: str - :param directory: - directory where the search should start - - :type pattern: basestring - :param pattern: - the glob pattern (e.g *.py, foo*.py, etc.) 
- - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` - - :rtype: iterator - :return: - iterator over the list of all matching files - """ - for curdir, dirnames, filenames in os.walk(directory): - _handle_blacklist(blacklist, dirnames, filenames) - for fname in fnmatch.filter(filenames, pattern): - yield join(curdir, fname) - -def unzip(archive, destdir): - import zipfile - if not exists(destdir): - os.mkdir(destdir) - zfobj = zipfile.ZipFile(archive) - for name in zfobj.namelist(): - if name.endswith('/'): - os.mkdir(join(destdir, name)) - else: - outfile = open(join(destdir, name), 'wb') - outfile.write(zfobj.read(name)) - outfile.close() - -class Execute: - """This is a deadlock safe version of popen2 (no stdin), that returns - an object with errorlevel, out and err. - """ - - def __init__(self, command): - outfile = tempfile.mktemp() - errfile = tempfile.mktemp() - self.status = os.system("( %s ) >%s 2>%s" % - (command, outfile, errfile)) >> 8 - self.out = open(outfile, "r").read() - self.err = open(errfile, "r").read() - os.remove(outfile) - os.remove(errfile) - -def acquire_lock(lock_file, max_try=10, delay=10, max_delay=3600): - """Acquire a lock represented by a file on the file system - - If the process written in lock file doesn't exist anymore, we remove the - lock file immediately - If age of the lock_file is greater than max_delay, then we raise a UserWarning - """ - count = abs(max_try) - while count: - try: - fd = os.open(lock_file, os.O_EXCL | os.O_RDWR | os.O_CREAT) - os.write(fd, str_to_bytes(str(os.getpid())) ) - os.close(fd) - return True - except OSError, e: - if e.errno == errno.EEXIST: - try: - fd = open(lock_file, "r") - pid = int(fd.readline()) - pi = ProcInfo(pid) - age = (time.time() - os.stat(lock_file)[stat.ST_MTIME]) - if age / max_delay > 1 : - raise UserWarning("Command '%s' (pid %s) has locked the " - "file '%s' for %s 
minutes" - % (pi.name(), pid, lock_file, age/60)) - except UserWarning: - raise - except NoSuchProcess: - os.remove(lock_file) - except Exception: - # The try block is not essential. can be skipped. - # Note: ProcInfo object is only available for linux - # process information are not accessible... - # or lock_file is no more present... - pass - else: - raise - count -= 1 - time.sleep(delay) - else: - raise Exception('Unable to acquire %s' % lock_file) - -def release_lock(lock_file): - """Release a lock represented by a file on the file system.""" - os.remove(lock_file) - - -class ProgressBar(object): - """A simple text progression bar.""" - - def __init__(self, nbops, size=20, stream=sys.stdout, title=''): - if title: - self._fstr = '\r%s [%%-%ss]' % (title, int(size)) - else: - self._fstr = '\r[%%-%ss]' % int(size) - self._stream = stream - self._total = nbops - self._size = size - self._current = 0 - self._progress = 0 - self._current_text = None - self._last_text_write_size = 0 - - def _get_text(self): - return self._current_text - - def _set_text(self, text=None): - if text != self._current_text: - self._current_text = text - self.refresh() - - def _del_text(self): - self.text = None - - text = property(_get_text, _set_text, _del_text) - - def update(self): - """Update the progression bar.""" - self._current += 1 - progress = int((float(self._current)/float(self._total))*self._size) - if progress > self._progress: - self._progress = progress - self.refresh() - - def refresh(self): - """Refresh the progression bar display.""" - self._stream.write(self._fstr % ('.' 
* min(self._progress, self._size)) ) - if self._last_text_write_size or self._current_text: - template = ' %%-%is' % (self._last_text_write_size) - text = self._current_text - if text is None: - text = '' - self._stream.write(template % text) - self._last_text_write_size = len(text.rstrip()) - self._stream.flush() - - def finish(self): - self._stream.write('\n') - self._stream.flush() - - -class DummyProgressBar(object): - __slot__ = ('text',) - - def refresh(self): - pass - def update(self): - pass - def finish(self): - pass - - -_MARKER = object() -class progress(object): - - def __init__(self, nbops=_MARKER, size=_MARKER, stream=_MARKER, title=_MARKER, enabled=True): - self.nbops = nbops - self.size = size - self.stream = stream - self.title = title - self.enabled = enabled - - def __enter__(self): - if self.enabled: - kwargs = {} - for attr in ('nbops', 'size', 'stream', 'title'): - value = getattr(self, attr) - if value is not _MARKER: - kwargs[attr] = value - self.pb = ProgressBar(**kwargs) - else: - self.pb = DummyProgressBar() - return self.pb - - def __exit__(self, exc_type, exc_val, exc_tb): - self.pb.finish() - -class RawInput(object): - - def __init__(self, input=None, printer=None): - self._input = input or raw_input - self._print = printer - - def ask(self, question, options, default): - assert default in options - choices = [] - for option in options: - if option == default: - label = option[0].upper() - else: - label = option[0].lower() - if len(option) > 1: - label += '(%s)' % option[1:].lower() - choices.append((option, label)) - prompt = "%s [%s]: " % (question, - '/'.join([opt[1] for opt in choices])) - tries = 3 - while tries > 0: - answer = self._input(prompt).strip().lower() - if not answer: - return default - possible = [option for option, label in choices - if option.lower().startswith(answer)] - if len(possible) == 1: - return possible[0] - elif len(possible) == 0: - msg = '%s is not an option.' 
% answer - else: - msg = ('%s is an ambiguous answer, do you mean %s ?' % ( - answer, ' or '.join(possible))) - if self._print: - self._print(msg) - else: - print msg - tries -= 1 - raise Exception('unable to get a sensible answer') - - def confirm(self, question, default_is_yes=True): - default = default_is_yes and 'y' or 'n' - answer = self.ask(question, ('y', 'n'), default) - return answer == 'y' - -ASK = RawInput() - - -def getlogin(): - """avoid using os.getlogin() because of strange tty / stdin problems - (man 3 getlogin) - Another solution would be to use $LOGNAME, $USER or $USERNAME - """ - if sys.platform != 'win32': - import pwd # Platforms: Unix - return pwd.getpwuid(os.getuid())[0] - else: - return os.environ['USERNAME'] - -def generate_password(length=8, vocab=string.ascii_letters + string.digits): - """dumb password generation function""" - pwd = '' - for i in xrange(length): - pwd += random.choice(vocab) - return pwd diff --git a/pylibs/logilab/common/sphinx_ext.py b/pylibs/logilab/common/sphinx_ext.py deleted file mode 100644 index a24608ce..00000000 --- a/pylibs/logilab/common/sphinx_ext.py +++ /dev/null @@ -1,87 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-from logilab.common.decorators import monkeypatch - -from sphinx.ext import autodoc - -class DocstringOnlyModuleDocumenter(autodoc.ModuleDocumenter): - objtype = 'docstring' - def format_signature(self): - pass - def add_directive_header(self, sig): - pass - def document_members(self, all_members=False): - pass - - def resolve_name(self, modname, parents, path, base): - if modname is not None: - return modname, parents + [base] - return (path or '') + base, [] - - -#autodoc.add_documenter(DocstringOnlyModuleDocumenter) - -def setup(app): - app.add_autodocumenter(DocstringOnlyModuleDocumenter) - - - -from sphinx.ext.autodoc import (ViewList, Options, AutodocReporter, nodes, - assemble_option_dict, nested_parse_with_titles) - -@monkeypatch(autodoc.AutoDirective) -def run(self): - self.filename_set = set() # a set of dependent filenames - self.reporter = self.state.document.reporter - self.env = self.state.document.settings.env - self.warnings = [] - self.result = ViewList() - - # find out what documenter to call - objtype = self.name[4:] - doc_class = self._registry[objtype] - # process the options with the selected documenter's option_spec - self.genopt = Options(assemble_option_dict( - self.options.items(), doc_class.option_spec)) - # generate the output - documenter = doc_class(self, self.arguments[0]) - documenter.generate(more_content=self.content) - if not self.result: - return self.warnings - - # record all filenames as dependencies -- this will at least - # partially make automatic invalidation possible - for fn in self.filename_set: - self.env.note_dependency(fn) - - # use a custom reporter that correctly assigns lines to source - # filename/description and lineno - old_reporter = self.state.memo.reporter - self.state.memo.reporter = AutodocReporter(self.result, - self.state.memo.reporter) - if self.name in ('automodule', 'autodocstring'): - node = nodes.section() - # necessary so that the child nodes get the right source/line set - node.document = 
self.state.document - nested_parse_with_titles(self.state, self.result, node) - else: - node = nodes.paragraph() - node.document = self.state.document - self.state.nested_parse(self.result, 0, node) - self.state.memo.reporter = old_reporter - return self.warnings + node.children diff --git a/pylibs/logilab/common/sphinxutils.py b/pylibs/logilab/common/sphinxutils.py deleted file mode 100644 index ab6e8a18..00000000 --- a/pylibs/logilab/common/sphinxutils.py +++ /dev/null @@ -1,122 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Sphinx utils - -ModuleGenerator: Generate a file that lists all the modules of a list of -packages in order to pull all the docstring. -This should not be used in a makefile to systematically generate sphinx -documentation! 
- -Typical usage: - ->>> from logilab.common.sphinxutils import ModuleGenerator ->>> mgen = ModuleGenerator('logilab common', '/home/adim/src/logilab/common') ->>> mgen.generate('api_logilab_common.rst', exclude_dirs=('test',)) -""" - -import os, sys -import os.path as osp -import inspect - -from logilab.common import STD_BLACKLIST -from logilab.common.shellutils import globfind -from logilab.common.modutils import load_module_from_file, modpath_from_file - -def module_members(module): - members = [] - for name, value in inspect.getmembers(module): - if getattr(value, '__module__', None) == module.__name__: - members.append( (name, value) ) - return sorted(members) - - -def class_members(klass): - return sorted([name for name in vars(klass) - if name not in ('__doc__', '__module__', - '__dict__', '__weakref__')]) - -class ModuleGenerator: - file_header = """.. -*- coding: utf-8 -*-\n\n%s\n""" - module_def = """ -:mod:`%s` -=======%s - -.. automodule:: %s - :members: %s -""" - class_def = """ - -.. 
autoclass:: %s - :members: %s - -""" - - def __init__(self, project_title, code_dir): - self.title = project_title - self.code_dir = osp.abspath(code_dir) - - def generate(self, dest_file, exclude_dirs=STD_BLACKLIST): - """make the module file""" - self.fn = open(dest_file, 'w') - num = len(self.title) + 6 - title = "=" * num + "\n %s API\n" % self.title + "=" * num - self.fn.write(self.file_header % title) - self.gen_modules(exclude_dirs=exclude_dirs) - self.fn.close() - - def gen_modules(self, exclude_dirs): - """generate all modules""" - for module in self.find_modules(exclude_dirs): - modname = module.__name__ - classes = [] - modmembers = [] - for objname, obj in module_members(module): - if inspect.isclass(obj): - classmembers = class_members(obj) - classes.append( (objname, classmembers) ) - else: - modmembers.append(objname) - self.fn.write(self.module_def % (modname, '=' * len(modname), - modname, - ', '.join(modmembers))) - for klass, members in classes: - self.fn.write(self.class_def % (klass, ', '.join(members))) - - def find_modules(self, exclude_dirs): - basepath = osp.dirname(self.code_dir) - basedir = osp.basename(basepath) + osp.sep - if basedir not in sys.path: - sys.path.insert(1, basedir) - for filepath in globfind(self.code_dir, '*.py', exclude_dirs): - if osp.basename(filepath) in ('setup.py', '__pkginfo__.py'): - continue - try: - module = load_module_from_file(filepath) - except: # module might be broken or magic - dotted_path = modpath_from_file(filepath) - module = type('.'.join(dotted_path), (), {}) # mock it - yield module - - -if __name__ == '__main__': - # example : - title, code_dir, outfile = sys.argv[1:] - generator = ModuleGenerator(title, code_dir) - # XXX modnames = ['logilab'] - generator.generate(outfile, ('test', 'tests', 'examples', - 'data', 'doc', '.hg', 'migration')) diff --git a/pylibs/logilab/common/table.py b/pylibs/logilab/common/table.py deleted file mode 100644 index 744bb785..00000000 --- 
a/pylibs/logilab/common/table.py +++ /dev/null @@ -1,923 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Table management module.""" -__docformat__ = "restructuredtext en" - - -class Table(object): - """Table defines a data table with column and row names. 
- inv: - len(self.data) <= len(self.row_names) - forall(self.data, lambda x: len(x) <= len(self.col_names)) - """ - - def __init__(self, default_value=0, col_names=None, row_names=None): - self.col_names = [] - self.row_names = [] - self.data = [] - self.default_value = default_value - if col_names: - self.create_columns(col_names) - if row_names: - self.create_rows(row_names) - - def _next_row_name(self): - return 'row%s' % (len(self.row_names)+1) - - def __iter__(self): - return iter(self.data) - - def __eq__(self, other): - if other is None: - return False - else: - return list(self) == list(other) - - def __ne__(self, other): - return not self == other - - def __len__(self): - return len(self.row_names) - - ## Rows / Columns creation ################################################# - def create_rows(self, row_names): - """Appends row_names to the list of existing rows - """ - self.row_names.extend(row_names) - for row_name in row_names: - self.data.append([self.default_value]*len(self.col_names)) - - def create_columns(self, col_names): - """Appends col_names to the list of existing columns - """ - for col_name in col_names: - self.create_column(col_name) - - def create_row(self, row_name=None): - """Creates a rowname to the row_names list - """ - row_name = row_name or self._next_row_name() - self.row_names.append(row_name) - self.data.append([self.default_value]*len(self.col_names)) - - - def create_column(self, col_name): - """Creates a colname to the col_names list - """ - self.col_names.append(col_name) - for row in self.data: - row.append(self.default_value) - - ## Sort by column ########################################################## - def sort_by_column_id(self, col_id, method = 'asc'): - """Sorts the table (in-place) according to data stored in col_id - """ - try: - col_index = self.col_names.index(col_id) - self.sort_by_column_index(col_index, method) - except ValueError: - raise KeyError("Col (%s) not found in table" % (col_id)) - - - def 
sort_by_column_index(self, col_index, method = 'asc'): - """Sorts the table 'in-place' according to data stored in col_index - - method should be in ('asc', 'desc') - """ - sort_list = sorted([(row[col_index], row, row_name) - for row, row_name in zip(self.data, self.row_names)]) - # Sorting sort_list will sort according to col_index - # If we want reverse sort, then reverse list - if method.lower() == 'desc': - sort_list.reverse() - - # Rebuild data / row names - self.data = [] - self.row_names = [] - for val, row, row_name in sort_list: - self.data.append(row) - self.row_names.append(row_name) - - def groupby(self, colname, *others): - """builds indexes of data - :returns: nested dictionaries pointing to actual rows - """ - groups = {} - colnames = (colname,) + others - col_indexes = [self.col_names.index(col_id) for col_id in colnames] - for row in self.data: - ptr = groups - for col_index in col_indexes[:-1]: - ptr = ptr.setdefault(row[col_index], {}) - ptr = ptr.setdefault(row[col_indexes[-1]], - Table(default_value=self.default_value, - col_names=self.col_names)) - ptr.append_row(tuple(row)) - return groups - - def select(self, colname, value): - grouped = self.groupby(colname) - try: - return grouped[value] - except KeyError: - return [] - - def remove(self, colname, value): - col_index = self.col_names.index(colname) - for row in self.data[:]: - if row[col_index] == value: - self.data.remove(row) - - - ## The 'setter' part ####################################################### - def set_cell(self, row_index, col_index, data): - """sets value of cell 'row_indew', 'col_index' to data - """ - self.data[row_index][col_index] = data - - - def set_cell_by_ids(self, row_id, col_id, data): - """sets value of cell mapped by row_id and col_id to data - Raises a KeyError if row_id or col_id are not found in the table - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - else: - try: - 
col_index = self.col_names.index(col_id) - self.data[row_index][col_index] = data - except ValueError: - raise KeyError("Column (%s) not found in table" % (col_id)) - - - def set_row(self, row_index, row_data): - """sets the 'row_index' row - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - self.data[row_index] = row_data - - - def set_row_by_id(self, row_id, row_data): - """sets the 'row_id' column - pre: - type(row_data) == types.ListType - len(row_data) == len(self.row_names) - Raises a KeyError if row_id is not found - """ - try: - row_index = self.row_names.index(row_id) - self.set_row(row_index, row_data) - except ValueError: - raise KeyError('Row (%s) not found in table' % (row_id)) - - - def append_row(self, row_data, row_name=None): - """Appends a row to the table - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - row_name = row_name or self._next_row_name() - self.row_names.append(row_name) - self.data.append(row_data) - return len(self.data) - 1 - - def insert_row(self, index, row_data, row_name=None): - """Appends row_data before 'index' in the table. To make 'insert' - behave like 'list.insert', inserting in an out of range index will - insert row_data to the end of the list - pre: - type(row_data) == types.ListType - len(row_data) == len(self.col_names) - """ - row_name = row_name or self._next_row_name() - self.row_names.insert(index, row_name) - self.data.insert(index, row_data) - - - def delete_row(self, index): - """Deletes the 'index' row in the table, and returns it. - Raises an IndexError if index is out of range - """ - self.row_names.pop(index) - return self.data.pop(index) - - - def delete_row_by_id(self, row_id): - """Deletes the 'row_id' row in the table. - Raises a KeyError if row_id was not found. 
- """ - try: - row_index = self.row_names.index(row_id) - self.delete_row(row_index) - except ValueError: - raise KeyError('Row (%s) not found in table' % (row_id)) - - - def set_column(self, col_index, col_data): - """sets the 'col_index' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - - for row_index, cell_data in enumerate(col_data): - self.data[row_index][col_index] = cell_data - - - def set_column_by_id(self, col_id, col_data): - """sets the 'col_id' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.col_names) - Raises a KeyError if col_id is not found - """ - try: - col_index = self.col_names.index(col_id) - self.set_column(col_index, col_data) - except ValueError: - raise KeyError('Column (%s) not found in table' % (col_id)) - - - def append_column(self, col_data, col_name): - """Appends the 'col_index' column - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - self.col_names.append(col_name) - for row_index, cell_data in enumerate(col_data): - self.data[row_index].append(cell_data) - - - def insert_column(self, index, col_data, col_name): - """Appends col_data before 'index' in the table. To make 'insert' - behave like 'list.insert', inserting in an out of range index will - insert col_data to the end of the list - pre: - type(col_data) == types.ListType - len(col_data) == len(self.row_names) - """ - self.col_names.insert(index, col_name) - for row_index, cell_data in enumerate(col_data): - self.data[row_index].insert(index, cell_data) - - - def delete_column(self, index): - """Deletes the 'index' column in the table, and returns it. - Raises an IndexError if index is out of range - """ - self.col_names.pop(index) - return [row.pop(index) for row in self.data] - - - def delete_column_by_id(self, col_id): - """Deletes the 'col_id' col in the table. - Raises a KeyError if col_id was not found. 
- """ - try: - col_index = self.col_names.index(col_id) - self.delete_column(col_index) - except ValueError: - raise KeyError('Column (%s) not found in table' % (col_id)) - - - ## The 'getter' part ####################################################### - - def get_shape(self): - """Returns a tuple which represents the table's shape - """ - return len(self.row_names), len(self.col_names) - shape = property(get_shape) - - def __getitem__(self, indices): - """provided for convenience""" - rows, multirows = None, False - cols, multicols = None, False - if isinstance(indices, tuple): - rows = indices[0] - if len(indices) > 1: - cols = indices[1] - else: - rows = indices - # define row slice - if isinstance(rows, str): - try: - rows = self.row_names.index(rows) - except ValueError: - raise KeyError("Row (%s) not found in table" % (rows)) - if isinstance(rows, int): - rows = slice(rows, rows+1) - multirows = False - else: - rows = slice(None) - multirows = True - # define col slice - if isinstance(cols, str): - try: - cols = self.col_names.index(cols) - except ValueError: - raise KeyError("Column (%s) not found in table" % (cols)) - if isinstance(cols, int): - cols = slice(cols, cols+1) - multicols = False - else: - cols = slice(None) - multicols = True - # get sub-table - tab = Table() - tab.default_value = self.default_value - tab.create_rows(self.row_names[rows]) - tab.create_columns(self.col_names[cols]) - for idx, row in enumerate(self.data[rows]): - tab.set_row(idx, row[cols]) - if multirows : - if multicols: - return tab - else: - return [item[0] for item in tab.data] - else: - if multicols: - return tab.data[0] - else: - return tab.data[0][0] - - def get_cell_by_ids(self, row_id, col_id): - """Returns the element at [row_id][col_id] - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - else: - try: - col_index = self.col_names.index(col_id) - except ValueError: - raise 
KeyError("Column (%s) not found in table" % (col_id)) - return self.data[row_index][col_index] - - def get_row_by_id(self, row_id): - """Returns the 'row_id' row - """ - try: - row_index = self.row_names.index(row_id) - except ValueError: - raise KeyError("Row (%s) not found in table" % (row_id)) - return self.data[row_index] - - def get_column_by_id(self, col_id, distinct=False): - """Returns the 'col_id' col - """ - try: - col_index = self.col_names.index(col_id) - except ValueError: - raise KeyError("Column (%s) not found in table" % (col_id)) - return self.get_column(col_index, distinct) - - def get_columns(self): - """Returns all the columns in the table - """ - return [self[:, index] for index in range(len(self.col_names))] - - def get_column(self, col_index, distinct=False): - """get a column by index""" - col = [row[col_index] for row in self.data] - if distinct: - col = list(set(col)) - return col - - def apply_stylesheet(self, stylesheet): - """Applies the stylesheet to this table - """ - for instruction in stylesheet.instructions: - eval(instruction) - - - def transpose(self): - """Keeps the self object intact, and returns the transposed (rotated) - table. - """ - transposed = Table() - transposed.create_rows(self.col_names) - transposed.create_columns(self.row_names) - for col_index, column in enumerate(self.get_columns()): - transposed.set_row(col_index, column) - return transposed - - - def pprint(self): - """returns a string representing the table in a pretty - printed 'text' format. 
- """ - # The maximum row name (to know the start_index of the first col) - max_row_name = 0 - for row_name in self.row_names: - if len(row_name) > max_row_name: - max_row_name = len(row_name) - col_start = max_row_name + 5 - - lines = [] - # Build the 'first' line <=> the col_names one - # The first cell <=> an empty one - col_names_line = [' '*col_start] - for col_name in self.col_names: - col_names_line.append(col_name.encode('iso-8859-1') + ' '*5) - lines.append('|' + '|'.join(col_names_line) + '|') - max_line_length = len(lines[0]) - - # Build the table - for row_index, row in enumerate(self.data): - line = [] - # First, build the row_name's cell - row_name = self.row_names[row_index].encode('iso-8859-1') - line.append(row_name + ' '*(col_start-len(row_name))) - - # Then, build all the table's cell for this line. - for col_index, cell in enumerate(row): - col_name_length = len(self.col_names[col_index]) + 5 - data = str(cell) - line.append(data + ' '*(col_name_length - len(data))) - lines.append('|' + '|'.join(line) + '|') - if len(lines[-1]) > max_line_length: - max_line_length = len(lines[-1]) - - # Wrap the table with '-' to make a frame - lines.insert(0, '-'*max_line_length) - lines.append('-'*max_line_length) - return '\n'.join(lines) - - - def __repr__(self): - return repr(self.data) - - def as_text(self): - data = [] - # We must convert cells into strings before joining them - for row in self.data: - data.append([str(cell) for cell in row]) - lines = ['\t'.join(row) for row in data] - return '\n'.join(lines) - - - -class TableStyle: - """Defines a table's style - """ - - def __init__(self, table): - - self._table = table - self.size = dict([(col_name, '1*') for col_name in table.col_names]) - # __row_column__ is a special key to define the first column which - # actually has no name (<=> left most column <=> row names column) - self.size['__row_column__'] = '1*' - self.alignment = dict([(col_name, 'right') - for col_name in table.col_names]) - 
self.alignment['__row_column__'] = 'right' - - # We shouldn't have to create an entry for - # the 1st col (the row_column one) - self.units = dict([(col_name, '') for col_name in table.col_names]) - self.units['__row_column__'] = '' - - # XXX FIXME : params order should be reversed for all set() methods - def set_size(self, value, col_id): - """sets the size of the specified col_id to value - """ - self.size[col_id] = value - - def set_size_by_index(self, value, col_index): - """Allows to set the size according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.size[col_id] = value - - - def set_alignment(self, value, col_id): - """sets the alignment of the specified col_id to value - """ - self.alignment[col_id] = value - - - def set_alignment_by_index(self, value, col_index): - """Allows to set the alignment according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.alignment[col_id] = value - - - def set_unit(self, value, col_id): - """sets the unit of the specified col_id to value - """ - self.units[col_id] = value - - - def set_unit_by_index(self, value, col_index): - """Allows to set the unit according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! 
- (Note that in the 'unit' case, you shouldn't have to set a unit - for the 1st column (the __row__column__ one)) - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - self.units[col_id] = value - - - def get_size(self, col_id): - """Returns the size of the specified col_id - """ - return self.size[col_id] - - - def get_size_by_index(self, col_index): - """Allows to get the size according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.size[col_id] - - - def get_alignment(self, col_id): - """Returns the alignment of the specified col_id - """ - return self.alignment[col_id] - - - def get_alignment_by_index(self, col_index): - """Allors to get the alignment according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.alignment[col_id] - - - def get_unit(self, col_id): - """Returns the unit of the specified col_id - """ - return self.units[col_id] - - - def get_unit_by_index(self, col_index): - """Allors to get the unit according to the column index rather than - using the column's id. - BE CAREFUL : the '0' column is the '__row_column__' one ! - """ - if col_index == 0: - col_id = '__row_column__' - else: - col_id = self._table.col_names[col_index-1] - - return self.units[col_id] - - -import re -CELL_PROG = re.compile("([0-9]+)_([0-9]+)") - -class TableStyleSheet: - """A simple Table stylesheet - Rules are expressions where cells are defined by the row_index - and col_index separated by an underscore ('_'). 
- For example, suppose you want to say that the (2,5) cell must be - the sum of its two preceding cells in the row, you would create - the following rule : - 2_5 = 2_3 + 2_4 - You can also use all the math.* operations you want. For example: - 2_5 = sqrt(2_3**2 + 2_4**2) - """ - - def __init__(self, rules = None): - rules = rules or [] - self.rules = [] - self.instructions = [] - for rule in rules: - self.add_rule(rule) - - - def add_rule(self, rule): - """Adds a rule to the stylesheet rules - """ - try: - source_code = ['from math import *'] - source_code.append(CELL_PROG.sub(r'self.data[\1][\2]', rule)) - self.instructions.append(compile('\n'.join(source_code), - 'table.py', 'exec')) - self.rules.append(rule) - except SyntaxError: - print "Bad Stylesheet Rule : %s [skipped]"%rule - - - def add_rowsum_rule(self, dest_cell, row_index, start_col, end_col): - """Creates and adds a rule to sum over the row at row_index from - start_col to end_col. - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_col >= 0 - end_col > start_col - """ - cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, - end_col + 1)] - rule = '%d_%d=' % dest_cell + '+'.join(cell_list) - self.add_rule(rule) - - - def add_rowavg_rule(self, dest_cell, row_index, start_col, end_col): - """Creates and adds a rule to make the row average (from start_col - to end_col) - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_col >= 0 - end_col > start_col - """ - cell_list = ['%d_%d'%(row_index, index) for index in range(start_col, - end_col + 1)] - num = (end_col - start_col + 1) - rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num - self.add_rule(rule) - - - def add_colsum_rule(self, dest_cell, col_index, start_row, end_row): - """Creates and adds a rule to sum over the col at col_index from - start_row to end_row. 
- dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_row >= 0 - end_row > start_row - """ - cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, - end_row + 1)] - rule = '%d_%d=' % dest_cell + '+'.join(cell_list) - self.add_rule(rule) - - - def add_colavg_rule(self, dest_cell, col_index, start_row, end_row): - """Creates and adds a rule to make the col average (from start_row - to end_row) - dest_cell is a tuple of two elements (x,y) of the destination cell - No check is done for indexes ranges. - pre: - start_row >= 0 - end_row > start_row - """ - cell_list = ['%d_%d'%(index, col_index) for index in range(start_row, - end_row + 1)] - num = (end_row - start_row + 1) - rule = '%d_%d=' % dest_cell + '('+'+'.join(cell_list)+')/%f'%num - self.add_rule(rule) - - - -class TableCellRenderer: - """Defines a simple text renderer - """ - - def __init__(self, **properties): - """keywords should be properties with an associated boolean as value. - For example : - renderer = TableCellRenderer(units = True, alignment = False) - An unspecified property will have a 'False' value by default. 
- Possible properties are : - alignment, unit - """ - self.properties = properties - - - def render_cell(self, cell_coord, table, table_style): - """Renders the cell at 'cell_coord' in the table, using table_style - """ - row_index, col_index = cell_coord - cell_value = table.data[row_index][col_index] - final_content = self._make_cell_content(cell_value, - table_style, col_index +1) - return self._render_cell_content(final_content, - table_style, col_index + 1) - - - def render_row_cell(self, row_name, table, table_style): - """Renders the cell for 'row_id' row - """ - cell_value = row_name.encode('iso-8859-1') - return self._render_cell_content(cell_value, table_style, 0) - - - def render_col_cell(self, col_name, table, table_style): - """Renders the cell for 'col_id' row - """ - cell_value = col_name.encode('iso-8859-1') - col_index = table.col_names.index(col_name) - return self._render_cell_content(cell_value, table_style, col_index +1) - - - - def _render_cell_content(self, content, table_style, col_index): - """Makes the appropriate rendering for this cell content. 
- Rendering properties will be searched using the - *table_style.get_xxx_by_index(col_index)' methods - - **This method should be overridden in the derived renderer classes.** - """ - return content - - - def _make_cell_content(self, cell_content, table_style, col_index): - """Makes the cell content (adds decoration data, like units for - example) - """ - final_content = cell_content - if 'skip_zero' in self.properties: - replacement_char = self.properties['skip_zero'] - else: - replacement_char = 0 - if replacement_char and final_content == 0: - return replacement_char - - try: - units_on = self.properties['units'] - if units_on: - final_content = self._add_unit( - cell_content, table_style, col_index) - except KeyError: - pass - - return final_content - - - def _add_unit(self, cell_content, table_style, col_index): - """Adds unit to the cell_content if needed - """ - unit = table_style.get_unit_by_index(col_index) - return str(cell_content) + " " + unit - - - -class DocbookRenderer(TableCellRenderer): - """Defines how to render a cell for a docboook table - """ - - def define_col_header(self, col_index, table_style): - """Computes the colspec element according to the style - """ - size = table_style.get_size_by_index(col_index) - return '\n' % \ - (col_index, size) - - - def _render_cell_content(self, cell_content, table_style, col_index): - """Makes the appropriate rendering for this cell content. - Rendering properties will be searched using the - table_style.get_xxx_by_index(col_index)' methods. 
- """ - try: - align_on = self.properties['alignment'] - alignment = table_style.get_alignment_by_index(col_index) - if align_on: - return "%s\n" % \ - (alignment, cell_content) - except KeyError: - # KeyError <=> Default alignment - return "%s\n" % cell_content - - -class TableWriter: - """A class to write tables - """ - - def __init__(self, stream, table, style, **properties): - self._stream = stream - self.style = style or TableStyle(table) - self._table = table - self.properties = properties - self.renderer = None - - - def set_style(self, style): - """sets the table's associated style - """ - self.style = style - - - def set_renderer(self, renderer): - """sets the way to render cell - """ - self.renderer = renderer - - - def update_properties(self, **properties): - """Updates writer's properties (for cell rendering) - """ - self.properties.update(properties) - - - def write_table(self, title = ""): - """Writes the table - """ - raise NotImplementedError("write_table must be implemented !") - - - -class DocbookTableWriter(TableWriter): - """Defines an implementation of TableWriter to write a table in Docbook - """ - - def _write_headers(self): - """Writes col headers - """ - # Define col_headers (colstpec elements) - for col_index in range(len(self._table.col_names)+1): - self._stream.write(self.renderer.define_col_header(col_index, - self.style)) - - self._stream.write("\n\n") - # XXX FIXME : write an empty entry <=> the first (__row_column) column - self._stream.write('\n') - for col_name in self._table.col_names: - self._stream.write(self.renderer.render_col_cell( - col_name, self._table, - self.style)) - - self._stream.write("\n\n") - - - def _write_body(self): - """Writes the table body - """ - self._stream.write('\n') - - for row_index, row in enumerate(self._table.data): - self._stream.write('\n') - row_name = self._table.row_names[row_index] - # Write the first entry (row_name) - self._stream.write(self.renderer.render_row_cell(row_name, - self._table, 
- self.style)) - - for col_index, cell in enumerate(row): - self._stream.write(self.renderer.render_cell( - (row_index, col_index), - self._table, self.style)) - - self._stream.write('\n') - - self._stream.write('\n') - - - def write_table(self, title = ""): - """Writes the table - """ - self._stream.write('\nCodestin Search App\n'%(title)) - self._stream.write( - '\n'% - (len(self._table.col_names)+1)) - self._write_headers() - self._write_body() - - self._stream.write('\n
\n') - - diff --git a/pylibs/logilab/common/tasksqueue.py b/pylibs/logilab/common/tasksqueue.py deleted file mode 100644 index e95a77e1..00000000 --- a/pylibs/logilab/common/tasksqueue.py +++ /dev/null @@ -1,98 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""Prioritized tasks queue""" - -__docformat__ = "restructuredtext en" - -from bisect import insort_left -from Queue import Queue - -LOW = 0 -MEDIUM = 10 -HIGH = 100 - -PRIORITY = { - 'LOW': LOW, - 'MEDIUM': MEDIUM, - 'HIGH': HIGH, - } -REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.iteritems()) - - - -class PrioritizedTasksQueue(Queue): - - def _init(self, maxsize): - """Initialize the queue representation""" - self.maxsize = maxsize - # ordered list of task, from the lowest to the highest priority - self.queue = [] - - def _put(self, item): - """Put a new item in the queue""" - for i, task in enumerate(self.queue): - # equivalent task - if task == item: - # if new task has a higher priority, remove the one already - # queued so the new priority will be considered - if task < item: - item.merge(task) - del self.queue[i] - break - # else keep it so current order is kept - task.merge(item) - return - insort_left(self.queue, item) - - def _get(self): - """Get an item from the queue""" - return self.queue.pop() - - def __iter__(self): - return iter(self.queue) - - def remove(self, tid): - """remove a specific task from the queue""" - # XXX acquire lock - for i, task in enumerate(self): - if task.id == tid: - self.queue.pop(i) - return - raise ValueError('not task of id %s in queue' % tid) - -class Task(object): - def __init__(self, tid, priority=LOW): - # task id - self.id = tid - # task priority - self.priority = priority - - def __repr__(self): - return '' % (self.id, id(self)) - - def __cmp__(self, other): - return cmp(self.priority, other.priority) - - def __lt__(self, other): - return self.priority < other.priority - - def __eq__(self, other): - return self.id == other.id - - def merge(self, other): - pass diff --git a/pylibs/logilab/common/testlib.py b/pylibs/logilab/common/testlib.py deleted file mode 100644 index da49387a..00000000 --- a/pylibs/logilab/common/testlib.py +++ /dev/null @@ -1,1382 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 
2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Run tests. - -This will find all modules whose name match a given prefix in the test -directory, and run them. Various command line options provide -additional facilities. - -Command line options: - - -v verbose -- run tests in verbose mode with output to stdout - -q quiet -- don't print anything except if a test fails - -t testdir -- directory where the tests will be found - -x exclude -- add a test to exclude - -p profile -- profiled execution - -d dbc -- enable design-by-contract - -m match -- only run test matching the tag pattern which follow - -If no non-option arguments are present, prefixes used are 'test', -'regrtest', 'smoketest' and 'unittest'. 
- -""" -__docformat__ = "restructuredtext en" -# modified copy of some functions from test/regrtest.py from PyXml -# disable camel case warning -# pylint: disable=C0103 - -import sys -import os, os.path as osp -import re -import traceback -import inspect -import difflib -import tempfile -import math -import warnings -from shutil import rmtree -from operator import itemgetter -from ConfigParser import ConfigParser -from logilab.common.deprecation import deprecated -from itertools import dropwhile - -import unittest as unittest_legacy -if not getattr(unittest_legacy, "__package__", None): - try: - import unittest2 as unittest - from unittest2 import SkipTest - except ImportError: - sys.exit("You have to install python-unittest2 to use this module") -else: - import unittest - from unittest import SkipTest - -try: - from functools import wraps -except ImportError: - def wraps(wrapped): - def proxy(callable): - callable.__name__ = wrapped.__name__ - return callable - return proxy -try: - from test import test_support -except ImportError: - # not always available - class TestSupport: - def unload(self, test): - pass - test_support = TestSupport() - -# pylint: disable=W0622 -from logilab.common.compat import any, InheritableSet, callable -# pylint: enable=W0622 -from logilab.common.debugger import Debugger, colorize_source -from logilab.common.decorators import cached, classproperty -from logilab.common import textutils - - -__all__ = ['main', 'unittest_main', 'find_tests', 'run_test', 'spawn'] - -DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest', - 'func', 'validation') - - -if sys.version_info >= (2, 6): - # FIXME : this does not work as expected / breaks tests on testlib - # however testlib does not work on py3k for many reasons ... 
- from inspect import CO_GENERATOR -else: - from compiler.consts import CO_GENERATOR - -if sys.version_info >= (3, 0): - def is_generator(function): - flags = function.__code__.co_flags - return flags & CO_GENERATOR - -else: - def is_generator(function): - flags = function.func_code.co_flags - return flags & CO_GENERATOR - -# used by unittest to count the number of relevant levels in the traceback -__unittest = 1 - - -def with_tempdir(callable): - """A decorator ensuring no temporary file left when the function return - Work only for temporary file create with the tempfile module""" - @wraps(callable) - def proxy(*args, **kargs): - - old_tmpdir = tempfile.gettempdir() - new_tmpdir = tempfile.mkdtemp(prefix="temp-lgc-") - tempfile.tempdir = new_tmpdir - try: - return callable(*args, **kargs) - finally: - try: - rmtree(new_tmpdir, ignore_errors=True) - finally: - tempfile.tempdir = old_tmpdir - return proxy - -def in_tempdir(callable): - """A decorator moving the enclosed function inside the tempfile.tempfdir - """ - @wraps(callable) - def proxy(*args, **kargs): - - old_cwd = os.getcwd() - os.chdir(tempfile.tempdir) - try: - return callable(*args, **kargs) - finally: - os.chdir(old_cwd) - return proxy - -def within_tempdir(callable): - """A decorator run the enclosed function inside a tmpdir removed after execution - """ - proxy = with_tempdir(in_tempdir(callable)) - proxy.__name__ = callable.__name__ - return proxy - -def find_tests(testdir, - prefixes=DEFAULT_PREFIXES, suffix=".py", - excludes=(), - remove_suffix=True): - """ - Return a list of all applicable test modules. 
- """ - tests = [] - for name in os.listdir(testdir): - if not suffix or name.endswith(suffix): - for prefix in prefixes: - if name.startswith(prefix): - if remove_suffix and name.endswith(suffix): - name = name[:-len(suffix)] - if name not in excludes: - tests.append(name) - tests.sort() - return tests - - -## PostMortem Debug facilities ##### -def start_interactive_mode(result): - """starts an interactive shell so that the user can inspect errors - """ - debuggers = result.debuggers - descrs = result.error_descrs + result.fail_descrs - if len(debuggers) == 1: - # don't ask for test name if there's only one failure - debuggers[0].start() - else: - while True: - testindex = 0 - print "Choose a test to debug:" - # order debuggers in the same way than errors were printed - print "\n".join(['\t%s : %s' % (i, descr) for i, (_, descr) - in enumerate(descrs)]) - print "Type 'exit' (or ^D) to quit" - print - try: - todebug = raw_input('Enter a test name: ') - if todebug.strip().lower() == 'exit': - print - break - else: - try: - testindex = int(todebug) - debugger = debuggers[descrs[testindex][0]] - except (ValueError, IndexError): - print "ERROR: invalid test number %r" % (todebug, ) - else: - debugger.start() - except (EOFError, KeyboardInterrupt): - print - break - - -# test utils ################################################################## - -class SkipAwareTestResult(unittest._TextTestResult): - - def __init__(self, stream, descriptions, verbosity, - exitfirst=False, pdbmode=False, cvg=None, colorize=False): - super(SkipAwareTestResult, self).__init__(stream, - descriptions, verbosity) - self.skipped = [] - self.debuggers = [] - self.fail_descrs = [] - self.error_descrs = [] - self.exitfirst = exitfirst - self.pdbmode = pdbmode - self.cvg = cvg - self.colorize = colorize - self.pdbclass = Debugger - self.verbose = verbosity > 1 - - def descrs_for(self, flavour): - return getattr(self, '%s_descrs' % flavour.lower()) - - def _create_pdb(self, test_descr, 
flavour): - self.descrs_for(flavour).append( (len(self.debuggers), test_descr) ) - if self.pdbmode: - self.debuggers.append(self.pdbclass(sys.exc_info()[2])) - - def _iter_valid_frames(self, frames): - """only consider non-testlib frames when formatting traceback""" - lgc_testlib = osp.abspath(__file__) - std_testlib = osp.abspath(unittest.__file__) - invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib) - for frameinfo in dropwhile(invalid, frames): - yield frameinfo - - def _exc_info_to_string(self, err, test): - """Converts a sys.exc_info()-style tuple of values into a string. - - This method is overridden here because we want to colorize - lines if --color is passed, and display local variables if - --verbose is passed - """ - exctype, exc, tb = err - output = ['Traceback (most recent call last)'] - frames = inspect.getinnerframes(tb) - colorize = self.colorize - frames = enumerate(self._iter_valid_frames(frames)) - for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames: - filename = osp.abspath(filename) - if ctx is None: # pyc files or C extensions for instance - source = '' - else: - source = ''.join(ctx) - if colorize: - filename = textutils.colorize_ansi(filename, 'magenta') - source = colorize_source(source) - output.append(' File "%s", line %s, in %s' % (filename, lineno, funcname)) - output.append(' %s' % source.strip()) - if self.verbose: - output.append('%r == %r' % (dir(frame), test.__module__)) - output.append('') - output.append(' ' + ' local variables '.center(66, '-')) - for varname, value in sorted(frame.f_locals.items()): - output.append(' %s: %r' % (varname, value)) - if varname == 'self': # special handy processing for self - for varname, value in sorted(vars(value).items()): - output.append(' self.%s: %r' % (varname, value)) - output.append(' ' + '-' * 66) - output.append('') - output.append(''.join(traceback.format_exception_only(exctype, exc))) - return '\n'.join(output) - - def addError(self, test, err): - 
"""err -> (exc_type, exc, tcbk)""" - exc_type, exc, _ = err - if isinstance(exc, SkipTest): - assert exc_type == SkipTest - self.addSkip(test, exc) - else: - if self.exitfirst: - self.shouldStop = True - descr = self.getDescription(test) - super(SkipAwareTestResult, self).addError(test, err) - self._create_pdb(descr, 'error') - - def addFailure(self, test, err): - if self.exitfirst: - self.shouldStop = True - descr = self.getDescription(test) - super(SkipAwareTestResult, self).addFailure(test, err) - self._create_pdb(descr, 'fail') - - def addSkip(self, test, reason): - self.skipped.append((test, reason)) - if self.showAll: - self.stream.writeln("SKIPPED") - elif self.dots: - self.stream.write('S') - - def printErrors(self): - super(SkipAwareTestResult, self).printErrors() - self.printSkippedList() - - def printSkippedList(self): - # format (test, err) compatible with unittest2 - for test, err in self.skipped: - descr = self.getDescription(test) - self.stream.writeln(self.separator1) - self.stream.writeln("%s: %s" % ('SKIPPED', descr)) - self.stream.writeln("\t%s" % err) - - def printErrorList(self, flavour, errors): - for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors): - self.stream.writeln(self.separator1) - self.stream.writeln("%s: %s" % (flavour, descr)) - self.stream.writeln(self.separator2) - self.stream.writeln(err) - self.stream.writeln('no stdout'.center(len(self.separator2))) - self.stream.writeln('no stderr'.center(len(self.separator2))) - -# Add deprecation warnings about new api used by module level fixtures in unittest2 -# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule -class _DebugResult(object): # simplify import statement among unittest flavors.. - "Used by the TestSuite to hold previous class when running in debug." 
- _previousTestClass = None - _moduleSetUpFailed = False - shouldStop = False - -from logilab.common.decorators import monkeypatch -@monkeypatch(unittest.TestSuite) -def _handleModuleTearDown(self, result): - previousModule = self._get_previous_module(result) - if previousModule is None: - return - if result._moduleSetUpFailed: - return - try: - module = sys.modules[previousModule] - except KeyError: - return - # add testlib specific deprecation warning and switch to new api - if hasattr(module, 'teardown_module'): - warnings.warn('Please rename teardown_module() to tearDownModule() instead.', - DeprecationWarning) - setattr(module, 'tearDownModule', module.teardown_module) - # end of monkey-patching - tearDownModule = getattr(module, 'tearDownModule', None) - if tearDownModule is not None: - try: - tearDownModule() - except Exception, e: - if isinstance(result, _DebugResult): - raise - errorName = 'tearDownModule (%s)' % previousModule - self._addClassOrModuleLevelException(result, e, errorName) - -@monkeypatch(unittest.TestSuite) -def _handleModuleFixture(self, test, result): - previousModule = self._get_previous_module(result) - currentModule = test.__class__.__module__ - if currentModule == previousModule: - return - self._handleModuleTearDown(result) - result._moduleSetUpFailed = False - try: - module = sys.modules[currentModule] - except KeyError: - return - # add testlib specific deprecation warning and switch to new api - if hasattr(module, 'setup_module'): - warnings.warn('Please rename setup_module() to setUpModule() instead.', - DeprecationWarning) - setattr(module, 'setUpModule', module.setup_module) - # end of monkey-patching - setUpModule = getattr(module, 'setUpModule', None) - if setUpModule is not None: - try: - setUpModule() - except Exception, e: - if isinstance(result, _DebugResult): - raise - result._moduleSetUpFailed = True - errorName = 'setUpModule (%s)' % currentModule - self._addClassOrModuleLevelException(result, e, errorName) - -# 
backward compatibility: TestSuite might be imported from lgc.testlib -TestSuite = unittest.TestSuite - -class keywords(dict): - """Keyword args (**kwargs) support for generative tests.""" - -class starargs(tuple): - """Variable arguments (*args) for generative tests.""" - def __new__(cls, *args): - return tuple.__new__(cls, args) - -unittest_main = unittest.main - - -class InnerTestSkipped(SkipTest): - """raised when a test is skipped""" - pass - -def parse_generative_args(params): - args = [] - varargs = () - kwargs = {} - flags = 0 # 2 <=> starargs, 4 <=> kwargs - for param in params: - if isinstance(param, starargs): - varargs = param - if flags: - raise TypeError('found starargs after keywords !') - flags |= 2 - args += list(varargs) - elif isinstance(param, keywords): - kwargs = param - if flags & 4: - raise TypeError('got multiple keywords parameters') - flags |= 4 - elif flags & 2 or flags & 4: - raise TypeError('found parameters after kwargs or args') - else: - args.append(param) - - return args, kwargs - - -class InnerTest(tuple): - def __new__(cls, name, *data): - instance = tuple.__new__(cls, data) - instance.name = name - return instance - -class Tags(InheritableSet): # 2.4 compat - """A set of tag able validate an expression""" - - def __init__(self, *tags, **kwargs): - self.inherit = kwargs.pop('inherit', True) - if kwargs: - raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys()) - - if len(tags) == 1 and not isinstance(tags[0], basestring): - tags = tags[0] - super(Tags, self).__init__(tags, **kwargs) - - def __getitem__(self, key): - return key in self - - def match(self, exp): - return eval(exp, {}, self) - - -# duplicate definition from unittest2 of the _deprecate decorator -def _deprecate(original_func): - def deprecated_func(*args, **kwargs): - warnings.warn( - ('Please use %s instead.' 
% original_func.__name__), - DeprecationWarning, 2) - return original_func(*args, **kwargs) - return deprecated_func - -class TestCase(unittest.TestCase): - """A unittest.TestCase extension with some additional methods.""" - maxDiff = None - pdbclass = Debugger - tags = Tags() - - def __init__(self, methodName='runTest'): - super(TestCase, self).__init__(methodName) - # internal API changed in python2.4 and needed by DocTestCase - if sys.version_info >= (2, 4): - self.__exc_info = sys.exc_info - self.__testMethodName = self._testMethodName - else: - # let's give easier access to _testMethodName to every subclasses - if hasattr(self, "__testMethodName"): - self._testMethodName = self.__testMethodName - self._current_test_descr = None - self._options_ = None - - @classproperty - @cached - def datadir(cls): # pylint: disable=E0213 - """helper attribute holding the standard test's data directory - - NOTE: this is a logilab's standard - """ - mod = __import__(cls.__module__) - return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data') - # cache it (use a class method to cache on class since TestCase is - # instantiated for each test run) - - @classmethod - def datapath(cls, *fname): - """joins the object's datadir and `fname`""" - return osp.join(cls.datadir, *fname) - - def set_description(self, descr): - """sets the current test's description. 
- This can be useful for generative tests because it allows to specify - a description per yield - """ - self._current_test_descr = descr - - # override default's unittest.py feature - def shortDescription(self): - """override default unittest shortDescription to handle correctly - generative tests - """ - if self._current_test_descr is not None: - return self._current_test_descr - return super(TestCase, self).shortDescription() - - def quiet_run(self, result, func, *args, **kwargs): - try: - func(*args, **kwargs) - except (KeyboardInterrupt, SystemExit): - raise - except: - result.addError(self, self.__exc_info()) - return False - return True - - def _get_test_method(self): - """return the test method""" - return getattr(self, self._testMethodName) - - def optval(self, option, default=None): - """return the option value or default if the option is not define""" - return getattr(self._options_, option, default) - - def __call__(self, result=None, runcondition=None, options=None): - """rewrite TestCase.__call__ to support generative tests - This is mostly a copy/paste from unittest.py (i.e same - variable names, same logic, except for the generative tests part) - """ - from logilab.common.pytest import FILE_RESTART - if result is None: - result = self.defaultTestResult() - result.pdbclass = self.pdbclass - self._options_ = options - # if result.cvg: - # result.cvg.start() - testMethod = self._get_test_method() - if runcondition and not runcondition(testMethod): - return # test is skipped - result.startTest(self) - try: - if not self.quiet_run(result, self.setUp): - return - generative = is_generator(testMethod.im_func) - # generative tests - if generative: - self._proceed_generative(result, testMethod, - runcondition) - else: - status = self._proceed(result, testMethod) - success = (status == 0) - if not self.quiet_run(result, self.tearDown): - return - if not generative and success: - if hasattr(options, "exitfirst") and options.exitfirst: - # add this test to 
restart file - try: - restartfile = open(FILE_RESTART, 'a') - try: - descr = '.'.join((self.__class__.__module__, - self.__class__.__name__, - self._testMethodName)) - restartfile.write(descr+os.linesep) - finally: - restartfile.close() - except Exception, ex: - print >> sys.__stderr__, "Error while saving \ -succeeded test into", osp.join(os.getcwd(), FILE_RESTART) - raise ex - result.addSuccess(self) - finally: - # if result.cvg: - # result.cvg.stop() - result.stopTest(self) - - def _proceed_generative(self, result, testfunc, runcondition=None): - # cancel startTest()'s increment - result.testsRun -= 1 - success = True - try: - for params in testfunc(): - if runcondition and not runcondition(testfunc, - skipgenerator=False): - if not (isinstance(params, InnerTest) - and runcondition(params)): - continue - if not isinstance(params, (tuple, list)): - params = (params, ) - func = params[0] - args, kwargs = parse_generative_args(params[1:]) - # increment test counter manually - result.testsRun += 1 - status = self._proceed(result, func, args, kwargs) - if status == 0: - result.addSuccess(self) - success = True - else: - success = False - # XXX Don't stop anymore if an error occured - #if status == 2: - # result.shouldStop = True - if result.shouldStop: # either on error or on exitfirst + error - break - except: - # if an error occurs between two yield - result.addError(self, self.__exc_info()) - success = False - return success - - def _proceed(self, result, testfunc, args=(), kwargs=None): - """proceed the actual test - returns 0 on success, 1 on failure, 2 on error - - Note: addSuccess can't be called here because we have to wait - for tearDown to be successfully executed to declare the test as - successful - """ - kwargs = kwargs or {} - try: - testfunc(*args, **kwargs) - except self.failureException: - result.addFailure(self, self.__exc_info()) - return 1 - except KeyboardInterrupt: - raise - except InnerTestSkipped, e: - result.addSkip(self, e) - return 1 - 
except SkipTest, e: - result.addSkip(self, e) - return 0 - except: - result.addError(self, self.__exc_info()) - return 2 - return 0 - - def defaultTestResult(self): - """return a new instance of the defaultTestResult""" - return SkipAwareTestResult() - - skip = _deprecate(unittest.TestCase.skipTest) - assertEquals = _deprecate(unittest.TestCase.assertEqual) - assertNotEquals = _deprecate(unittest.TestCase.assertNotEqual) - assertAlmostEquals = _deprecate(unittest.TestCase.assertAlmostEqual) - assertNotAlmostEquals = _deprecate(unittest.TestCase.assertNotAlmostEqual) - - def innerSkip(self, msg=None): - """mark a generative test as skipped for the reason""" - msg = msg or 'test was skipped' - raise InnerTestSkipped(msg) - - @deprecated('Please use assertDictEqual instead.') - def assertDictEquals(self, dict1, dict2, msg=None, context=None): - """compares two dicts - - If the two dict differ, the first difference is shown in the error - message - :param dict1: a Python Dictionary - :param dict2: a Python Dictionary - :param msg: custom message (String) in case of failure - """ - dict1 = dict(dict1) - msgs = [] - for key, value in dict2.items(): - try: - if dict1[key] != value: - msgs.append('%r != %r for key %r' % (dict1[key], value, - key)) - del dict1[key] - except KeyError: - msgs.append('missing %r key' % key) - if dict1: - msgs.append('dict2 is lacking %r' % dict1) - if msg: - self.failureException(msg) - elif msgs: - if context is not None: - base = '%s\n' % context - else: - base = '' - self.fail(base + '\n'.join(msgs)) - - @deprecated('Please use assertItemsEqual instead.') - def assertUnorderedIterableEquals(self, got, expected, msg=None): - """compares two iterable and shows difference between both - - :param got: the unordered Iterable that we found - :param expected: the expected unordered Iterable - :param msg: custom message (String) in case of failure - """ - got, expected = list(got), list(expected) - self.assertSetEqual(set(got), set(expected), msg) 
- if len(got) != len(expected): - if msg is None: - msg = ['Iterable have the same elements but not the same number', - '\t\ti\t'] - got_count = {} - expected_count = {} - for element in got: - got_count[element] = got_count.get(element, 0) + 1 - for element in expected: - expected_count[element] = expected_count.get(element, 0) + 1 - # we know that got_count.key() == expected_count.key() - # because of assertSetEqual - for element, count in got_count.iteritems(): - other_count = expected_count[element] - if other_count != count: - msg.append('\t%s\t%s\t%s' % (element, other_count, count)) - - self.fail(msg) - - assertUnorderedIterableEqual = assertUnorderedIterableEquals - assertUnordIterEquals = assertUnordIterEqual = assertUnorderedIterableEqual - - @deprecated('Please use assertSetEqual instead.') - def assertSetEquals(self,got,expected, msg=None): - """compares two sets and shows difference between both - - Don't use it for iterables other than sets. - - :param got: the Set that we found - :param expected: the second Set to be compared to the first one - :param msg: custom message (String) in case of failure - """ - - if not(isinstance(got, set) and isinstance(expected, set)): - warnings.warn("the assertSetEquals function if now intended for set only."\ - "use assertUnorderedIterableEquals instead.", - DeprecationWarning, 2) - return self.assertUnorderedIterableEquals(got, expected, msg) - - items={} - items['missing'] = expected - got - items['unexpected'] = got - expected - if any(items.itervalues()): - if msg is None: - msg = '\n'.join('%s:\n\t%s' % (key, "\n\t".join(str(value) for value in values)) - for key, values in items.iteritems() if values) - self.fail(msg) - - @deprecated('Please use assertListEqual instead.') - def assertListEquals(self, list_1, list_2, msg=None): - """compares two lists - - If the two list differ, the first difference is shown in the error - message - - :param list_1: a Python List - :param list_2: a second Python List - :param 
msg: custom message (String) in case of failure - """ - _l1 = list_1[:] - for i, value in enumerate(list_2): - try: - if _l1[0] != value: - from pprint import pprint - pprint(list_1) - pprint(list_2) - self.fail('%r != %r for index %d' % (_l1[0], value, i)) - del _l1[0] - except IndexError: - if msg is None: - msg = 'list_1 has only %d elements, not %s '\ - '(at least %r missing)'% (i, len(list_2), value) - self.fail(msg) - if _l1: - if msg is None: - msg = 'list_2 is lacking %r' % _l1 - self.fail(msg) - - @deprecated('Non-standard. Please use assertMultiLineEqual instead.') - def assertLinesEquals(self, string1, string2, msg=None, striplines=False): - """compare two strings and assert that the text lines of the strings - are equal. - - :param string1: a String - :param string2: a String - :param msg: custom message (String) in case of failure - :param striplines: Boolean to trigger line stripping before comparing - """ - lines1 = string1.splitlines() - lines2 = string2.splitlines() - if striplines: - lines1 = [l.strip() for l in lines1] - lines2 = [l.strip() for l in lines2] - self.assertListEqual(lines1, lines2, msg) - assertLineEqual = assertLinesEquals - - @deprecated('Non-standard: please copy test method to your TestCase class') - def assertXMLWellFormed(self, stream, msg=None, context=2): - """asserts the XML stream is well-formed (no DTD conformance check) - - :param context: number of context lines in standard message - (show all data if negative). 
- Only available with element tree - """ - try: - from xml.etree.ElementTree import parse - self._assertETXMLWellFormed(stream, parse, msg) - except ImportError: - from xml.sax import make_parser, SAXParseException - parser = make_parser() - try: - parser.parse(stream) - except SAXParseException, ex: - if msg is None: - stream.seek(0) - for _ in xrange(ex.getLineNumber()): - line = stream.readline() - pointer = ('' * (ex.getLineNumber() - 1)) + '^' - msg = 'XML stream not well formed: %s\n%s%s' % (ex, line, pointer) - self.fail(msg) - - @deprecated('Non-standard: please copy test method to your TestCase class') - def assertXMLStringWellFormed(self, xml_string, msg=None, context=2): - """asserts the XML string is well-formed (no DTD conformance check) - - :param context: number of context lines in standard message - (show all data if negative). - Only available with element tree - """ - try: - from xml.etree.ElementTree import fromstring - except ImportError: - from elementtree.ElementTree import fromstring - self._assertETXMLWellFormed(xml_string, fromstring, msg) - - def _assertETXMLWellFormed(self, data, parse, msg=None, context=2): - """internal function used by /assertXML(String)?WellFormed/ functions - - :param data: xml_data - :param parse: appropriate parser function for this data - :param msg: error message - :param context: number of context lines in standard message - (show all data if negative). 
- Only available with element tree - """ - from xml.parsers.expat import ExpatError - try: - from xml.etree.ElementTree import ParseError - except ImportError: - # compatibility for 1: - if len(tup)<=1: - self.fail( "tuple %s has no attributes (%s expected)"%(tup, - dict(element.attrib))) - self.assertDictEqual(element.attrib, tup[1]) - # check children - if len(element) or len(tup)>2: - if len(tup)<=2: - self.fail( "tuple %s has no children (%i expected)"%(tup, - len(element))) - if len(element) != len(tup[2]): - self.fail( "tuple %s has %i children%s (%i expected)"%(tup, - len(tup[2]), - ('', 's')[len(tup[2])>1], len(element))) - for index in xrange(len(tup[2])): - self.assertXMLEqualsTuple(element[index], tup[2][index]) - #check text - if element.text or len(tup)>3: - if len(tup)<=3: - self.fail( "tuple %s has no text value (%r expected)"%(tup, - element.text)) - self.assertTextEquals(element.text, tup[3]) - #check tail - if element.tail or len(tup)>4: - if len(tup)<=4: - self.fail( "tuple %s has no tail value (%r expected)"%(tup, - element.tail)) - self.assertTextEquals(element.tail, tup[4]) - - def _difftext(self, lines1, lines2, junk=None, msg_prefix='Texts differ'): - junk = junk or (' ', '\t') - # result is a generator - result = difflib.ndiff(lines1, lines2, charjunk=lambda x: x in junk) - read = [] - for line in result: - read.append(line) - # lines that don't start with a ' ' are diff ones - if not line.startswith(' '): - self.fail('\n'.join(['%s\n'%msg_prefix]+read + list(result))) - - @deprecated('Non-standard. 
Please use assertMultiLineEqual instead.') - def assertTextEquals(self, text1, text2, junk=None, - msg_prefix='Text differ', striplines=False): - """compare two multiline strings (using difflib and splitlines()) - - :param text1: a Python BaseString - :param text2: a second Python Basestring - :param junk: List of Caracters - :param msg_prefix: String (message prefix) - :param striplines: Boolean to trigger line stripping before comparing - """ - msg = [] - if not isinstance(text1, basestring): - msg.append('text1 is not a string (%s)'%(type(text1))) - if not isinstance(text2, basestring): - msg.append('text2 is not a string (%s)'%(type(text2))) - if msg: - self.fail('\n'.join(msg)) - lines1 = text1.strip().splitlines(True) - lines2 = text2.strip().splitlines(True) - if striplines: - lines1 = [line.strip() for line in lines1] - lines2 = [line.strip() for line in lines2] - self._difftext(lines1, lines2, junk, msg_prefix) - assertTextEqual = assertTextEquals - - @deprecated('Non-standard: please copy test method to your TestCase class') - def assertStreamEquals(self, stream1, stream2, junk=None, - msg_prefix='Stream differ'): - """compare two streams (using difflib and readlines())""" - # if stream2 is stream2, readlines() on stream1 will also read lines - # in stream2, so they'll appear different, although they're not - if stream1 is stream2: - return - # make sure we compare from the beginning of the stream - stream1.seek(0) - stream2.seek(0) - # compare - self._difftext(stream1.readlines(), stream2.readlines(), junk, - msg_prefix) - - assertStreamEqual = assertStreamEquals - - @deprecated('Non-standard: please copy test method to your TestCase class') - def assertFileEquals(self, fname1, fname2, junk=(' ', '\t')): - """compares two files using difflib""" - self.assertStreamEqual(open(fname1), open(fname2), junk, - msg_prefix='Files differs\n-:%s\n+:%s\n'%(fname1, fname2)) - - assertFileEqual = assertFileEquals - - @deprecated('Non-standard: please copy test method 
to your TestCase class') - def assertDirEquals(self, path_a, path_b): - """compares two files using difflib""" - assert osp.exists(path_a), "%s doesn't exists" % path_a - assert osp.exists(path_b), "%s doesn't exists" % path_b - - all_a = [ (ipath[len(path_a):].lstrip('/'), idirs, ifiles) - for ipath, idirs, ifiles in os.walk(path_a)] - all_a.sort(key=itemgetter(0)) - - all_b = [ (ipath[len(path_b):].lstrip('/'), idirs, ifiles) - for ipath, idirs, ifiles in os.walk(path_b)] - all_b.sort(key=itemgetter(0)) - - iter_a, iter_b = iter(all_a), iter(all_b) - partial_iter = True - ipath_a, idirs_a, ifiles_a = data_a = None, None, None - while True: - try: - ipath_a, idirs_a, ifiles_a = datas_a = iter_a.next() - partial_iter = False - ipath_b, idirs_b, ifiles_b = datas_b = iter_b.next() - partial_iter = True - - - self.assert_(ipath_a == ipath_b, - "unexpected %s in %s while looking %s from %s" % - (ipath_a, path_a, ipath_b, path_b)) - - - errors = {} - sdirs_a = set(idirs_a) - sdirs_b = set(idirs_b) - errors["unexpected directories"] = sdirs_a - sdirs_b - errors["missing directories"] = sdirs_b - sdirs_a - - sfiles_a = set(ifiles_a) - sfiles_b = set(ifiles_b) - errors["unexpected files"] = sfiles_a - sfiles_b - errors["missing files"] = sfiles_b - sfiles_a - - - msgs = [ "%s: %s"% (name, items) - for name, items in errors.iteritems() if items] - - if msgs: - msgs.insert(0, "%s and %s differ :" % ( - osp.join(path_a, ipath_a), - osp.join(path_b, ipath_b), - )) - self.fail("\n".join(msgs)) - - for files in (ifiles_a, ifiles_b): - files.sort() - - for index, path in enumerate(ifiles_a): - self.assertFileEquals(osp.join(path_a, ipath_a, path), - osp.join(path_b, ipath_b, ifiles_b[index])) - - except StopIteration: - break - - assertDirEqual = assertDirEquals - - def assertIsInstance(self, obj, klass, msg=None, strict=False): - """check if an object is an instance of a class - - :param obj: the Python Object to be checked - :param klass: the target class - :param msg: a String 
for a custom message - :param strict: if True, check that the class of is ; - else check with 'isinstance' - """ - if strict: - warnings.warn('[API] Non-standard. Strict parameter has vanished', - DeprecationWarning, stacklevel=2) - if msg is None: - if strict: - msg = '%r is not of class %s but of %s' - else: - msg = '%r is not an instance of %s but of %s' - msg = msg % (obj, klass, type(obj)) - if strict: - self.assert_(obj.__class__ is klass, msg) - else: - self.assert_(isinstance(obj, klass), msg) - - @deprecated('Please use assertIsNone instead.') - def assertNone(self, obj, msg=None): - """assert obj is None - - :param obj: Python Object to be tested - """ - if msg is None: - msg = "reference to %r when None expected"%(obj,) - self.assert_( obj is None, msg ) - - @deprecated('Please use assertIsNotNone instead.') - def assertNotNone(self, obj, msg=None): - """assert obj is not None""" - if msg is None: - msg = "unexpected reference to None" - self.assert_( obj is not None, msg ) - - @deprecated('Non-standard. Please use assertAlmostEqual instead.') - def assertFloatAlmostEquals(self, obj, other, prec=1e-5, - relative=False, msg=None): - """compares if two floats have a distance smaller than expected - precision. - - :param obj: a Float - :param other: another Float to be comparted to - :param prec: a Float describing the precision - :param relative: boolean switching to relative/absolute precision - :param msg: a String for a custom message - """ - if msg is None: - msg = "%r != %r" % (obj, other) - if relative: - prec = prec*math.fabs(obj) - self.assert_(math.fabs(obj - other) < prec, msg) - - def failUnlessRaises(self, excClass, callableObj=None, *args, **kwargs): - """override default failUnlessRaises method to return the raised - exception instance. - - Fail unless an exception of class excClass is thrown - by callableObj when invoked with arguments args and keyword - arguments kwargs. 
If a different type of exception is - thrown, it will not be caught, and the test case will be - deemed to have suffered an error, exactly as for an - unexpected exception. - - CAUTION! There are subtle differences between Logilab and unittest2 - - exc is not returned in standard version - - context capabilities in standard version - - try/except/else construction (minor) - - :param excClass: the Exception to be raised - :param callableObj: a callable Object which should raise - :param args: a List of arguments for - :param kwargs: a List of keyword arguments for - """ - # XXX cube vcslib : test_branches_from_app - if callableObj is None: - _assert = super(TestCase, self).assertRaises - return _assert(excClass, callableObj, *args, **kwargs) - try: - callableObj(*args, **kwargs) - except excClass, exc: - class ProxyException: - def __init__(self, obj): - self._obj = obj - def __getattr__(self, attr): - warn_msg = ("This exception was retrieved with the old testlib way " - "`exc = self.assertRaises(Exc, callable)`, please use " - "the context manager instead'") - warnings.warn(warn_msg, DeprecationWarning, 2) - return self._obj.__getattribute__(attr) - return ProxyException(exc) - else: - if hasattr(excClass, '__name__'): - excName = excClass.__name__ - else: - excName = str(excClass) - raise self.failureException("%s not raised" % excName) - - assertRaises = failUnlessRaises - - -import doctest - -class SkippedSuite(unittest.TestSuite): - def test(self): - """just there to trigger test execution""" - self.skipped_test('doctest module has no DocTestSuite class') - - -class DocTestFinder(doctest.DocTestFinder): - - def __init__(self, *args, **kwargs): - self.skipped = kwargs.pop('skipped', ()) - doctest.DocTestFinder.__init__(self, *args, **kwargs) - - def _get_test(self, obj, name, module, globs, source_lines): - """override default _get_test method to be able to skip tests - according to skipped attribute's value - - Note: Python (<=2.4) use a _name_filter which 
could be used for that - purpose but it's no longer available in 2.5 - Python 2.5 seems to have a [SKIP] flag - """ - if getattr(obj, '__name__', '') in self.skipped: - return None - return doctest.DocTestFinder._get_test(self, obj, name, module, - globs, source_lines) - - -class DocTest(TestCase): - """trigger module doctest - I don't know how to make unittest.main consider the DocTestSuite instance - without this hack - """ - skipped = () - def __call__(self, result=None, runcondition=None, options=None):\ - # pylint: disable=W0613 - try: - finder = DocTestFinder(skipped=self.skipped) - if sys.version_info >= (2, 4): - suite = doctest.DocTestSuite(self.module, test_finder=finder) - if sys.version_info >= (2, 5): - # XXX iirk - doctest.DocTestCase._TestCase__exc_info = sys.exc_info - else: - suite = doctest.DocTestSuite(self.module) - except AttributeError: - suite = SkippedSuite() - return suite.run(result) - run = __call__ - - def test(self): - """just there to trigger test execution""" - -MAILBOX = None - -class MockSMTP: - """fake smtplib.SMTP""" - - def __init__(self, host, port): - self.host = host - self.port = port - global MAILBOX - self.reveived = MAILBOX = [] - - def set_debuglevel(self, debuglevel): - """ignore debug level""" - - def sendmail(self, fromaddr, toaddres, body): - """push sent mail in the mailbox""" - self.reveived.append((fromaddr, toaddres, body)) - - def quit(self): - """ignore quit""" - - -class MockConfigParser(ConfigParser): - """fake ConfigParser.ConfigParser""" - - def __init__(self, options): - ConfigParser.__init__(self) - for section, pairs in options.iteritems(): - self.add_section(section) - for key, value in pairs.iteritems(): - self.set(section, key, value) - def write(self, _): - raise NotImplementedError() - - -class MockConnection: - """fake DB-API 2.0 connexion AND cursor (i.e. 
cursor() return self)""" - - def __init__(self, results): - self.received = [] - self.states = [] - self.results = results - - def cursor(self): - """Mock cursor method""" - return self - def execute(self, query, args=None): - """Mock execute method""" - self.received.append( (query, args) ) - def fetchone(self): - """Mock fetchone method""" - return self.results[0] - def fetchall(self): - """Mock fetchall method""" - return self.results - def commit(self): - """Mock commiy method""" - self.states.append( ('commit', len(self.received)) ) - def rollback(self): - """Mock rollback method""" - self.states.append( ('rollback', len(self.received)) ) - def close(self): - """Mock close method""" - pass - - -def mock_object(**params): - """creates an object using params to set attributes - >>> option = mock_object(verbose=False, index=range(5)) - >>> option.verbose - False - >>> option.index - [0, 1, 2, 3, 4] - """ - return type('Mock', (), params)() - - -def create_files(paths, chroot): - """Creates directories and files found in . 
- - :param paths: list of relative paths to files or directories - :param chroot: the root directory in which paths will be created - - >>> from os.path import isdir, isfile - >>> isdir('/tmp/a') - False - >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp') - >>> isdir('/tmp/a') - True - >>> isdir('/tmp/a/b/c') - True - >>> isfile('/tmp/a/b/c/d/e.py') - True - >>> isfile('/tmp/a/b/foo.py') - True - """ - dirs, files = set(), set() - for path in paths: - path = osp.join(chroot, path) - filename = osp.basename(path) - # path is a directory path - if filename == '': - dirs.add(path) - # path is a filename path - else: - dirs.add(osp.dirname(path)) - files.add(path) - for dirpath in dirs: - if not osp.isdir(dirpath): - os.makedirs(dirpath) - for filepath in files: - open(filepath, 'w').close() - - -class AttrObject: # XXX cf mock_object - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - -def tag(*args, **kwargs): - """descriptor adding tag to a function""" - def desc(func): - assert not hasattr(func, 'tags') - func.tags = Tags(*args, **kwargs) - return func - return desc - -def require_version(version): - """ Compare version of python interpreter to the given one. Skip the test - if older. - """ - def check_require_version(f): - version_elements = version.split('.') - try: - compare = tuple([int(v) for v in version_elements]) - except ValueError: - raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version) - current = sys.version_info[:3] - if current < compare: - def new_f(self, *args, **kwargs): - self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current]))) - new_f.__name__ = f.__name__ - return new_f - else: - return f - return check_require_version - -def require_module(module): - """ Check if the given module is loaded. Skip the test if not. 
- """ - def check_require_module(f): - try: - __import__(module) - return f - except ImportError: - def new_f(self, *args, **kwargs): - self.skipTest('%s can not be imported.' % module) - new_f.__name__ = f.__name__ - return new_f - return check_require_module - diff --git a/pylibs/logilab/common/textutils.py b/pylibs/logilab/common/textutils.py deleted file mode 100644 index bdeed415..00000000 --- a/pylibs/logilab/common/textutils.py +++ /dev/null @@ -1,532 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Some text manipulation utility functions. 
- - -:group text formatting: normalize_text, normalize_paragraph, pretty_match,\ -unquote, colorize_ansi -:group text manipulation: searchall, splitstrip -:sort: text formatting, text manipulation - -:type ANSI_STYLES: dict(str) -:var ANSI_STYLES: dictionary mapping style identifier to ANSI terminal code - -:type ANSI_COLORS: dict(str) -:var ANSI_COLORS: dictionary mapping color identifier to ANSI terminal code - -:type ANSI_PREFIX: str -:var ANSI_PREFIX: - ANSI terminal code notifying the start of an ANSI escape sequence - -:type ANSI_END: str -:var ANSI_END: - ANSI terminal code notifying the end of an ANSI escape sequence - -:type ANSI_RESET: str -:var ANSI_RESET: - ANSI terminal code resetting format defined by a previous ANSI escape sequence -""" -__docformat__ = "restructuredtext en" - -import sys -import re -import os.path as osp -from warnings import warn -from unicodedata import normalize as _uninormalize -try: - from os import linesep -except ImportError: - linesep = '\n' # gae - -from logilab.common.deprecation import deprecated - -MANUAL_UNICODE_MAP = { - u'\xa1': u'!', # INVERTED EXCLAMATION MARK - u'\u0142': u'l', # LATIN SMALL LETTER L WITH STROKE - u'\u2044': u'/', # FRACTION SLASH - u'\xc6': u'AE', # LATIN CAPITAL LETTER AE - u'\xa9': u'(c)', # COPYRIGHT SIGN - u'\xab': u'"', # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK - u'\xe6': u'ae', # LATIN SMALL LETTER AE - u'\xae': u'(r)', # REGISTERED SIGN - u'\u0153': u'oe', # LATIN SMALL LIGATURE OE - u'\u0152': u'OE', # LATIN CAPITAL LIGATURE OE - u'\xd8': u'O', # LATIN CAPITAL LETTER O WITH STROKE - u'\xf8': u'o', # LATIN SMALL LETTER O WITH STROKE - u'\xbb': u'"', # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK - u'\xdf': u'ss', # LATIN SMALL LETTER SHARP S - } - -def unormalize(ustring, ignorenonascii=None, substitute=None): - """replace diacritical characters with their corresponding ascii characters - - Convert the unicode string to its long normalized form (unicode character - will be transform into 
several characters) and keep the first one only. - The normal form KD (NFKD) will apply the compatibility decomposition, i.e. - replace all compatibility characters with their equivalents. - - :type substitute: str - :param substitute: replacement character to use if decomposition fails - - :see: Another project about ASCII transliterations of Unicode text - http://pypi.python.org/pypi/Unidecode - """ - # backward compatibility, ignorenonascii was a boolean - if ignorenonascii is not None: - warn("ignorenonascii is deprecated, use substitute named parameter instead", - DeprecationWarning, stacklevel=2) - if ignorenonascii: - substitute = '' - res = [] - for letter in ustring[:]: - try: - replacement = MANUAL_UNICODE_MAP[letter] - except KeyError: - replacement = _uninormalize('NFKD', letter)[0] - if ord(replacement) >= 2 ** 7: - if substitute is None: - raise ValueError("can't deal with non-ascii based characters") - replacement = substitute - res.append(replacement) - return u''.join(res) - -def unquote(string): - """remove optional quotes (simple or double) from the string - - :type string: str or unicode - :param string: an optionally quoted string - - :rtype: str or unicode - :return: the unquoted string (or the input string if it wasn't quoted) - """ - if not string: - return string - if string[0] in '"\'': - string = string[1:] - if string[-1] in '"\'': - string = string[:-1] - return string - - -_BLANKLINES_RGX = re.compile('\r?\n\r?\n') -_NORM_SPACES_RGX = re.compile('\s+') - -def normalize_text(text, line_len=80, indent='', rest=False): - """normalize a text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized but blank - lines are kept. The indentation string may be used to insert a - comment (#) or a quoting (>) mark for instance. 
- - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - if rest: - normp = normalize_rest_paragraph - else: - normp = normalize_paragraph - result = [] - for text in _BLANKLINES_RGX.split(text): - result.append(normp(text, line_len, indent)) - return ('%s%s%s' % (linesep, indent, linesep)).join(result) - - -def normalize_paragraph(text, line_len=80, indent=''): - """normalize a text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized. The - indentation string may be used top insert a comment mark for - instance. - - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - text = _NORM_SPACES_RGX.sub(' ', text) - line_len = line_len - len(indent) - lines = [] - while text: - aline, text = splittext(text.strip(), line_len) - lines.append(indent + aline) - return linesep.join(lines) - -def normalize_rest_paragraph(text, line_len=80, indent=''): - """normalize a ReST text to display it with a maximum line size and - optionally arbitrary indentation. Line jumps are normalized. The - indentation string may be used top insert a comment mark for - instance. 
- - :type text: str or unicode - :param text: the input text to normalize - - :type line_len: int - :param line_len: expected maximum line's length, default to 80 - - :type indent: str or unicode - :param indent: optional string to use as indentation - - :rtype: str or unicode - :return: - the input text normalized to fit on lines with a maximized size - inferior to `line_len`, and optionally prefixed by an - indentation string - """ - toreport = '' - lines = [] - line_len = line_len - len(indent) - for line in text.splitlines(): - line = toreport + _NORM_SPACES_RGX.sub(' ', line.strip()) - toreport = '' - while len(line) > line_len: - # too long line, need split - line, toreport = splittext(line, line_len) - lines.append(indent + line) - if toreport: - line = toreport + ' ' - toreport = '' - else: - line = '' - if line: - lines.append(indent + line.strip()) - return linesep.join(lines) - - -def splittext(text, line_len): - """split the given text on space according to the given max line size - - return a 2-uple: - * a line <= line_len if possible - * the rest of the text which has to be reported on another line - """ - if len(text) <= line_len: - return text, '' - pos = min(len(text)-1, line_len) - while pos > 0 and text[pos] != ' ': - pos -= 1 - if pos == 0: - pos = min(len(text), line_len) - while len(text) > pos and text[pos] != ' ': - pos += 1 - return text[:pos], text[pos+1:].strip() - - -def splitstrip(string, sep=','): - """return a list of stripped string by splitting the string given as - argument on `sep` (',' by default). Empty string are discarded. 
- - >>> splitstrip('a, b, c , 4,,') - ['a', 'b', 'c', '4'] - >>> splitstrip('a') - ['a'] - >>> - - :type string: str or unicode - :param string: a csv line - - :type sep: str or unicode - :param sep: field separator, default to the comma (',') - - :rtype: str or unicode - :return: the unquoted string (or the input string if it wasn't quoted) - """ - return [word.strip() for word in string.split(sep) if word.strip()] - -get_csv = deprecated('get_csv is deprecated, use splitstrip')(splitstrip) - - -def split_url_or_path(url_or_path): - """return the latest component of a string containing either an url of the - form :// or a local file system path - """ - if '://' in url_or_path: - return url_or_path.rstrip('/').rsplit('/', 1) - return osp.split(url_or_path.rstrip(osp.sep)) - - -def text_to_dict(text): - """parse multilines text containing simple 'key=value' lines and return a - dict of {'key': 'value'}. When the same key is encountered multiple time, - value is turned into a list containing all values. - - >>> text_to_dict('''multiple=1 - ... multiple= 2 - ... single =3 - ... 
''') - {'single': '3', 'multiple': ['1', '2']} - - """ - res = {} - if not text: - return res - for line in text.splitlines(): - line = line.strip() - if line and not line.startswith('#'): - key, value = [w.strip() for w in line.split('=', 1)] - if key in res: - try: - res[key].append(value) - except AttributeError: - res[key] = [res[key], value] - else: - res[key] = value - return res - - -_BLANK_URE = r'(\s|,)+' -_BLANK_RE = re.compile(_BLANK_URE) -__VALUE_URE = r'-?(([0-9]+\.[0-9]*)|((0x?)?[0-9]+))' -__UNITS_URE = r'[a-zA-Z]+' -_VALUE_RE = re.compile(r'(?P%s)(?P%s)?'%(__VALUE_URE, __UNITS_URE)) - -BYTE_UNITS = { - "b": 1, - "kb": 1024, - "mb": 1024 ** 2, - "gb": 1024 ** 3, - "tb": 1024 ** 4, -} - -TIME_UNITS = { - "ms": 0.0001, - "s": 1, - "min": 60, - "h": 60 * 60, - "d": 60 * 60 *24, -} - -def apply_units(string, units, inter=None, final=float, blank_reg=_BLANK_RE, - value_reg=_VALUE_RE): - """Parse the string applying the units defined in units - (e.g.: "1.5m",{'m',60} -> 80). - - :type string: str or unicode - :param string: the string to parse - - :type units: dict (or any object with __getitem__ using basestring key) - :param units: a dict mapping a unit string repr to its value - - :type inter: type - :param inter: used to parse every intermediate value (need __sum__) - - :type blank_reg: regexp - :param blank_reg: should match every blank char to ignore. - - :type value_reg: regexp with "value" and optional "unit" group - :param value_reg: match a value and it's unit into the - """ - if inter is None: - inter = final - string = _BLANK_RE.sub('', string) - values = [] - for match in value_reg.finditer(string): - dic = match.groupdict() - #import sys - #print >> sys.stderr, dic - lit, unit = dic["value"], dic.get("unit") - value = inter(lit) - if unit is not None: - try: - value *= units[unit.lower()] - except KeyError: - raise KeyError('invalid unit %s. 
valid units are %s' % - (unit, units.keys())) - values.append(value) - return final(sum(values)) - - -_LINE_RGX = re.compile('\r\n|\r+|\n') - -def pretty_match(match, string, underline_char='^'): - """return a string with the match location underlined: - - >>> import re - >>> print(pretty_match(re.search('mange', 'il mange du bacon'), 'il mange du bacon')) - il mange du bacon - ^^^^^ - >>> - - :type match: _sre.SRE_match - :param match: object returned by re.match, re.search or re.finditer - - :type string: str or unicode - :param string: - the string on which the regular expression has been applied to - obtain the `match` object - - :type underline_char: str or unicode - :param underline_char: - character to use to underline the matched section, default to the - carret '^' - - :rtype: str or unicode - :return: - the original string with an inserted line to underline the match - location - """ - start = match.start() - end = match.end() - string = _LINE_RGX.sub(linesep, string) - start_line_pos = string.rfind(linesep, 0, start) - if start_line_pos == -1: - start_line_pos = 0 - result = [] - else: - result = [string[:start_line_pos]] - start_line_pos += len(linesep) - offset = start - start_line_pos - underline = ' ' * offset + underline_char * (end - start) - end_line_pos = string.find(linesep, end) - if end_line_pos == -1: - string = string[start_line_pos:] - result.append(string) - result.append(underline) - else: - end = string[end_line_pos + len(linesep):] - string = string[start_line_pos:end_line_pos] - result.append(string) - result.append(underline) - result.append(end) - return linesep.join(result).rstrip() - - -# Ansi colorization ########################################################### - -ANSI_PREFIX = '\033[' -ANSI_END = 'm' -ANSI_RESET = '\033[0m' -ANSI_STYLES = { - 'reset': "0", - 'bold': "1", - 'italic': "3", - 'underline': "4", - 'blink': "5", - 'inverse': "7", - 'strike': "9", -} -ANSI_COLORS = { - 'reset': "0", - 'black': "30", - 'red': "31", - 
'green': "32", - 'yellow': "33", - 'blue': "34", - 'magenta': "35", - 'cyan': "36", - 'white': "37", -} - -def _get_ansi_code(color=None, style=None): - """return ansi escape code corresponding to color and style - - :type color: str or None - :param color: - the color name (see `ANSI_COLORS` for available values) - or the color number when 256 colors are available - - :type style: str or None - :param style: - style string (see `ANSI_COLORS` for available values). To get - several style effects at the same time, use a coma as separator. - - :raise KeyError: if an unexistent color or style identifier is given - - :rtype: str - :return: the built escape code - """ - ansi_code = [] - if style: - style_attrs = splitstrip(style) - for effect in style_attrs: - ansi_code.append(ANSI_STYLES[effect]) - if color: - if color.isdigit(): - ansi_code.extend(['38', '5']) - ansi_code.append(color) - else: - ansi_code.append(ANSI_COLORS[color]) - if ansi_code: - return ANSI_PREFIX + ';'.join(ansi_code) + ANSI_END - return '' - -def colorize_ansi(msg, color=None, style=None): - """colorize message by wrapping it with ansi escape codes - - :type msg: str or unicode - :param msg: the message string to colorize - - :type color: str or None - :param color: - the color identifier (see `ANSI_COLORS` for available values) - - :type style: str or None - :param style: - style string (see `ANSI_COLORS` for available values). To get - several style effects at the same time, use a coma as separator. 
- - :raise KeyError: if an unexistent color or style identifier is given - - :rtype: str or unicode - :return: the ansi escaped string - """ - # If both color and style are not defined, then leave the text as is - if color is None and style is None: - return msg - escape_code = _get_ansi_code(color, style) - # If invalid (or unknown) color, don't wrap msg with ansi codes - if escape_code: - return '%s%s%s' % (escape_code, msg, ANSI_RESET) - return msg - -DIFF_STYLE = {'separator': 'cyan', 'remove': 'red', 'add': 'green'} - -def diff_colorize_ansi(lines, out=sys.stdout, style=DIFF_STYLE): - for line in lines: - if line[:4] in ('--- ', '+++ '): - out.write(colorize_ansi(line, style['separator'])) - elif line[0] == '-': - out.write(colorize_ansi(line, style['remove'])) - elif line[0] == '+': - out.write(colorize_ansi(line, style['add'])) - elif line[:4] == '--- ': - out.write(colorize_ansi(line, style['separator'])) - elif line[:4] == '+++ ': - out.write(colorize_ansi(line, style['separator'])) - else: - out.write(line) - diff --git a/pylibs/logilab/common/tree.py b/pylibs/logilab/common/tree.py deleted file mode 100644 index 885eb0fa..00000000 --- a/pylibs/logilab/common/tree.py +++ /dev/null @@ -1,369 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Base class to represent a tree structure. - - - - -""" -__docformat__ = "restructuredtext en" - -import sys - -from logilab.common import flatten -from logilab.common.visitor import VisitedMixIn, FilteredIterator, no_filter - -## Exceptions ################################################################# - -class NodeNotFound(Exception): - """raised when a node has not been found""" - -EX_SIBLING_NOT_FOUND = "No such sibling as '%s'" -EX_CHILD_NOT_FOUND = "No such child as '%s'" -EX_NODE_NOT_FOUND = "No such node as '%s'" - - -# Base node ################################################################### - -class Node(object): - """a basic tree node, characterized by an id""" - - def __init__(self, nid=None) : - self.id = nid - # navigation - self.parent = None - self.children = [] - - def __iter__(self): - return iter(self.children) - - def __str__(self, indent=0): - s = ['%s%s %s' % (' '*indent, self.__class__.__name__, self.id)] - indent += 2 - for child in self.children: - try: - s.append(child.__str__(indent)) - except TypeError: - s.append(child.__str__()) - return '\n'.join(s) - - def is_leaf(self): - return not self.children - - def append(self, child): - """add a node to children""" - self.children.append(child) - child.parent = self - - def remove(self, child): - """remove a child node""" - self.children.remove(child) - child.parent = None - - def insert(self, index, child): - """insert a child node""" - self.children.insert(index, child) - child.parent = self - - def replace(self, old_child, new_child): - """replace a child node with another""" - i = self.children.index(old_child) - self.children.pop(i) - self.children.insert(i, new_child) - new_child.parent = self - - def get_sibling(self, nid): - """return the sibling node that has given id""" - try: - return self.parent.get_child_by_id(nid) - except NodeNotFound : - raise 
NodeNotFound(EX_SIBLING_NOT_FOUND % nid) - - def next_sibling(self): - """ - return the next sibling for this node if any - """ - parent = self.parent - if parent is None: - # root node has no sibling - return None - index = parent.children.index(self) - try: - return parent.children[index+1] - except IndexError: - return None - - def previous_sibling(self): - """ - return the previous sibling for this node if any - """ - parent = self.parent - if parent is None: - # root node has no sibling - return None - index = parent.children.index(self) - if index > 0: - return parent.children[index-1] - return None - - def get_node_by_id(self, nid): - """ - return node in whole hierarchy that has given id - """ - root = self.root() - try: - return root.get_child_by_id(nid, 1) - except NodeNotFound : - raise NodeNotFound(EX_NODE_NOT_FOUND % nid) - - def get_child_by_id(self, nid, recurse=None): - """ - return child of given id - """ - if self.id == nid: - return self - for c in self.children : - if recurse: - try: - return c.get_child_by_id(nid, 1) - except NodeNotFound : - continue - if c.id == nid : - return c - raise NodeNotFound(EX_CHILD_NOT_FOUND % nid) - - def get_child_by_path(self, path): - """ - return child of given path (path is a list of ids) - """ - if len(path) > 0 and path[0] == self.id: - if len(path) == 1 : - return self - else : - for c in self.children : - try: - return c.get_child_by_path(path[1:]) - except NodeNotFound : - pass - raise NodeNotFound(EX_CHILD_NOT_FOUND % path) - - def depth(self): - """ - return depth of this node in the tree - """ - if self.parent is not None: - return 1 + self.parent.depth() - else : - return 0 - - def depth_down(self): - """ - return depth of the tree from this node - """ - if self.children: - return 1 + max([c.depth_down() for c in self.children]) - return 1 - - def width(self): - """ - return the width of the tree from this node - """ - return len(self.leaves()) - - def root(self): - """ - return the root node of the 
tree - """ - if self.parent is not None: - return self.parent.root() - return self - - def leaves(self): - """ - return a list with all the leaves nodes descendant from this node - """ - leaves = [] - if self.children: - for child in self.children: - leaves += child.leaves() - return leaves - else: - return [self] - - def flatten(self, _list=None): - """ - return a list with all the nodes descendant from this node - """ - if _list is None: - _list = [] - _list.append(self) - for c in self.children: - c.flatten(_list) - return _list - - def lineage(self): - """ - return list of parents up to root node - """ - lst = [self] - if self.parent is not None: - lst.extend(self.parent.lineage()) - return lst - -class VNode(Node, VisitedMixIn): - """a visitable node - """ - pass - - -class BinaryNode(VNode): - """a binary node (i.e. only two children - """ - def __init__(self, lhs=None, rhs=None) : - VNode.__init__(self) - if lhs is not None or rhs is not None: - assert lhs and rhs - self.append(lhs) - self.append(rhs) - - def remove(self, child): - """remove the child and replace this node with the other child - """ - self.children.remove(child) - self.parent.replace(self, self.children[0]) - - def get_parts(self): - """ - return the left hand side and the right hand side of this node - """ - return self.children[0], self.children[1] - - - -if sys.version_info[0:2] >= (2, 2): - list_class = list -else: - from UserList import UserList - list_class = UserList - -class ListNode(VNode, list_class): - """Used to manipulate Nodes as Lists - """ - def __init__(self): - list_class.__init__(self) - VNode.__init__(self) - self.children = self - - def __str__(self, indent=0): - return '%s%s %s' % (indent*' ', self.__class__.__name__, - ', '.join([str(v) for v in self])) - - def append(self, child): - """add a node to children""" - list_class.append(self, child) - child.parent = self - - def insert(self, index, child): - """add a node to children""" - list_class.insert(self, index, 
child) - child.parent = self - - def remove(self, child): - """add a node to children""" - list_class.remove(self, child) - child.parent = None - - def pop(self, index): - """add a node to children""" - child = list_class.pop(self, index) - child.parent = None - - def __iter__(self): - return list_class.__iter__(self) - -# construct list from tree #################################################### - -def post_order_list(node, filter_func=no_filter): - """ - create a list with tree nodes for which the function returned true - in a post order fashion - """ - l, stack = [], [] - poped, index = 0, 0 - while node: - if filter_func(node): - if node.children and not poped: - stack.append((node, index)) - index = 0 - node = node.children[0] - else: - l.append(node) - index += 1 - try: - node = stack[-1][0].children[index] - except IndexError: - node = None - else: - node = None - poped = 0 - if node is None and stack: - node, index = stack.pop() - poped = 1 - return l - -def pre_order_list(node, filter_func=no_filter): - """ - create a list with tree nodes for which the function returned true - in a pre order fashion - """ - l, stack = [], [] - poped, index = 0, 0 - while node: - if filter_func(node): - if not poped: - l.append(node) - if node.children and not poped: - stack.append((node, index)) - index = 0 - node = node.children[0] - else: - index += 1 - try: - node = stack[-1][0].children[index] - except IndexError: - node = None - else: - node = None - poped = 0 - if node is None and len(stack) > 1: - node, index = stack.pop() - poped = 1 - return l - -class PostfixedDepthFirstIterator(FilteredIterator): - """a postfixed depth first iterator, designed to be used with visitors - """ - def __init__(self, node, filter_func=None): - FilteredIterator.__init__(self, node, post_order_list, filter_func) - -class PrefixedDepthFirstIterator(FilteredIterator): - """a prefixed depth first iterator, designed to be used with visitors - """ - def __init__(self, node, 
filter_func=None): - FilteredIterator.__init__(self, node, pre_order_list, filter_func) - diff --git a/pylibs/logilab/common/umessage.py b/pylibs/logilab/common/umessage.py deleted file mode 100644 index 85d564c0..00000000 --- a/pylibs/logilab/common/umessage.py +++ /dev/null @@ -1,167 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Unicode email support (extends email from stdlib). 
- - - - -""" -__docformat__ = "restructuredtext en" - -import email -from encodings import search_function -import sys -if sys.version_info >= (2, 5): - from email.utils import parseaddr, parsedate - from email.header import decode_header -else: - from email.Utils import parseaddr, parsedate - from email.Header import decode_header - -from datetime import datetime - -try: - from mx.DateTime import DateTime -except ImportError: - DateTime = datetime - -import logilab.common as lgc - - -def decode_QP(string): - parts = [] - for decoded, charset in decode_header(string): - if not charset : - charset = 'iso-8859-15' - parts.append(unicode(decoded, charset, 'replace')) - - return u' '.join(parts) - -def message_from_file(fd): - try: - return UMessage(email.message_from_file(fd)) - except email.Errors.MessageParseError: - return '' - -def message_from_string(string): - try: - return UMessage(email.message_from_string(string)) - except email.Errors.MessageParseError: - return '' - -class UMessage: - """Encapsulates an email.Message instance and returns only unicode objects. 
- """ - - def __init__(self, message): - self.message = message - - # email.Message interface ################################################# - - def get(self, header, default=None): - value = self.message.get(header, default) - if value: - return decode_QP(value) - return value - - def get_all(self, header, default=()): - return [decode_QP(val) for val in self.message.get_all(header, default) - if val is not None] - - def get_payload(self, index=None, decode=False): - message = self.message - if index is None: - payload = message.get_payload(index, decode) - if isinstance(payload, list): - return [UMessage(msg) for msg in payload] - if message.get_content_maintype() != 'text': - return payload - - charset = message.get_content_charset() or 'iso-8859-1' - if search_function(charset) is None: - charset = 'iso-8859-1' - return unicode(payload or '', charset, "replace") - else: - payload = UMessage(message.get_payload(index, decode)) - return payload - - def is_multipart(self): - return self.message.is_multipart() - - def get_boundary(self): - return self.message.get_boundary() - - def walk(self): - for part in self.message.walk(): - yield UMessage(part) - - def get_content_maintype(self): - return unicode(self.message.get_content_maintype()) - - def get_content_type(self): - return unicode(self.message.get_content_type()) - - def get_filename(self, failobj=None): - value = self.message.get_filename(failobj) - if value is failobj: - return value - try: - return unicode(value) - except UnicodeDecodeError: - return u'error decoding filename' - - # other convenience methods ############################################### - - def headers(self): - """return an unicode string containing all the message's headers""" - values = [] - for header in self.message.keys(): - values.append(u'%s: %s' % (header, self.get(header))) - return '\n'.join(values) - - def multi_addrs(self, header): - """return a list of 2-uple (name, address) for the given address (which - is expected to 
be an header containing address such as from, to, cc...) - """ - persons = [] - for person in self.get_all(header, ()): - name, mail = parseaddr(person) - persons.append((name, mail)) - return persons - - def date(self, alternative_source=False, return_str=False): - """return a datetime object for the email's date or None if no date is - set or if it can't be parsed - """ - value = self.get('date') - if value is None and alternative_source: - unix_from = self.message.get_unixfrom() - if unix_from is not None: - try: - value = unix_from.split(" ", 2)[2] - except IndexError: - pass - if value is not None: - datetuple = parsedate(value) - if datetuple: - if lgc.USE_MX_DATETIME: - return DateTime(*datetuple[:6]) - return datetime(*datetuple[:6]) - elif not return_str: - return None - return value diff --git a/pylibs/logilab/common/ureports/__init__.py b/pylibs/logilab/common/ureports/__init__.py deleted file mode 100644 index dcffcfa3..00000000 --- a/pylibs/logilab/common/ureports/__init__.py +++ /dev/null @@ -1,174 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Universal report objects and some formatting drivers. 
- -A way to create simple reports using python objects, primarily designed to be -formatted as text and html. -""" -from __future__ import generators -__docformat__ = "restructuredtext en" - -import sys -from cStringIO import StringIO -from StringIO import StringIO as UStringIO - -from logilab.common.textutils import linesep - - -def get_nodes(node, klass): - """return an iterator on all children node of the given klass""" - for child in node.children: - if isinstance(child, klass): - yield child - # recurse (FIXME: recursion controled by an option) - for grandchild in get_nodes(child, klass): - yield grandchild - -def layout_title(layout): - """try to return the layout's title as string, return None if not found - """ - for child in layout.children: - if isinstance(child, Title): - return ' '.join([node.data for node in get_nodes(child, Text)]) - -def build_summary(layout, level=1): - """make a summary for the report, including X level""" - assert level > 0 - level -= 1 - summary = List(klass='summary') - for child in layout.children: - if not isinstance(child, Section): - continue - label = layout_title(child) - if not label and not child.id: - continue - if not child.id: - child.id = label.replace(' ', '-') - node = Link('#'+child.id, label=label or child.id) - # FIXME: Three following lines produce not very compliant - # docbook: there are some useless . They might be - # replaced by the three commented lines but this then produces - # a bug in html display... 
- if level and [n for n in child.children if isinstance(n, Section)]: - node = Paragraph([node, build_summary(child, level)]) - summary.append(node) -# summary.append(node) -# if level and [n for n in child.children if isinstance(n, Section)]: -# summary.append(build_summary(child, level)) - return summary - - -class BaseWriter(object): - """base class for ureport writers""" - - def format(self, layout, stream=None, encoding=None): - """format and write the given layout into the stream object - - unicode policy: unicode strings may be found in the layout; - try to call stream.write with it, but give it back encoded using - the given encoding if it fails - """ - if stream is None: - stream = sys.stdout - if not encoding: - encoding = getattr(stream, 'encoding', 'UTF-8') - self.encoding = encoding or 'UTF-8' - self.__compute_funcs = [] - self.out = stream - self.begin_format(layout) - layout.accept(self) - self.end_format(layout) - - def format_children(self, layout): - """recurse on the layout children and call their accept method - (see the Visitor pattern) - """ - for child in getattr(layout, 'children', ()): - child.accept(self) - - def writeln(self, string=''): - """write a line in the output buffer""" - self.write(string + linesep) - - def write(self, string): - """write a string in the output buffer""" - try: - self.out.write(string) - except UnicodeEncodeError: - self.out.write(string.encode(self.encoding)) - - def begin_format(self, layout): - """begin to format a layout""" - self.section = 0 - - def end_format(self, layout): - """finished to format a layout""" - - def get_table_content(self, table): - """trick to get table content without actually writing it - - return an aligned list of lists containing table cells values as string - """ - result = [[]] - cols = table.cols - for cell in self.compute_content(table): - if cols == 0: - result.append([]) - cols = table.cols - cols -= 1 - result[-1].append(cell) - # fill missing cells - while len(result[-1]) < 
cols: - result[-1].append('') - return result - - def compute_content(self, layout): - """trick to compute the formatting of children layout before actually - writing it - - return an iterator on strings (one for each child element) - """ - # use cells ! - def write(data): - try: - stream.write(data) - except UnicodeEncodeError: - stream.write(data.encode(self.encoding)) - def writeln(data=''): - try: - stream.write(data+linesep) - except UnicodeEncodeError: - stream.write(data.encode(self.encoding)+linesep) - self.write = write - self.writeln = writeln - self.__compute_funcs.append((write, writeln)) - for child in layout.children: - stream = UStringIO() - child.accept(self) - yield stream.getvalue() - self.__compute_funcs.pop() - try: - self.write, self.writeln = self.__compute_funcs[-1] - except IndexError: - del self.write - del self.writeln - - -from logilab.common.ureports.nodes import * -from logilab.common.ureports.text_writer import TextWriter -from logilab.common.ureports.html_writer import HTMLWriter diff --git a/pylibs/logilab/common/ureports/docbook_writer.py b/pylibs/logilab/common/ureports/docbook_writer.py deleted file mode 100644 index e75cbe09..00000000 --- a/pylibs/logilab/common/ureports/docbook_writer.py +++ /dev/null @@ -1,139 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""HTML formatting drivers for ureports""" -from __future__ import generators -__docformat__ = "restructuredtext en" - -from logilab.common.ureports import HTMLWriter - -class DocbookWriter(HTMLWriter): - """format layouts as HTML""" - - def begin_format(self, layout): - """begin to format a layout""" - super(HTMLWriter, self).begin_format(layout) - if self.snippet is None: - self.writeln('') - self.writeln(""" - -""") - - def end_format(self, layout): - """finished to format a layout""" - if self.snippet is None: - self.writeln('') - - def visit_section(self, layout): - """display a section (using (level 0) or
)""" - if self.section == 0: - tag = "chapter" - else: - tag = "section" - self.section += 1 - self.writeln(self._indent('<%s%s>' % (tag, self.handle_attrs(layout)))) - self.format_children(layout) - self.writeln(self._indent(''% tag)) - self.section -= 1 - - def visit_title(self, layout): - """display a title using Codestin Search App') - - def visit_table(self, layout): - """display a table as html""" - self.writeln(self._indent(' Codestin Search App' \ - % (self.handle_attrs(layout), layout.title))) - self.writeln(self._indent(' '% layout.cols)) - for i in range(layout.cols): - self.writeln(self._indent(' ' % i)) - - table_content = self.get_table_content(layout) - # write headers - if layout.cheaders: - self.writeln(self._indent(' ')) - self._write_row(table_content[0]) - self.writeln(self._indent(' ')) - table_content = table_content[1:] - elif layout.rcheaders: - self.writeln(self._indent(' ')) - self._write_row(table_content[-1]) - self.writeln(self._indent(' ')) - table_content = table_content[:-1] - # write body - self.writeln(self._indent(' ')) - for i in range(len(table_content)): - row = table_content[i] - self.writeln(self._indent(' ')) - for j in range(len(row)): - cell = row[j] or ' ' - self.writeln(self._indent(' %s' % cell)) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - self.writeln(self._indent(' ')) - - def _write_row(self, row): - """write content of row (using )""" - self.writeln(' ') - for j in range(len(row)): - cell = row[j] or ' ' - self.writeln(' %s' % cell) - self.writeln(self._indent(' ')) - - def visit_list(self, layout): - """display a list (using )""" - self.writeln(self._indent(' ' % self.handle_attrs(layout))) - for row in list(self.compute_content(layout)): - self.writeln(' %s' % row) - self.writeln(self._indent(' ')) - - def visit_paragraph(self, layout): - """display links (using )""" - self.write(self._indent(' ')) - self.format_children(layout) - self.writeln('') - - def 
visit_span(self, layout): - """display links (using

)""" - #TODO: translate in docbook - self.write('' % self.handle_attrs(layout)) - self.format_children(layout) - self.write('') - - def visit_link(self, layout): - """display links (using )""" - self.write('%s' % (layout.url, - self.handle_attrs(layout), - layout.label)) - - def visit_verbatimtext(self, layout): - """display verbatim text (using )""" - self.writeln(self._indent(' ')) - self.write(layout.data.replace('&', '&').replace('<', '<')) - self.writeln(self._indent(' ')) - - def visit_text(self, layout): - """add some text""" - self.write(layout.data.replace('&', '&').replace('<', '<')) - - def _indent(self, string): - """correctly indent string according to section""" - return ' ' * 2*(self.section) + string diff --git a/pylibs/logilab/common/ureports/html_writer.py b/pylibs/logilab/common/ureports/html_writer.py deleted file mode 100644 index 1d095034..00000000 --- a/pylibs/logilab/common/ureports/html_writer.py +++ /dev/null @@ -1,131 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""HTML formatting drivers for ureports""" -__docformat__ = "restructuredtext en" - -from cgi import escape - -from logilab.common.ureports import BaseWriter - - -class HTMLWriter(BaseWriter): - """format layouts as HTML""" - - def __init__(self, snippet=None): - super(HTMLWriter, self).__init__() - self.snippet = snippet - - def handle_attrs(self, layout): - """get an attribute string from layout member attributes""" - attrs = '' - klass = getattr(layout, 'klass', None) - if klass: - attrs += ' class="%s"' % klass - nid = getattr(layout, 'id', None) - if nid: - attrs += ' id="%s"' % nid - return attrs - - def begin_format(self, layout): - """begin to format a layout""" - super(HTMLWriter, self).begin_format(layout) - if self.snippet is None: - self.writeln('') - self.writeln('') - - def end_format(self, layout): - """finished to format a layout""" - if self.snippet is None: - self.writeln('') - self.writeln('') - - - def visit_section(self, layout): - """display a section as html, using div + h[section level]""" - self.section += 1 - self.writeln('' % self.handle_attrs(layout)) - self.format_children(layout) - self.writeln('') - self.section -= 1 - - def visit_title(self, layout): - """display a title using """ - self.write('' % (self.section, self.handle_attrs(layout))) - self.format_children(layout) - self.writeln('' % self.section) - - def visit_table(self, layout): - """display a table as html""" - self.writeln('' % self.handle_attrs(layout)) - table_content = self.get_table_content(layout) - for i in range(len(table_content)): - row = table_content[i] - if i == 0 and layout.rheaders: - self.writeln('') - elif i+1 == len(table_content) and layout.rrheaders: - self.writeln('') - else: - self.writeln('' % (i%2 and 'even' or 'odd')) - for j in range(len(row)): - cell = row[j] or ' ' - if (layout.rheaders and i == 0) or \ - (layout.cheaders and j == 0) or \ - (layout.rrheaders and i+1 == len(table_content)) or \ - (layout.rcheaders and j+1 == len(row)): - 
self.writeln('%s' % cell) - else: - self.writeln('%s' % cell) - self.writeln('') - self.writeln('') - - def visit_list(self, layout): - """display a list as html""" - self.writeln('' % self.handle_attrs(layout)) - for row in list(self.compute_content(layout)): - self.writeln('

  • %s
  • ' % row) - self.writeln('') - - def visit_paragraph(self, layout): - """display links (using

    )""" - self.write('

    ') - self.format_children(layout) - self.write('

    ') - - def visit_span(self, layout): - """display links (using

    )""" - self.write('' % self.handle_attrs(layout)) - self.format_children(layout) - self.write('') - - def visit_link(self, layout): - """display links (using )""" - self.write(' %s' % (layout.url, - self.handle_attrs(layout), - layout.label)) - def visit_verbatimtext(self, layout): - """display verbatim text (using

    )"""
    -        self.write('
    ')
    -        self.write(layout.data.replace('&', '&').replace('<', '<'))
    -        self.write('
    ') - - def visit_text(self, layout): - """add some text""" - data = layout.data - if layout.escaped: - data = data.replace('&', '&').replace('<', '<') - self.write(data) diff --git a/pylibs/logilab/common/ureports/text_writer.py b/pylibs/logilab/common/ureports/text_writer.py deleted file mode 100644 index 04c8f263..00000000 --- a/pylibs/logilab/common/ureports/text_writer.py +++ /dev/null @@ -1,140 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""Text formatting drivers for ureports""" -__docformat__ = "restructuredtext en" - -from logilab.common.textutils import linesep -from logilab.common.ureports import BaseWriter - - -TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^'] -BULLETS = ['*', '-'] - -class TextWriter(BaseWriter): - """format layouts as text - (ReStructured inspiration but not totally handled yet) - """ - def begin_format(self, layout): - super(TextWriter, self).begin_format(layout) - self.list_level = 0 - self.pending_urls = [] - - def visit_section(self, layout): - """display a section as text - """ - self.section += 1 - self.writeln() - self.format_children(layout) - if self.pending_urls: - self.writeln() - for label, url in self.pending_urls: - self.writeln('.. 
_`%s`: %s' % (label, url)) - self.pending_urls = [] - self.section -= 1 - self.writeln() - - def visit_title(self, layout): - title = ''.join(list(self.compute_content(layout))) - self.writeln(title) - try: - self.writeln(TITLE_UNDERLINES[self.section] * len(title)) - except IndexError: - print "FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT" - - def visit_paragraph(self, layout): - """enter a paragraph""" - self.format_children(layout) - self.writeln() - - def visit_span(self, layout): - """enter a span""" - self.format_children(layout) - - def visit_table(self, layout): - """display a table as text""" - table_content = self.get_table_content(layout) - # get columns width - cols_width = [0]*len(table_content[0]) - for row in table_content: - for index in range(len(row)): - col = row[index] - cols_width[index] = max(cols_width[index], len(col)) - if layout.klass == 'field': - self.field_table(layout, table_content, cols_width) - else: - self.default_table(layout, table_content, cols_width) - self.writeln() - - def default_table(self, layout, table_content, cols_width): - """format a table""" - cols_width = [size+1 for size in cols_width] - format_strings = ' '.join(['%%-%ss'] * len(cols_width)) - format_strings = format_strings % tuple(cols_width) - format_strings = format_strings.split(' ') - table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n' - headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n' - # FIXME: layout.cheaders - self.write(table_linesep) - for i in range(len(table_content)): - self.write('|') - line = table_content[i] - for j in range(len(line)): - self.write(format_strings[j] % line[j]) - self.write('|') - if i == 0 and layout.rheaders: - self.write(headsep) - else: - self.write(table_linesep) - - def field_table(self, layout, table_content, cols_width): - """special case for field table""" - assert layout.cols == 2 - format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0]) - for field, value in table_content: - 
self.write(format_string % (field, value)) - - - def visit_list(self, layout): - """display a list layout as text""" - bullet = BULLETS[self.list_level % len(BULLETS)] - indent = ' ' * self.list_level - self.list_level += 1 - for child in layout.children: - self.write('%s%s%s ' % (linesep, indent, bullet)) - child.accept(self) - self.list_level -= 1 - - def visit_link(self, layout): - """add a hyperlink""" - if layout.label != layout.url: - self.write('`%s`_' % layout.label) - self.pending_urls.append( (layout.label, layout.url) ) - else: - self.write(layout.url) - - def visit_verbatimtext(self, layout): - """display a verbatim layout as text (so difficult ;) - """ - self.writeln('::\n') - for line in layout.data.splitlines(): - self.writeln(' ' + line) - self.writeln() - - def visit_text(self, layout): - """add some text""" - self.write(layout.data) diff --git a/pylibs/logilab/common/urllib2ext.py b/pylibs/logilab/common/urllib2ext.py deleted file mode 100644 index 08797a41..00000000 --- a/pylibs/logilab/common/urllib2ext.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging -import urllib2 - -import kerberos as krb - -class GssapiAuthError(Exception): - """raised on error during authentication process""" - -import re -RGX = re.compile('(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I) - -def get_negociate_value(headers): - for authreq in headers.getheaders('www-authenticate'): - match = RGX.search(authreq) - if match: - return match.group(1) - -class HTTPGssapiAuthHandler(urllib2.BaseHandler): - """Negotiate HTTP authentication using context from GSSAPI""" - - handler_order = 400 # before Digest Auth - - def __init__(self): - self._reset() - - def _reset(self): - self._retried = 0 - self._context = None - - def clean_context(self): - if self._context is not None: - krb.authGSSClientClean(self._context) - - def http_error_401(self, req, fp, code, msg, headers): - try: - if self._retried > 5: - raise urllib2.HTTPError(req.get_full_url(), 401, - "negotiate auth failed", headers, 
None) - self._retried += 1 - logging.debug('gssapi handler, try %s' % self._retried) - negotiate = get_negociate_value(headers) - if negotiate is None: - logging.debug('no negociate found in a www-authenticate header') - return None - logging.debug('HTTPGssapiAuthHandler: negotiate 1 is %r' % negotiate) - result, self._context = krb.authGSSClientInit("HTTP@%s" % req.get_host()) - if result < 1: - raise GssapiAuthError("HTTPGssapiAuthHandler: init failed with %d" % result) - result = krb.authGSSClientStep(self._context, negotiate) - if result < 0: - raise GssapiAuthError("HTTPGssapiAuthHandler: step 1 failed with %d" % result) - client_response = krb.authGSSClientResponse(self._context) - logging.debug('HTTPGssapiAuthHandler: client response is %s...' % client_response[:10]) - req.add_unredirected_header('Authorization', "Negotiate %s" % client_response) - server_response = self.parent.open(req) - negotiate = get_negociate_value(server_response.info()) - if negotiate is None: - logging.warning('HTTPGssapiAuthHandler: failed to authenticate server') - else: - logging.debug('HTTPGssapiAuthHandler negotiate 2: %s' % negotiate) - result = krb.authGSSClientStep(self._context, negotiate) - if result < 1: - raise GssapiAuthError("HTTPGssapiAuthHandler: step 2 failed with %d" % result) - return server_response - except GssapiAuthError, exc: - logging.error(repr(exc)) - finally: - self.clean_context() - self._reset() - -if __name__ == '__main__': - import sys - # debug - import httplib - httplib.HTTPConnection.debuglevel = 1 - httplib.HTTPSConnection.debuglevel = 1 - # debug - import logging - logging.basicConfig(level=logging.DEBUG) - # handle cookies - import cookielib - cj = cookielib.CookieJar() - ch = urllib2.HTTPCookieProcessor(cj) - # test with url sys.argv[1] - h = HTTPGssapiAuthHandler() - response = urllib2.build_opener(h, ch).open(sys.argv[1]) - print '\nresponse: %s\n--------------\n' % response.code, response.info() diff --git a/pylibs/logilab/common/visitor.py 
b/pylibs/logilab/common/visitor.py deleted file mode 100644 index 802d2bef..00000000 --- a/pylibs/logilab/common/visitor.py +++ /dev/null @@ -1,107 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""A generic visitor abstract implementation. 
- - - - -""" -__docformat__ = "restructuredtext en" - -def no_filter(_): - return 1 - -# Iterators ################################################################### -class FilteredIterator(object): - - def __init__(self, node, list_func, filter_func=None): - self._next = [(node, 0)] - if filter_func is None: - filter_func = no_filter - self._list = list_func(node, filter_func) - - def next(self): - try: - return self._list.pop(0) - except : - return None - -# Base Visitor ################################################################ -class Visitor(object): - - def __init__(self, iterator_class, filter_func=None): - self._iter_class = iterator_class - self.filter = filter_func - - def visit(self, node, *args, **kargs): - """ - launch the visit on a given node - - call 'open_visit' before the beginning of the visit, with extra args - given - when all nodes have been visited, call the 'close_visit' method - """ - self.open_visit(node, *args, **kargs) - return self.close_visit(self._visit(node)) - - def _visit(self, node): - iterator = self._get_iterator(node) - n = iterator.next() - while n: - result = n.accept(self) - n = iterator.next() - return result - - def _get_iterator(self, node): - return self._iter_class(node, self.filter) - - def open_visit(self, *args, **kargs): - """ - method called at the beginning of the visit - """ - pass - - def close_visit(self, result): - """ - method called at the end of the visit - """ - return result - -# standard visited mixin ###################################################### -class VisitedMixIn(object): - """ - Visited interface allow node visitors to use the node - """ - def get_visit_name(self): - """ - return the visit name for the mixed class. 
When calling 'accept', the - method <'visit_' + name returned by this method> will be called on the - visitor - """ - try: - return self.TYPE.replace('-', '_') - except: - return self.__class__.__name__.lower() - - def accept(self, visitor, *args, **kwargs): - func = getattr(visitor, 'visit_%s' % self.get_visit_name()) - return func(self, *args, **kwargs) - - def leave(self, visitor, *args, **kwargs): - func = getattr(visitor, 'leave_%s' % self.get_visit_name()) - return func(self, *args, **kwargs) diff --git a/pylibs/logilab/common/xmlrpcutils.py b/pylibs/logilab/common/xmlrpcutils.py deleted file mode 100644 index 1d30d829..00000000 --- a/pylibs/logilab/common/xmlrpcutils.py +++ /dev/null @@ -1,131 +0,0 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. -# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . 
-"""XML-RPC utilities.""" -__docformat__ = "restructuredtext en" - -import xmlrpclib -from base64 import encodestring -#from cStringIO import StringIO - -ProtocolError = xmlrpclib.ProtocolError - -## class BasicAuthTransport(xmlrpclib.Transport): -## def __init__(self, username=None, password=None): -## self.username = username -## self.password = password -## self.verbose = None -## self.has_ssl = httplib.__dict__.has_key("HTTPConnection") - -## def request(self, host, handler, request_body, verbose=None): -## # issue XML-RPC request -## if self.has_ssl: -## if host.startswith("https:"): h = httplib.HTTPSConnection(host) -## else: h = httplib.HTTPConnection(host) -## else: h = httplib.HTTP(host) - -## h.putrequest("POST", handler) - -## # required by HTTP/1.1 -## if not self.has_ssl: # HTTPConnection already does 1.1 -## h.putheader("Host", host) -## h.putheader("Connection", "close") - -## if request_body: h.send(request_body) -## if self.has_ssl: -## response = h.getresponse() -## if response.status != 200: -## raise xmlrpclib.ProtocolError(host + handler, -## response.status, -## response.reason, -## response.msg) -## file = response.fp -## else: -## errcode, errmsg, headers = h.getreply() -## if errcode != 200: -## raise xmlrpclib.ProtocolError(host + handler, errcode, -## errmsg, headers) - -## file = h.getfile() - -## return self.parse_response(file) - - - -class AuthMixin: - """basic http authentication mixin for xmlrpc transports""" - - def __init__(self, username, password, encoding): - self.verbose = 0 - self.username = username - self.password = password - self.encoding = encoding - - def request(self, host, handler, request_body, verbose=0): - """issue XML-RPC request""" - h = self.make_connection(host) - h.putrequest("POST", handler) - # required by XML-RPC - h.putheader("User-Agent", self.user_agent) - h.putheader("Content-Type", "text/xml") - h.putheader("Content-Length", str(len(request_body))) - h.putheader("Host", host) - 
h.putheader("Connection", "close") - # basic auth - if self.username is not None and self.password is not None: - h.putheader("AUTHORIZATION", "Basic %s" % encodestring( - "%s:%s" % (self.username, self.password)).replace("\012", "")) - h.endheaders() - # send body - if request_body: - h.send(request_body) - # get and check reply - errcode, errmsg, headers = h.getreply() - if errcode != 200: - raise ProtocolError(host + handler, errcode, errmsg, headers) - file = h.getfile() -## # FIXME: encoding ??? iirc, this fix a bug in xmlrpclib but... -## data = h.getfile().read() -## if self.encoding != 'UTF-8': -## data = data.replace("version='1.0'", -## "version='1.0' encoding='%s'" % self.encoding) -## result = StringIO() -## result.write(data) -## result.seek(0) -## return self.parse_response(result) - return self.parse_response(file) - -class BasicAuthTransport(AuthMixin, xmlrpclib.Transport): - """basic http authentication transport""" - -class BasicAuthSafeTransport(AuthMixin, xmlrpclib.SafeTransport): - """basic https authentication transport""" - - -def connect(url, user=None, passwd=None, encoding='ISO-8859-1'): - """return an xml rpc server on , using user / password if specified - """ - if user or passwd: - assert user and passwd is not None - if url.startswith('https://'): - transport = BasicAuthSafeTransport(user, passwd, encoding) - else: - transport = BasicAuthTransport(user, passwd, encoding) - else: - transport = None - server = xmlrpclib.ServerProxy(url, transport, encoding=encoding) - return server diff --git a/pylibs/logilab/common/xmlutils.py b/pylibs/logilab/common/xmlutils.py deleted file mode 100644 index d383b9d5..00000000 --- a/pylibs/logilab/common/xmlutils.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of logilab-common. 
-# -# logilab-common is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# logilab-common is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . -"""XML utilities. - -This module contains useful functions for parsing and using XML data. For the -moment, there is only one function that can parse the data inside a processing -instruction and return a Python dictionary. - - - - -""" -__docformat__ = "restructuredtext en" - -import re - -RE_DOUBLE_QUOTE = re.compile('([\w\-\.]+)="([^"]+)"') -RE_SIMPLE_QUOTE = re.compile("([\w\-\.]+)='([^']+)'") - -def parse_pi_data(pi_data): - """ - Utility function that parses the data contained in an XML - processing instruction and returns a dictionary of keywords and their - associated values (most of the time, the processing instructions contain - data like ``keyword="value"``, if a keyword is not associated to a value, - for example ``keyword``, it will be associated to ``None``). - - :param pi_data: data contained in an XML processing instruction. - :type pi_data: unicode - - :returns: Dictionary of the keywords (Unicode strings) associated to - their values (Unicode strings) as they were defined in the - data. 
- :rtype: dict - """ - results = {} - for elt in pi_data.split(): - if RE_DOUBLE_QUOTE.match(elt): - kwd, val = RE_DOUBLE_QUOTE.match(elt).groups() - elif RE_SIMPLE_QUOTE.match(elt): - kwd, val = RE_SIMPLE_QUOTE.match(elt).groups() - else: - kwd, val = elt, None - results[kwd] = val - return results diff --git a/pylibs/pyflakes/__init__.py b/pylibs/pyflakes/__init__.py deleted file mode 100644 index 652a8f47..00000000 --- a/pylibs/pyflakes/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ - -__version__ = '0.4.0' diff --git a/pylibs/pyflakes/checker.py b/pylibs/pyflakes/checker.py deleted file mode 100644 index fa2494e0..00000000 --- a/pylibs/pyflakes/checker.py +++ /dev/null @@ -1,635 +0,0 @@ -# -*- test-case-name: pyflakes -*- -# (c) 2005-2010 Divmod, Inc. -# See LICENSE file for details - -import __builtin__ -import os.path -import _ast - -from pyflakes import messages - - -# utility function to iterate over an AST node's children, adapted -# from Python 2.6's standard ast module -try: - import ast - iter_child_nodes = ast.iter_child_nodes -except (ImportError, AttributeError): - def iter_child_nodes(node, astcls=_ast.AST): - """ - Yield all direct child nodes of *node*, that is, all fields that are nodes - and all items of fields that are lists of nodes. - """ - for name in node._fields: - field = getattr(node, name, None) - if isinstance(field, astcls): - yield field - elif isinstance(field, list): - for item in field: - yield item - - -class Binding(object): - """ - Represents the binding of a value to a name. - - The checker uses this to keep track of which names have been bound and - which names have not. See L{Assignment} for a special type of binding that - is checked with stricter rules. 
- - @ivar used: pair of (L{Scope}, line-number) indicating the scope and - line number that this binding was last used - """ - - def __init__(self, name, source): - self.name = name - self.source = source - self.used = False - - - def __str__(self): - return self.name - - - def __repr__(self): - return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__, - self.name, - self.source.lineno, - id(self)) - - - -class UnBinding(Binding): - '''Created by the 'del' operator.''' - - - -class Importation(Binding): - """ - A binding created by an import statement. - - @ivar fullName: The complete name given to the import statement, - possibly including multiple dotted components. - @type fullName: C{str} - """ - def __init__(self, name, source): - self.fullName = name - name = name.split('.')[0] - super(Importation, self).__init__(name, source) - - - -class Argument(Binding): - """ - Represents binding a name as an argument. - """ - - - -class Assignment(Binding): - """ - Represents binding a name with an explicit assignment. - - The checker will raise warnings for any Assignment that isn't used. Also, - the checker does not consider assignments in tuple/list unpacking to be - Assignments, rather it treats them as simple Bindings. - """ - - - -class FunctionDefinition(Binding): - _property_decorator = False - - - -class ExportBinding(Binding): - """ - A binding created by an C{__all__} assignment. If the names in the list - can be determined statically, they will be treated as names for export and - additional checking applied to them. - - The only C{__all__} assignment that can be recognized is one which takes - the value of a literal list containing literal strings. For example:: - - __all__ = ["foo", "bar"] - - Names which are imported and not otherwise used but appear in the value of - C{__all__} will not have an unused import warning reported for them. - """ - def names(self): - """ - Return a list of the names referenced by this binding. 
- """ - names = [] - if isinstance(self.source, _ast.List): - for node in self.source.elts: - if isinstance(node, _ast.Str): - names.append(node.s) - return names - - - -class Scope(dict): - importStarred = False # set to True when import * is found - - - def __repr__(self): - return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), dict.__repr__(self)) - - - def __init__(self): - super(Scope, self).__init__() - - - -class ClassScope(Scope): - pass - - - -class FunctionScope(Scope): - """ - I represent a name scope for a function. - - @ivar globals: Names declared 'global' in this function. - """ - def __init__(self): - super(FunctionScope, self).__init__() - self.globals = {} - - - -class ModuleScope(Scope): - pass - - -# Globally defined names which are not attributes of the __builtin__ module. -_MAGIC_GLOBALS = ['__file__', '__builtins__'] - - - -class Checker(object): - """ - I check the cleanliness and sanity of Python code. - - @ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements - of the list are two-tuples. The first element is the callable passed - to L{deferFunction}. The second element is a copy of the scope stack - at the time L{deferFunction} was called. - - @ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for - callables which are deferred assignment checks. - """ - - nodeDepth = 0 - traceTree = False - - def __init__(self, tree, filename=None): - if filename is None: - filename = '(none)' - self._deferredFunctions = [] - self._deferredAssignments = [] - self.dead_scopes = [] - self.messages = [] - self.filename = filename - self.scopeStack = [ModuleScope()] - self.futuresAllowed = True - self.handleChildren(tree) - self._runDeferred(self._deferredFunctions) - # Set _deferredFunctions to None so that deferFunction will fail - # noisily if called after we've run through the deferred functions. 
- self._deferredFunctions = None - self._runDeferred(self._deferredAssignments) - # Set _deferredAssignments to None so that deferAssignment will fail - # noisly if called after we've run through the deferred assignments. - self._deferredAssignments = None - del self.scopeStack[1:] - self.popScope() - self.check_dead_scopes() - - - def deferFunction(self, callable): - ''' - Schedule a function handler to be called just before completion. - - This is used for handling function bodies, which must be deferred - because code later in the file might modify the global scope. When - `callable` is called, the scope at the time this is called will be - restored, however it will contain any new bindings added to it. - ''' - self._deferredFunctions.append((callable, self.scopeStack[:])) - - - def deferAssignment(self, callable): - """ - Schedule an assignment handler to be called just after deferred - function handlers. - """ - self._deferredAssignments.append((callable, self.scopeStack[:])) - - - def _runDeferred(self, deferred): - """ - Run the callables in C{deferred} using their associated scope stack. - """ - for handler, scope in deferred: - self.scopeStack = scope - handler() - - - def scope(self): - return self.scopeStack[-1] - scope = property(scope) - - def popScope(self): - self.dead_scopes.append(self.scopeStack.pop()) - - - def check_dead_scopes(self): - """ - Look at scopes which have been fully examined and report names in them - which were imported but unused. - """ - for scope in self.dead_scopes: - export = isinstance(scope.get('__all__'), ExportBinding) - if export: - all = scope['__all__'].names() - if os.path.split(self.filename)[1] != '__init__.py': - # Look for possible mistakes in the export list - undefined = set(all) - set(scope) - for name in undefined: - self.report( - messages.UndefinedExport, - scope['__all__'].source, - name) - else: - all = [] - - # Look for imported names that aren't used. 
- for importation in scope.itervalues(): - if isinstance(importation, Importation): - if not importation.used and importation.name not in all: - self.report( - messages.UnusedImport, - importation.source, - importation.name) - - - def pushFunctionScope(self): - self.scopeStack.append(FunctionScope()) - - def pushClassScope(self): - self.scopeStack.append(ClassScope()) - - def report(self, messageClass, *args, **kwargs): - self.messages.append(messageClass(self.filename, *args, **kwargs)) - - def handleChildren(self, tree): - for node in iter_child_nodes(tree): - self.handleNode(node, tree) - - def isDocstring(self, node): - """ - Determine if the given node is a docstring, as long as it is at the - correct place in the node tree. - """ - return isinstance(node, _ast.Str) or \ - (isinstance(node, _ast.Expr) and - isinstance(node.value, _ast.Str)) - - def handleNode(self, node, parent): - node.parent = parent - if self.traceTree: - print ' ' * self.nodeDepth + node.__class__.__name__ - self.nodeDepth += 1 - if self.futuresAllowed and not \ - (isinstance(node, _ast.ImportFrom) or self.isDocstring(node)): - self.futuresAllowed = False - nodeType = node.__class__.__name__.upper() - try: - handler = getattr(self, nodeType) - handler(node) - finally: - self.nodeDepth -= 1 - if self.traceTree: - print ' ' * self.nodeDepth + 'end ' + node.__class__.__name__ - - def ignore(self, node): - pass - - # "stmt" type nodes - RETURN = DELETE = PRINT = WHILE = IF = WITH = RAISE = TRYEXCEPT = \ - TRYFINALLY = ASSERT = EXEC = EXPR = handleChildren - - CONTINUE = BREAK = PASS = ignore - - # "expr" type nodes - BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = COMPARE = \ - CALL = REPR = ATTRIBUTE = SUBSCRIPT = LIST = TUPLE = handleChildren - - NUM = STR = ELLIPSIS = ignore - - # "slice" type nodes - SLICE = EXTSLICE = INDEX = handleChildren - - # expression contexts are node instances too, though being constants - LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore - - # same 
for operators - AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \ - BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \ - EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore - - # additional node types - COMPREHENSION = EXCEPTHANDLER = KEYWORD = handleChildren - - def addBinding(self, loc, value, reportRedef=True): - '''Called when a binding is altered. - - - `loc` is the location (an object with lineno and optionally - col_offset attributes) of the statement responsible for the change - - `value` is the optional new value, a Binding instance, associated - with the binding; if None, the binding is deleted if it exists. - - if `reportRedef` is True (default), rebinding while unused will be - reported. - ''' - if (isinstance(self.scope.get(value.name), FunctionDefinition) - and isinstance(value, FunctionDefinition)): - if not value._property_decorator: - self.report(messages.RedefinedFunction, - loc, value.name, self.scope[value.name].source) - - if not isinstance(self.scope, ClassScope): - for scope in self.scopeStack[::-1]: - existing = scope.get(value.name) - if (isinstance(existing, Importation) - and not existing.used - and (not isinstance(value, Importation) or value.fullName == existing.fullName) - and reportRedef): - - self.report(messages.RedefinedWhileUnused, - loc, value.name, scope[value.name].source) - - if isinstance(value, UnBinding): - try: - del self.scope[value.name] - except KeyError: - self.report(messages.UndefinedName, loc, value.name) - else: - self.scope[value.name] = value - - def GLOBAL(self, node): - """ - Keep track of globals declarations. 
- """ - if isinstance(self.scope, FunctionScope): - self.scope.globals.update(dict.fromkeys(node.names)) - - def LISTCOMP(self, node): - # handle generators before element - for gen in node.generators: - self.handleNode(gen, node) - self.handleNode(node.elt, node) - - GENERATOREXP = SETCOMP = LISTCOMP - - # dictionary comprehensions; introduced in Python 2.7 - def DICTCOMP(self, node): - for gen in node.generators: - self.handleNode(gen, node) - self.handleNode(node.key, node) - self.handleNode(node.value, node) - - def FOR(self, node): - """ - Process bindings for loop variables. - """ - vars = [] - def collectLoopVars(n): - if isinstance(n, _ast.Name): - vars.append(n.id) - elif isinstance(n, _ast.expr_context): - return - else: - for c in iter_child_nodes(n): - collectLoopVars(c) - - collectLoopVars(node.target) - for varn in vars: - if (isinstance(self.scope.get(varn), Importation) - # unused ones will get an unused import warning - and self.scope[varn].used): - self.report(messages.ImportShadowedByLoopVar, - node, varn, self.scope[varn].source) - - self.handleChildren(node) - - def NAME(self, node): - """ - Handle occurrence of Name (which can be a load/store/delete access.) - """ - # Locate the name in locals / function / globals scopes. 
- if isinstance(node.ctx, (_ast.Load, _ast.AugLoad)): - # try local scope - importStarred = self.scope.importStarred - try: - self.scope[node.id].used = (self.scope, node) - except KeyError: - pass - else: - return - - # try enclosing function scopes - - for scope in self.scopeStack[-2:0:-1]: - importStarred = importStarred or scope.importStarred - if not isinstance(scope, FunctionScope): - continue - try: - scope[node.id].used = (self.scope, node) - except KeyError: - pass - else: - return - - # try global scope - - importStarred = importStarred or self.scopeStack[0].importStarred - try: - self.scopeStack[0][node.id].used = (self.scope, node) - except KeyError: - if ((not hasattr(__builtin__, node.id)) - and node.id not in _MAGIC_GLOBALS - and not importStarred): - if (os.path.basename(self.filename) == '__init__.py' and - node.id == '__path__'): - # the special name __path__ is valid only in packages - pass - else: - self.report(messages.UndefinedName, node, node.id) - elif isinstance(node.ctx, (_ast.Store, _ast.AugStore)): - # if the name hasn't already been defined in the current scope - if isinstance(self.scope, FunctionScope) and node.id not in self.scope: - # for each function or module scope above us - for scope in self.scopeStack[:-1]: - if not isinstance(scope, (FunctionScope, ModuleScope)): - continue - # if the name was defined in that scope, and the name has - # been accessed already in the current scope, and hasn't - # been declared global - if (node.id in scope - and scope[node.id].used - and scope[node.id].used[0] is self.scope - and node.id not in self.scope.globals): - # then it's probably a mistake - self.report(messages.UndefinedLocal, - scope[node.id].used[1], - node.id, - scope[node.id].source) - break - - if isinstance(node.parent, - (_ast.For, _ast.comprehension, _ast.Tuple, _ast.List)): - binding = Binding(node.id, node) - elif (node.id == '__all__' and - isinstance(self.scope, ModuleScope)): - binding = ExportBinding(node.id, 
node.parent.value) - else: - binding = Assignment(node.id, node) - if node.id in self.scope: - binding.used = self.scope[node.id].used - self.addBinding(node, binding) - elif isinstance(node.ctx, _ast.Del): - if isinstance(self.scope, FunctionScope) and \ - node.id in self.scope.globals: - del self.scope.globals[node.id] - else: - self.addBinding(node, UnBinding(node.id, node)) - else: - # must be a Param context -- this only happens for names in function - # arguments, but these aren't dispatched through here - raise RuntimeError( - "Got impossible expression context: %r" % (node.ctx,)) - - - def FUNCTIONDEF(self, node): - # the decorators attribute is called decorator_list as of Python 2.6 - if hasattr(node, 'decorators'): - for deco in node.decorators: - self.handleNode(deco, node) - else: - for deco in node.decorator_list: - self.handleNode(deco, node) - - # Check for property decorator - func_def = FunctionDefinition(node.name, node) - for decorator in node.decorator_list: - if getattr(decorator, 'attr', None) in ('setter', 'deleter'): - func_def._property_decorator = True - - self.addBinding(node, func_def) - self.LAMBDA(node) - - def LAMBDA(self, node): - for default in node.args.defaults: - self.handleNode(default, node) - - def runFunction(): - args = [] - - def addArgs(arglist): - for arg in arglist: - if isinstance(arg, _ast.Tuple): - addArgs(arg.elts) - else: - if arg.id in args: - self.report(messages.DuplicateArgument, - node, arg.id) - args.append(arg.id) - - self.pushFunctionScope() - addArgs(node.args.args) - # vararg/kwarg identifiers are not Name nodes - if node.args.vararg: - args.append(node.args.vararg) - if node.args.kwarg: - args.append(node.args.kwarg) - for name in args: - self.addBinding(node, Argument(name, node), reportRedef=False) - if isinstance(node.body, list): - # case for FunctionDefs - for stmt in node.body: - self.handleNode(stmt, node) - else: - # case for Lambdas - self.handleNode(node.body, node) - def 
checkUnusedAssignments(): - """ - Check to see if any assignments have not been used. - """ - for name, binding in self.scope.iteritems(): - if (not binding.used and not name in self.scope.globals - and isinstance(binding, Assignment)): - self.report(messages.UnusedVariable, - binding.source, name) - self.deferAssignment(checkUnusedAssignments) - self.popScope() - - self.deferFunction(runFunction) - - - def CLASSDEF(self, node): - """ - Check names used in a class definition, including its decorators, base - classes, and the body of its definition. Additionally, add its name to - the current scope. - """ - # decorator_list is present as of Python 2.6 - for deco in getattr(node, 'decorator_list', []): - self.handleNode(deco, node) - for baseNode in node.bases: - self.handleNode(baseNode, node) - self.pushClassScope() - for stmt in node.body: - self.handleNode(stmt, node) - self.popScope() - self.addBinding(node, Binding(node.name, node)) - - def ASSIGN(self, node): - self.handleNode(node.value, node) - for target in node.targets: - self.handleNode(target, node) - - def AUGASSIGN(self, node): - # AugAssign is awkward: must set the context explicitly and visit twice, - # once with AugLoad context, once with AugStore context - node.target.ctx = _ast.AugLoad() - self.handleNode(node.target, node) - self.handleNode(node.value, node) - node.target.ctx = _ast.AugStore() - self.handleNode(node.target, node) - - def IMPORT(self, node): - for alias in node.names: - name = alias.asname or alias.name - importation = Importation(name, node) - self.addBinding(node, importation) - - def IMPORTFROM(self, node): - if node.module == '__future__': - if not self.futuresAllowed: - self.report(messages.LateFutureImport, node, - [n.name for n in node.names]) - else: - self.futuresAllowed = False - - for alias in node.names: - if alias.name == '*': - self.scope.importStarred = True - self.report(messages.ImportStarUsed, node, node.module) - continue - name = alias.asname or alias.name - 
importation = Importation(name, node) - if node.module == '__future__': - importation.used = (self.scope, node) - self.addBinding(node, importation) diff --git a/pylibs/pyflakes/messages.py b/pylibs/pyflakes/messages.py deleted file mode 100644 index 73bf4cc3..00000000 --- a/pylibs/pyflakes/messages.py +++ /dev/null @@ -1,96 +0,0 @@ -# (c) 2005 Divmod, Inc. See LICENSE file for details - -class Message(object): - message = '' - message_args = () - def __init__(self, filename, loc, use_column=True): - self.filename = filename - self.lineno = loc.lineno - self.col = getattr(loc, 'col_offset', None) if use_column else None - - def __str__(self): - return '%s:%s: %s' % (self.filename, self.lineno, self.message % self.message_args) - - -class UnusedImport(Message): - message = '%r imported but unused' - def __init__(self, filename, loc, name): - Message.__init__(self, filename, loc, use_column=False) - self.message_args = (name,) - - -class RedefinedWhileUnused(Message): - message = 'redefinition of unused %r from line %r' - def __init__(self, filename, loc, name, orig_loc): - Message.__init__(self, filename, loc) - self.message_args = (name, orig_loc.lineno) - - -class ImportShadowedByLoopVar(Message): - message = 'import %r from line %r shadowed by loop variable' - def __init__(self, filename, loc, name, orig_loc): - Message.__init__(self, filename, loc) - self.message_args = (name, orig_loc.lineno) - - -class ImportStarUsed(Message): - message = "'from %s import *' used; unable to detect undefined names" - def __init__(self, filename, loc, modname): - Message.__init__(self, filename, loc) - self.message_args = (modname,) - - -class UndefinedName(Message): - message = 'undefined name %r' - def __init__(self, filename, loc, name): - Message.__init__(self, filename, loc) - self.message_args = (name,) - - - -class UndefinedExport(Message): - message = 'undefined name %r in __all__' - def __init__(self, filename, loc, name): - Message.__init__(self, filename, loc) - 
self.message_args = (name,) - - - -class UndefinedLocal(Message): - message = "local variable %r (defined in enclosing scope on line %r) referenced before assignment" - def __init__(self, filename, loc, name, orig_loc): - Message.__init__(self, filename, loc) - self.message_args = (name, orig_loc.lineno) - - -class DuplicateArgument(Message): - message = 'duplicate argument %r in function definition' - def __init__(self, filename, loc, name): - Message.__init__(self, filename, loc) - self.message_args = (name,) - - -class RedefinedFunction(Message): - message = 'redefinition of function %r from line %r' - def __init__(self, filename, loc, name, orig_loc): - Message.__init__(self, filename, loc) - self.message_args = (name, orig_loc.lineno) - - -class LateFutureImport(Message): - message = 'future import(s) %r after other statements' - def __init__(self, filename, loc, names): - Message.__init__(self, filename, loc) - self.message_args = (names,) - - -class UnusedVariable(Message): - """ - Indicates that a variable has been explicity assigned to but not actually - used. - """ - - message = 'local variable %r is assigned to but never used' - def __init__(self, filename, loc, names): - Message.__init__(self, filename, loc) - self.message_args = (names,) diff --git a/pylibs/pylint/__init__.py b/pylibs/pylint/__init__.py deleted file mode 100644 index 0c4bd134..00000000 --- a/pylibs/pylint/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" Copyright (c) 2002-2008 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr -""" - diff --git a/pylibs/pylint/__pkginfo__.py b/pylibs/pylint/__pkginfo__.py deleted file mode 100644 index 37721f93..00000000 --- a/pylibs/pylint/__pkginfo__.py +++ /dev/null @@ -1,68 +0,0 @@ -# pylint: disable=W0622,C0103 -# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""pylint packaging information""" - -modname = distname = 'pylint' - -numversion = (0, 25, 0) -version = '.'.join([str(num) for num in numversion]) - -install_requires = ['logilab-common >= 0.53.0', 'logilab-astng >= 0.21.1'] - -license = 'GPL' -copyright = 'Logilab S.A.' 
-description = "python code static checker" -web = "http://www.logilab.org/project/%s" % distname -ftp = "ftp://ftp.logilab.org/pub/%s" % modname -mailinglist = "mailto://python-projects@lists.logilab.org" -author = 'Logilab' -author_email = 'python-projects@lists.logilab.org' - -classifiers = ['Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General Public License (GPL)', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Topic :: Software Development :: Debuggers', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Software Development :: Testing', - ] - - -long_desc = """\ - Pylint is a Python source code analyzer which looks for programming - errors, helps enforcing a coding standard and sniffs for some code - smells (as defined in Martin Fowler's Refactoring book) - . - Pylint can be seen as another PyChecker since nearly all tests you - can do with PyChecker can also be done with Pylint. However, Pylint - offers some more features, like checking length of lines of code, - checking if variable names are well-formed according to your coding - standard, or checking if declared interfaces are truly implemented, - and much more. - . - Additionally, it is possible to write plugins to add your own checks. - . - Pylint is shipped with "pylint-gui", "pyreverse" (UML diagram generator) - and "symilar" (an independent similarities checker).""" - -from os.path import join -scripts = [join('bin', filename) - for filename in ('pylint', 'pylint-gui', "symilar", "epylint", - "pyreverse")] - diff --git a/pylibs/pylint/checkers/__init__.py b/pylibs/pylint/checkers/__init__.py deleted file mode 100644 index d33cacf3..00000000 --- a/pylibs/pylint/checkers/__init__.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). 
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""utilities methods and classes for checkers - -Base id of standard checkers (used in msg and report ids): -01: base -02: classes -03: format -04: import -05: misc -06: variables -07: exceptions -08: similar -09: design_analysis -10: newstyle -11: typecheck -12: logging -13: string_format -14-50: not yet used: reserved for future internal checkers. -51-99: perhaps used: reserved for external checkers - -The raw_metrics checker has no number associated since it doesn't emit any -messages nor reports. XXX not true, emit a 07 report ! 
- -""" - -import tokenize -from os import listdir -from os.path import dirname, join, isdir, splitext - -from logilab.astng.utils import ASTWalker -from logilab.common.configuration import OptionsProviderMixIn - -from pylint.reporters import diff_string, EmptyReport - -def table_lines_from_stats(stats, old_stats, columns): - """get values listed in from and , - and return a formated list of values, designed to be given to a - ureport.Table object - """ - lines = [] - for m_type in columns: - new = stats[m_type] - format = str - if isinstance(new, float): - format = lambda num: '%.3f' % num - old = old_stats.get(m_type) - if old is not None: - diff_str = diff_string(old, new) - old = format(old) - else: - old, diff_str = 'NC', 'NC' - lines += (m_type.replace('_', ' '), format(new), old, diff_str) - return lines - - -class BaseChecker(OptionsProviderMixIn, ASTWalker): - """base class for checkers""" - # checker name (you may reuse an existing one) - name = None - # options level (0 will be displaying in --help, 1 in --long-help) - level = 1 - # ordered list of options to control the ckecker behaviour - options = () - # messages issued by this checker - msgs = {} - # reports issued by this checker - reports = () - - def __init__(self, linter=None): - """checker instances should have the linter as argument - - linter is an object implementing ILinter - """ - ASTWalker.__init__(self, self) - self.name = self.name.lower() - OptionsProviderMixIn.__init__(self) - self.linter = linter - # messages that are active for the current check - self.active_msgs = set() - - def add_message(self, msg_id, line=None, node=None, args=None): - """add a message of a given type""" - self.linter.add_message(msg_id, line, node, args) - - def package_dir(self): - """return the base directory for the analysed package""" - return dirname(self.linter.base_file) - - - # dummy methods implementing the IChecker interface - - def open(self): - """called before visiting project (i.e set of 
modules)""" - - def close(self): - """called after visiting project (i.e set of modules)""" - -class BaseRawChecker(BaseChecker): - """base class for raw checkers""" - - def process_module(self, node): - """process a module - - the module's content is accessible via the stream object - - stream must implement the readline method - """ - stream = node.file_stream - stream.seek(0) - self.process_tokens(tokenize.generate_tokens(stream.readline)) - - def process_tokens(self, tokens): - """should be overridden by subclasses""" - raise NotImplementedError() - - -PY_EXTS = ('.py', '.pyc', '.pyo', '.pyw', '.so', '.dll') - -def initialize(linter): - """initialize linter with checkers in this package """ - package_load(linter, __path__[0]) - -def package_load(linter, directory): - """load all module and package in the given directory, looking for a - 'register' function in each one, used to register pylint checkers - """ - globs = globals() - imported = {} - for filename in listdir(directory): - basename, extension = splitext(filename) - if basename in imported or basename == '__pycache__': - continue - if extension in PY_EXTS and basename != '__init__' or ( - not extension and basename != 'CVS' and - isdir(join(directory, basename))): - try: - module = __import__(basename, globs, globs, None) - except ValueError: - # empty module name (usually emacs auto-save files) - continue - except ImportError, exc: - import sys - print >> sys.stderr, "Problem importing module %s: %s" % (filename, exc) - else: - if hasattr(module, 'register'): - module.register(linter) - imported[basename] = 1 - -__all__ = ('CheckerHandler', 'BaseChecker', 'initialize', 'package_load') diff --git a/pylibs/pylint/checkers/base.py b/pylibs/pylint/checkers/base.py deleted file mode 100644 index a0629a28..00000000 --- a/pylibs/pylint/checkers/base.py +++ /dev/null @@ -1,757 +0,0 @@ -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# Copyright (c) 2009-2010 Arista Networks, Inc. 
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""basic checker for Python code""" - - -from logilab import astng -from logilab.common.ureports import Table -from logilab.astng import are_exclusive - -from pylint.interfaces import IASTNGChecker -from pylint.reporters import diff_string -from pylint.checkers import BaseChecker, EmptyReport -from pylint.checkers.utils import check_messages - - -import re - -# regex for class/function/variable/constant name -CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$') -MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$') -CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$') -COMP_VAR_RGX = re.compile('[A-Za-z_][A-Za-z0-9_]*$') -DEFAULT_NAME_RGX = re.compile('[a-z_][a-z0-9_]{2,30}$') -# do not require a doc string on system methods -NO_REQUIRED_DOC_RGX = re.compile('__.*__') - -del re - -def in_loop(node): - """return True if the node is inside a kind of for loop""" - parent = node.parent - while parent is not None: - if isinstance(parent, (astng.For, astng.ListComp, astng.SetComp, - astng.DictComp, astng.GenExpr)): - return True - parent = parent.parent - return False - -def in_nested_list(nested_list, obj): - """return true if the object is an element of or of a nested - list - """ - for elmt in 
nested_list: - if isinstance(elmt, (list, tuple)): - if in_nested_list(elmt, obj): - return True - elif elmt == obj: - return True - return False - -def report_by_type_stats(sect, stats, old_stats): - """make a report of - - * percentage of different types documented - * percentage of different types with a bad name - """ - # percentage of different types documented and/or with a bad name - nice_stats = {} - for node_type in ('module', 'class', 'method', 'function'): - try: - total = stats[node_type] - except KeyError: - raise EmptyReport() - nice_stats[node_type] = {} - if total != 0: - try: - documented = total - stats['undocumented_'+node_type] - percent = (documented * 100.) / total - nice_stats[node_type]['percent_documented'] = '%.2f' % percent - except KeyError: - nice_stats[node_type]['percent_documented'] = 'NC' - try: - percent = (stats['badname_'+node_type] * 100.) / total - nice_stats[node_type]['percent_badname'] = '%.2f' % percent - except KeyError: - nice_stats[node_type]['percent_badname'] = 'NC' - lines = ('type', 'number', 'old number', 'difference', - '%documented', '%badname') - for node_type in ('module', 'class', 'method', 'function'): - new = stats[node_type] - old = old_stats.get(node_type, None) - if old is not None: - diff_str = diff_string(old, new) - else: - old, diff_str = 'NC', 'NC' - lines += (node_type, str(new), str(old), diff_str, - nice_stats[node_type].get('percent_documented', '0'), - nice_stats[node_type].get('percent_badname', '0')) - sect.append(Table(children=lines, cols=6, rheaders=1)) - -class _BasicChecker(BaseChecker): - __implements__ = IASTNGChecker - name = 'basic' - -class BasicErrorChecker(_BasicChecker): - msgs = { - 'E0100': ('__init__ method is a generator', - 'Used when the special class method __init__ is turned into a ' - 'generator by a yield in its body.'), - 'E0101': ('Explicit return in __init__', - 'Used when the special class method __init__ has an explicit \ - return value.'), - 'E0102': ('%s already 
defined line %s', - 'Used when a function / class / method is redefined.'), - 'E0103': ('%r not properly in loop', - 'Used when break or continue keywords are used outside a loop.'), - - 'E0104': ('Return outside function', - 'Used when a "return" statement is found outside a function or ' - 'method.'), - 'E0105': ('Yield outside function', - 'Used when a "yield" statement is found outside a function or ' - 'method.'), - 'E0106': ('Return with argument inside generator', - 'Used when a "return" statement with an argument is found ' - 'outside in a generator function or method (e.g. with some ' - '"yield" statements).'), - 'E0107': ("Use of the non-existent %s operator", - "Used when you attempt to use the C-style pre-increment or" - "pre-decrement operator -- and ++, which doesn't exist in Python."), - } - - def __init__(self, linter): - _BasicChecker.__init__(self, linter) - - @check_messages('E0102') - def visit_class(self, node): - self._check_redefinition('class', node) - - @check_messages('E0100', 'E0101', 'E0102', 'E0106') - def visit_function(self, node): - self._check_redefinition(node.is_method() and 'method' or 'function', node) - # checks for max returns, branch, return in __init__ - returns = node.nodes_of_class(astng.Return, - skip_klass=(astng.Function, astng.Class)) - if node.is_method() and node.name == '__init__': - if node.is_generator(): - self.add_message('E0100', node=node) - else: - values = [r.value for r in returns] - if [v for v in values if not (v is None or - (isinstance(v, astng.Const) and v.value is None) - or (isinstance(v, astng.Name) and v.name == 'None'))]: - self.add_message('E0101', node=node) - elif node.is_generator(): - # make sure we don't mix non-None returns and yields - for retnode in returns: - if isinstance(retnode.value, astng.Const) and \ - retnode.value.value is not None: - self.add_message('E0106', node=node, - line=retnode.fromlineno) - - @check_messages('E0104') - def visit_return(self, node): - if not 
isinstance(node.frame(), astng.Function): - self.add_message('E0104', node=node) - - @check_messages('E0105') - def visit_yield(self, node): - if not isinstance(node.frame(), astng.Function): - self.add_message('E0105', node=node) - - @check_messages('E0103') - def visit_continue(self, node): - self._check_in_loop(node, 'continue') - - @check_messages('E0103') - def visit_break(self, node): - self._check_in_loop(node, 'break') - - @check_messages('E0107') - def visit_unaryop(self, node): - """check use of the non-existent ++ adn -- operator operator""" - if ((node.op in '+-') and - isinstance(node.operand, astng.UnaryOp) and - (node.operand.op == node.op)): - self.add_message('E0107', node=node, args=node.op*2) - - def _check_in_loop(self, node, node_name): - """check that a node is inside a for or while loop""" - _node = node.parent - while _node: - if isinstance(_node, (astng.For, astng.While)): - break - _node = _node.parent - else: - self.add_message('E0103', node=node, args=node_name) - - def _check_redefinition(self, redeftype, node): - """check for redefinition of a function / method / class name""" - defined_self = node.parent.frame()[node.name] - if defined_self is not node and not are_exclusive(node, defined_self): - self.add_message('E0102', node=node, - args=(redeftype, defined_self.fromlineno)) - - - -class BasicChecker(_BasicChecker): - """checks for : - * doc strings - * modules / classes / functions / methods / arguments / variables name - * number of arguments, local variables, branches, returns and statements in -functions, methods - * required module attributes - * dangerous default values as arguments - * redefinition of function / method / class - * uses of the global statement - """ - - __implements__ = IASTNGChecker - - name = 'basic' - msgs = { - 'W0101': ('Unreachable code', - 'Used when there is some code behind a "return" or "raise" \ - statement, which will never be accessed.'), - 'W0102': ('Dangerous default value %s as argument', - 
'Used when a mutable value as list or dictionary is detected in \ - a default value for an argument.'), - 'W0104': ('Statement seems to have no effect', - 'Used when a statement doesn\'t have (or at least seems to) \ - any effect.'), - 'W0105': ('String statement has no effect', - 'Used when a string is used as a statement (which of course \ - has no effect). This is a particular case of W0104 with its \ - own message so you can easily disable it if you\'re using \ - those strings as documentation, instead of comments.'), - 'W0106': ('Expression "%s" is assigned to nothing', - 'Used when an expression that is not a function call is assigned\ - to nothing. Probably something else was intended.'), - 'W0108': ('Lambda may not be necessary', - 'Used when the body of a lambda expression is a function call \ - on the same argument list as the lambda itself; such lambda \ - expressions are in all but a few cases replaceable with the \ - function being called in the body of the lambda.'), - 'W0109': ("Duplicate key %r in dictionary", - "Used when a dictionary expression binds the same key multiple \ - times."), - 'W0122': ('Use of the exec statement', - 'Used when you use the "exec" statement, to discourage its \ - usage. That doesn\'t mean you can not use it !'), - - 'W0141': ('Used builtin function %r', - 'Used when a black listed builtin function is used (see the ' - 'bad-function option). Usual black listed functions are the ones ' - 'like map, or filter , where Python offers now some cleaner ' - 'alternative like list comprehension.'), - 'W0142': ('Used * or ** magic', - 'Used when a function or method is called using `*args` or ' - '`**kwargs` to dispatch arguments. 
This doesn\'t improve ' - 'readability and should be used with care.'), - 'W0150': ("%s statement in finally block may swallow exception", - "Used when a break or a return statement is found inside the \ - finally clause of a try...finally block: the exceptions raised \ - in the try clause will be silently swallowed instead of being \ - re-raised."), - 'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?', - 'A call of assert on a tuple will always evaluate to true if ' - 'the tuple is not empty, and will always evaluate to false if ' - 'it is.'), - - 'C0121': ('Missing required attribute "%s"', # W0103 - 'Used when an attribute required for modules is missing.'), - - } - - options = (('required-attributes', - {'default' : (), 'type' : 'csv', - 'metavar' : '', - 'help' : 'Required attributes for module, separated by a ' - 'comma'} - ), - ('bad-functions', - {'default' : ('map', 'filter', 'apply', 'input'), - 'type' :'csv', 'metavar' : '', - 'help' : 'List of builtins function names that should not be ' - 'used, separated by a comma'} - ), - ) - reports = ( ('RP0101', 'Statistics by type', report_by_type_stats), ) - - def __init__(self, linter): - _BasicChecker.__init__(self, linter) - self.stats = None - self._tryfinallys = None - - def open(self): - """initialize visit variables and statistics - """ - self._tryfinallys = [] - self.stats = self.linter.add_stats(module=0, function=0, - method=0, class_=0) - - def visit_module(self, node): - """check module name, docstring and required arguments - """ - self.stats['module'] += 1 - for attr in self.config.required_attributes: - if attr not in node: - self.add_message('C0121', node=node, args=attr) - - def visit_class(self, node): - """check module name, docstring and redefinition - increment branch counter - """ - self.stats['class'] += 1 - - @check_messages('W0104', 'W0105') - def visit_discard(self, node): - """check for various kind of statements without effect""" - expr = node.value - if 
isinstance(expr, astng.Const) and isinstance(expr.value, - basestring): - # treat string statement in a separated message - self.add_message('W0105', node=node) - return - # ignore if this is : - # * a direct function call - # * the unique child of a try/except body - # * a yield (which are wrapped by a discard node in _ast XXX) - # warn W0106 if we have any underlying function call (we can't predict - # side effects), else W0104 - if (isinstance(expr, (astng.Yield, astng.CallFunc)) or - (isinstance(node.parent, astng.TryExcept) and - node.parent.body == [node])): - return - if any(expr.nodes_of_class(astng.CallFunc)): - self.add_message('W0106', node=node, args=expr.as_string()) - else: - self.add_message('W0104', node=node) - - @check_messages('W0108') - def visit_lambda(self, node): - """check whether or not the lambda is suspicious - """ - # if the body of the lambda is a call expression with the same - # argument list as the lambda itself, then the lambda is - # possibly unnecessary and at least suspicious. - if node.args.defaults: - # If the arguments of the lambda include defaults, then a - # judgment cannot be made because there is no way to check - # that the defaults defined by the lambda are the same as - # the defaults defined by the function called in the body - # of the lambda. - return - call = node.body - if not isinstance(call, astng.CallFunc): - # The body of the lambda must be a function call expression - # for the lambda to be unnecessary. - return - # XXX are lambda still different with astng >= 0.18 ? - # *args and **kwargs need to be treated specially, since they - # are structured differently between the lambda and the function - # call (in the lambda they appear in the args.args list and are - # indicated as * and ** by two bits in the lambda's flags, but - # in the function call they are omitted from the args list and - # are indicated by separate attributes on the function call node). 
- ordinary_args = list(node.args.args) - if node.args.kwarg: - if (not call.kwargs - or not isinstance(call.kwargs, astng.Name) - or node.args.kwarg != call.kwargs.name): - return - elif call.kwargs: - return - if node.args.vararg: - if (not call.starargs - or not isinstance(call.starargs, astng.Name) - or node.args.vararg != call.starargs.name): - return - elif call.starargs: - return - # The "ordinary" arguments must be in a correspondence such that: - # ordinary_args[i].name == call.args[i].name. - if len(ordinary_args) != len(call.args): - return - for i in xrange(len(ordinary_args)): - if not isinstance(call.args[i], astng.Name): - return - if node.args.args[i].name != call.args[i].name: - return - self.add_message('W0108', line=node.fromlineno, node=node) - - def visit_function(self, node): - """check function name, docstring, arguments, redefinition, - variable names, max locals - """ - self.stats[node.is_method() and 'method' or 'function'] += 1 - # check for dangerous default values as arguments - for default in node.args.defaults: - try: - value = default.infer().next() - except astng.InferenceError: - continue - if isinstance(value, (astng.Dict, astng.List)): - if value is default: - msg = default.as_string() - else: - msg = '%s (%s)' % (default.as_string(), value.as_string()) - self.add_message('W0102', node=node, args=(msg,)) - - @check_messages('W0101', 'W0150') - def visit_return(self, node): - """1 - check is the node has a right sibling (if so, that's some - unreachable code) - 2 - check is the node is inside the finally clause of a try...finally - block - """ - self._check_unreachable(node) - # Is it inside final body of a try...finally bloc ? 
- self._check_not_in_finally(node, 'return', (astng.Function,)) - - @check_messages('W0101') - def visit_continue(self, node): - """check is the node has a right sibling (if so, that's some unreachable - code) - """ - self._check_unreachable(node) - - @check_messages('W0101', 'W0150') - def visit_break(self, node): - """1 - check is the node has a right sibling (if so, that's some - unreachable code) - 2 - check is the node is inside the finally clause of a try...finally - block - """ - # 1 - Is it right sibling ? - self._check_unreachable(node) - # 2 - Is it inside final body of a try...finally bloc ? - self._check_not_in_finally(node, 'break', (astng.For, astng.While,)) - - @check_messages('W0101') - def visit_raise(self, node): - """check is the node has a right sibling (if so, that's some unreachable - code) - """ - self._check_unreachable(node) - - @check_messages('W0122') - def visit_exec(self, node): - """just print a warning on exec statements""" - self.add_message('W0122', node=node) - - @check_messages('W0141', 'W0142') - def visit_callfunc(self, node): - """visit a CallFunc node -> check if this is not a blacklisted builtin - call and check for * or ** use - """ - if isinstance(node.func, astng.Name): - name = node.func.name - # ignore the name if it's not a builtin (i.e. 
not defined in the - # locals nor globals scope) - if not (name in node.frame() or - name in node.root()): - if name in self.config.bad_functions: - self.add_message('W0141', node=node, args=name) - if node.starargs or node.kwargs: - scope = node.scope() - if isinstance(scope, astng.Function): - toprocess = [(n, vn) for (n, vn) in ((node.starargs, scope.args.vararg), - (node.kwargs, scope.args.kwarg)) if n] - if toprocess: - for cfnode, fargname in toprocess[:]: - if getattr(cfnode, 'name', None) == fargname: - toprocess.remove((cfnode, fargname)) - if not toprocess: - return # W0142 can be skipped - self.add_message('W0142', node=node.func) - - @check_messages('W0199') - def visit_assert(self, node): - """check the use of an assert statement on a tuple.""" - if node.fail is None and isinstance(node.test, astng.Tuple) and \ - len(node.test.elts) == 2: - self.add_message('W0199', line=node.fromlineno, node=node) - - @check_messages('W0109') - def visit_dict(self, node): - """check duplicate key in dictionary""" - keys = set() - for k, v in node.items: - if isinstance(k, astng.Const): - key = k.value - if key in keys: - self.add_message('W0109', node=node, args=key) - keys.add(key) - - def visit_tryfinally(self, node): - """update try...finally flag""" - self._tryfinallys.append(node) - - def leave_tryfinally(self, node): - """update try...finally flag""" - self._tryfinallys.pop() - - def _check_unreachable(self, node): - """check unreachable code""" - unreach_stmt = node.next_sibling() - if unreach_stmt is not None: - self.add_message('W0101', node=unreach_stmt) - - def _check_not_in_finally(self, node, node_name, breaker_classes=()): - """check that a node is not inside a finally clause of a - try...finally statement. 
- If we found before a try...finally bloc a parent which its type is - in breaker_classes, we skip the whole check.""" - # if self._tryfinallys is empty, we're not a in try...finally bloc - if not self._tryfinallys: - return - # the node could be a grand-grand...-children of the try...finally - _parent = node.parent - _node = node - while _parent and not isinstance(_parent, breaker_classes): - if hasattr(_parent, 'finalbody') and _node in _parent.finalbody: - self.add_message('W0150', node=node, args=node_name) - return - _node = _parent - _parent = _node.parent - - - -class NameChecker(_BasicChecker): - msgs = { - 'C0102': ('Black listed name "%s"', - 'Used when the name is listed in the black list (unauthorized \ - names).'), - 'C0103': ('Invalid name "%s" (should match %s)', - 'Used when the name doesn\'t match the regular expression \ - associated to its type (constant, variable, class...).'), - - } - options = (('module-rgx', - {'default' : MOD_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'module names'} - ), - ('const-rgx', - {'default' : CONST_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'module level names'} - ), - ('class-rgx', - {'default' : CLASS_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'class names'} - ), - ('function-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'function names'} - ), - ('method-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'method names'} - ), - ('attr-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'instance attribute names'} - ), - ('argument-rgx', 
- {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'argument names'}), - ('variable-rgx', - {'default' : DEFAULT_NAME_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'variable names'} - ), - ('inlinevar-rgx', - {'default' : COMP_VAR_RGX, - 'type' :'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match correct ' - 'list comprehension / generator expression variable \ - names'} - ), - # XXX use set - ('good-names', - {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'), - 'type' :'csv', 'metavar' : '', - 'help' : 'Good variable names which should always be accepted,' - ' separated by a comma'} - ), - ('bad-names', - {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'), - 'type' :'csv', 'metavar' : '', - 'help' : 'Bad variable names which should always be refused, ' - 'separated by a comma'} - ), - ) - - def open(self): - self.stats = self.linter.add_stats(badname_module=0, - badname_class=0, badname_function=0, - badname_method=0, badname_attr=0, - badname_const=0, - badname_variable=0, - badname_inlinevar=0, - badname_argument=0) - - @check_messages('C0102', 'C0103') - def visit_module(self, node): - self._check_name('module', node.name.split('.')[-1], node) - - @check_messages('C0102', 'C0103') - def visit_class(self, node): - self._check_name('class', node.name, node) - for attr, anodes in node.instance_attrs.items(): - self._check_name('attr', attr, anodes[0]) - - @check_messages('C0102', 'C0103') - def visit_function(self, node): - self._check_name(node.is_method() and 'method' or 'function', - node.name, node) - # check arguments name - args = node.args.args - if args is not None: - self._recursive_check_names(args, node) - - @check_messages('C0102', 'C0103') - def visit_assname(self, node): - """check module level assigned names""" - frame = node.frame() - ass_type = node.ass_type() - if 
isinstance(ass_type, (astng.Comprehension, astng.Comprehension)): - self._check_name('inlinevar', node.name, node) - elif isinstance(frame, astng.Module): - if isinstance(ass_type, astng.Assign) and not in_loop(ass_type): - self._check_name('const', node.name, node) - elif isinstance(frame, astng.Function): - # global introduced variable aren't in the function locals - if node.name in frame: - self._check_name('variable', node.name, node) - - def _recursive_check_names(self, args, node): - """check names in a possibly recursive list """ - for arg in args: - if isinstance(arg, astng.AssName): - self._check_name('argument', arg.name, node) - else: - self._recursive_check_names(arg.elts, node) - - def _check_name(self, node_type, name, node): - """check for a name using the type's regexp""" - if name in self.config.good_names: - return - if name in self.config.bad_names: - self.stats['badname_' + node_type] += 1 - self.add_message('C0102', node=node, args=name) - return - regexp = getattr(self.config, node_type + '_rgx') - if regexp.match(name) is None: - self.add_message('C0103', node=node, args=(name, regexp.pattern)) - self.stats['badname_' + node_type] += 1 - - - -class DocStringChecker(_BasicChecker): - msgs = { - 'C0111': ('Missing docstring', # W0131 - 'Used when a module, function, class or method has no docstring.\ - Some special methods like __init__ doesn\'t necessary require a \ - docstring.'), - 'C0112': ('Empty docstring', # W0132 - 'Used when a module, function, class or method has an empty \ - docstring (it would be too easy ;).'), - } - options = (('no-docstring-rgx', - {'default' : NO_REQUIRED_DOC_RGX, - 'type' : 'regexp', 'metavar' : '', - 'help' : 'Regular expression which should only match ' - 'functions or classes name which do not require a ' - 'docstring'} - ), - ) - - def open(self): - self.stats = self.linter.add_stats(undocumented_module=0, - undocumented_function=0, - undocumented_method=0, - undocumented_class=0) - - def visit_module(self, 
node): - self._check_docstring('module', node) - - def visit_class(self, node): - if self.config.no_docstring_rgx.match(node.name) is None: - self._check_docstring('class', node) - - def visit_function(self, node): - if self.config.no_docstring_rgx.match(node.name) is None: - ftype = node.is_method() and 'method' or 'function' - if isinstance(node.parent.frame(), astng.Class): - overridden = False - # check if node is from a method overridden by its ancestor - for ancestor in node.parent.frame().ancestors(): - if node.name in ancestor and \ - isinstance(ancestor[node.name], astng.Function): - overridden = True - break - if not overridden: - self._check_docstring(ftype, node) - else: - self._check_docstring(ftype, node) - - def _check_docstring(self, node_type, node): - """check the node has a non empty docstring""" - docstring = node.doc - if docstring is None: - self.stats['undocumented_'+node_type] += 1 - self.add_message('C0111', node=node) - elif not docstring.strip(): - self.stats['undocumented_'+node_type] += 1 - self.add_message('C0112', node=node) - - -class PassChecker(_BasicChecker): - """check is the pass statement is really necessary""" - msgs = {'W0107': ('Unnecessary pass statement', - 'Used when a "pass" statement that can be avoided is ' - 'encountered.)'), - } - - def visit_pass(self, node): - if len(node.parent.child_sequence(node)) > 1: - self.add_message('W0107', node=node) - - -def register(linter): - """required method to auto register this checker""" - linter.register_checker(BasicErrorChecker(linter)) - linter.register_checker(BasicChecker(linter)) - linter.register_checker(NameChecker(linter)) - linter.register_checker(DocStringChecker(linter)) - linter.register_checker(PassChecker(linter)) diff --git a/pylibs/pylint/checkers/classes.py b/pylibs/pylint/checkers/classes.py deleted file mode 100644 index 60d20b6c..00000000 --- a/pylibs/pylint/checkers/classes.py +++ /dev/null @@ -1,553 +0,0 @@ -# Copyright (c) 2003-2011 LOGILAB S.A. 
(Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""classes checker for Python code -""" -from __future__ import generators - -from logilab import astng -from logilab.astng import YES, Instance, are_exclusive - -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import PYMETHODS, overrides_a_method, check_messages - -def class_is_abstract(node): - """return true if the given class node should be considered as an abstract - class - """ - for method in node.methods(): - if method.parent.frame() is node: - if method.is_abstract(pass_is_abstract=False): - return True - return False - - -MSGS = { - 'F0202': ('Unable to check methods signature (%s / %s)', - 'Used when PyLint has been unable to check methods signature \ - compatibility for an unexpected reason. 
Please report this kind \ - if you don\'t make sense of it.'), - - 'E0202': ('An attribute affected in %s line %s hide this method', - 'Used when a class defines a method which is hidden by an ' - 'instance attribute from an ancestor class or set by some ' - 'client code.'), - 'E0203': ('Access to member %r before its definition line %s', - 'Used when an instance member is accessed before it\'s actually\ - assigned.'), - 'W0201': ('Attribute %r defined outside __init__', - 'Used when an instance attribute is defined outside the __init__\ - method.'), - - 'W0212': ('Access to a protected member %s of a client class', # E0214 - 'Used when a protected member (i.e. class member with a name \ - beginning with an underscore) is access outside the class or a \ - descendant of the class where it\'s defined.'), - - 'E0211': ('Method has no argument', - 'Used when a method which should have the bound instance as \ - first argument has no argument defined.'), - 'E0213': ('Method should have "self" as first argument', - 'Used when a method has an attribute different the "self" as\ - first argument. This is considered as an error since this is\ - a so common convention that you shouldn\'t break it!'), - 'C0202': ('Class method should have %s as first argument', # E0212 - 'Used when a class method has an attribute different than "cls"\ - as first argument, to easily differentiate them from regular \ - instance methods.'), - 'C0203': ('Metaclass method should have "mcs" as first argument', # E0214 - 'Used when a metaclass method has an attribute different the \ - "mcs" as first argument.'), - - 'W0211': ('Static method with %r as first argument', - 'Used when a static method has "self" or "cls" as first argument.' - ), - 'R0201': ('Method could be a function', - 'Used when a method doesn\'t use its bound instance, and so could\ - be written as a function.' 
- ), - - 'E0221': ('Interface resolved to %s is not a class', - 'Used when a class claims to implement an interface which is not \ - a class.'), - 'E0222': ('Missing method %r from %s interface', - 'Used when a method declared in an interface is missing from a \ - class implementing this interface'), - 'W0221': ('Arguments number differs from %s method', - 'Used when a method has a different number of arguments than in \ - the implemented interface or in an overridden method.'), - 'W0222': ('Signature differs from %s method', - 'Used when a method signature is different than in the \ - implemented interface or in an overridden method.'), - 'W0223': ('Method %r is abstract in class %r but is not overridden', - 'Used when an abstract method (i.e. raise NotImplementedError) is \ - not overridden in concrete class.' - ), - 'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224 - 'Used when a PyLint as failed to find interfaces implemented by \ - a class'), - - - 'W0231': ('__init__ method from base class %r is not called', - 'Used when an ancestor class method has an __init__ method \ - which is not called by a derived class.'), - 'W0232': ('Class has no __init__ method', - 'Used when a class has no __init__ method, neither its parent \ - classes.'), - 'W0233': ('__init__ method from a non direct base class %r is called', - 'Used when an __init__ method is called on a class which is not \ - in the direct ancestors for the analysed class.'), - - } - - -class ClassChecker(BaseChecker): - """checks for : - * methods without self as first argument - * overridden methods signature - * access only to existent members via self - * attributes not defined in the __init__ method - * supported interfaces implementation - * unreachable code - """ - - __implements__ = (IASTNGChecker,) - - # configuration section name - name = 'classes' - # messages - msgs = MSGS - priority = -2 - # configuration options - options = (('ignore-iface-methods', - {'default' : (#zope 
interface - 'isImplementedBy', 'deferred', 'extends', 'names', - 'namesAndDescriptions', 'queryDescriptionFor', 'getBases', - 'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue', - 'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue', - 'isImplementedByInstancesOf', - # twisted - 'adaptWith', - # logilab.common interface - 'is_implemented_by'), - 'type' : 'csv', - 'metavar' : '', - 'help' : 'List of interface methods to ignore, \ -separated by a comma. This is used for instance to not check methods defines \ -in Zope\'s Interface base class.'} - ), - - ('defining-attr-methods', - {'default' : ('__init__', '__new__', 'setUp'), - 'type' : 'csv', - 'metavar' : '', - 'help' : 'List of method names used to declare (i.e. assign) \ -instance attributes.'} - ), - ('valid-classmethod-first-arg', - {'default' : ('cls',), - 'type' : 'csv', - 'metavar' : '', - 'help' : 'List of valid names for the first argument in \ -a class method.'} - ), - - ) - - def __init__(self, linter=None): - BaseChecker.__init__(self, linter) - self._accessed = [] - self._first_attrs = [] - self._meth_could_be_func = None - - def visit_class(self, node): - """init visit variable _accessed and check interfaces - """ - self._accessed.append({}) - self._check_bases_classes(node) - self._check_interfaces(node) - # if not an interface, exception, metaclass - if node.type == 'class': - try: - node.local_attr('__init__') - except astng.NotFoundError: - self.add_message('W0232', args=node, node=node) - - @check_messages('E0203', 'W0201') - def leave_class(self, cnode): - """close a class node: - check that instance attributes are defined in __init__ and check - access to existent members - """ - # check access to existent members on non metaclass classes - accessed = self._accessed.pop() - if cnode.type != 'metaclass': - self._check_accessed_members(cnode, accessed) - # checks attributes are defined in an allowed method such as __init__ - if 'W0201' not in self.active_msgs: - return - 
defining_methods = self.config.defining_attr_methods - for attr, nodes in cnode.instance_attrs.items(): - nodes = [n for n in nodes if not - isinstance(n.statement(), (astng.Delete, astng.AugAssign))] - if not nodes: - continue # error detected by typechecking - attr_defined = False - # check if any method attr is defined in is a defining method - for node in nodes: - if node.frame().name in defining_methods: - attr_defined = True - if not attr_defined: - # check attribute is defined in a parent's __init__ - for parent in cnode.instance_attr_ancestors(attr): - attr_defined = False - # check if any parent method attr is defined in is a defining method - for node in parent.instance_attrs[attr]: - if node.frame().name in defining_methods: - attr_defined = True - if attr_defined: - # we're done :) - break - else: - # check attribute is defined as a class attribute - try: - cnode.local_attr(attr) - except astng.NotFoundError: - self.add_message('W0201', args=attr, node=node) - - def visit_function(self, node): - """check method arguments, overriding""" - # ignore actual functions - if not node.is_method(): - return - klass = node.parent.frame() - self._meth_could_be_func = True - # check first argument is self if this is actually a method - self._check_first_arg_for_type(node, klass.type == 'metaclass') - if node.name == '__init__': - self._check_init(node) - return - # check signature if the method overloads inherited method - for overridden in klass.local_attr_ancestors(node.name): - # get astng for the searched method - try: - meth_node = overridden[node.name] - except KeyError: - # we have found the method but it's not in the local - # dictionary. 
- # This may happen with astng build from living objects - continue - if not isinstance(meth_node, astng.Function): - continue - self._check_signature(node, meth_node, 'overridden') - break - # check if the method overload an attribute - try: - overridden = klass.instance_attr(node.name)[0] # XXX - args = (overridden.root().name, overridden.fromlineno) - self.add_message('E0202', args=args, node=node) - except astng.NotFoundError: - pass - - def leave_function(self, node): - """on method node, check if this method couldn't be a function - - ignore class, static and abstract methods, initializer, - methods overridden from a parent class and any - kind of method defined in an interface for this warning - """ - if node.is_method(): - if node.args.args is not None: - self._first_attrs.pop() - if 'R0201' not in self.active_msgs: - return - class_node = node.parent.frame() - if (self._meth_could_be_func and node.type == 'method' - and not node.name in PYMETHODS - and not (node.is_abstract() or - overrides_a_method(class_node, node.name)) - and class_node.type != 'interface'): - self.add_message('R0201', node=node) - - def visit_getattr(self, node): - """check if the getattr is an access to a class member - if so, register it. Also check for access to protected - class member from outside its class (but ignore __special__ - methods) - """ - attrname = node.attrname - if self._first_attrs and isinstance(node.expr, astng.Name) and \ - node.expr.name == self._first_attrs[-1]: - self._accessed[-1].setdefault(attrname, []).append(node) - return - if 'W0212' not in self.active_msgs: - return - if attrname[0] == '_' and not attrname == '_' and not ( - attrname.startswith('__') and attrname.endswith('__')): - # XXX move this in a reusable function - klass = node.frame() - while klass is not None and not isinstance(klass, astng.Class): - if klass.parent is None: - klass = None - else: - klass = klass.parent.frame() - # XXX infer to be more safe and less dirty ?? 
- # in classes, check we are not getting a parent method - # through the class object or through super - callee = node.expr.as_string() - if klass is None or not (callee == klass.name or - callee in klass.basenames - or (isinstance(node.expr, astng.CallFunc) - and isinstance(node.expr.func, astng.Name) - and node.expr.func.name == 'super')): - self.add_message('W0212', node=node, args=attrname) - - - def visit_name(self, node): - """check if the name handle an access to a class member - if so, register it - """ - if self._first_attrs and (node.name == self._first_attrs[-1] or - not self._first_attrs[-1]): - self._meth_could_be_func = False - - def _check_accessed_members(self, node, accessed): - """check that accessed members are defined""" - # XXX refactor, probably much simpler now that E0201 is in type checker - for attr, nodes in accessed.items(): - # deactivate "except doesn't do anything", that's expected - # pylint: disable=W0704 - # is it a class attribute ? - try: - node.local_attr(attr) - # yes, stop here - continue - except astng.NotFoundError: - pass - # is it an instance attribute of a parent class ? - try: - node.instance_attr_ancestors(attr).next() - # yes, stop here - continue - except StopIteration: - pass - # is it an instance attribute ? 
- try: - defstmts = node.instance_attr(attr) - except astng.NotFoundError: - pass - else: - if len(defstmts) == 1: - defstmt = defstmts[0] - # check that if the node is accessed in the same method as - # it's defined, it's accessed after the initial assignment - frame = defstmt.frame() - lno = defstmt.fromlineno - for _node in nodes: - if _node.frame() is frame and _node.fromlineno < lno \ - and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')): - self.add_message('E0203', node=_node, - args=(attr, lno)) - - def _check_first_arg_for_type(self, node, metaclass=0): - """check the name of first argument, expect: - - * 'self' for a regular method - * 'cls' for a class method - * 'mcs' for a metaclass - * not one of the above for a static method - """ - # don't care about functions with unknown argument (builtins) - if node.args.args is None: - return - first_arg = node.args.args and node.argnames()[0] - self._first_attrs.append(first_arg) - first = self._first_attrs[-1] - # static method - if node.type == 'staticmethod': - if first_arg in ('self', 'cls', 'mcs'): - self.add_message('W0211', args=first, node=node) - self._first_attrs[-1] = None - # class / regular method with no args - elif not node.args.args: - self.add_message('E0211', node=node) - # metaclass method - elif metaclass: - if first != 'mcs': - self.add_message('C0203', node=node) - # class method - elif node.type == 'classmethod': - if first not in self.config.valid_classmethod_first_arg: - if len(self.config.valid_classmethod_first_arg) == 1: - valid = repr(self.config.valid_classmethod_first_arg[0]) - else: - valid = ', '.join( - repr(v) - for v in self.config.valid_classmethod_first_arg[:-1]) - valid = '%s or %r' % ( - valid, self.config.valid_classmethod_first_arg[-1]) - self.add_message('C0202', args=valid, node=node) - # regular method without self as argument - elif first != 'self': - self.add_message('E0213', node=node) - - def 
_check_bases_classes(self, node): - """check that the given class node implements abstract methods from - base classes - """ - # check if this class abstract - if class_is_abstract(node): - return - for method in node.methods(): - owner = method.parent.frame() - if owner is node: - continue - # owner is not this class, it must be a parent class - # check that the ancestor's method is not abstract - if method.is_abstract(pass_is_abstract=False): - self.add_message('W0223', node=node, - args=(method.name, owner.name)) - - def _check_interfaces(self, node): - """check that the given class node really implements declared - interfaces - """ - e0221_hack = [False] - def iface_handler(obj): - """filter interface objects, it should be classes""" - if not isinstance(obj, astng.Class): - e0221_hack[0] = True - self.add_message('E0221', node=node, - args=(obj.as_string(),)) - return False - return True - ignore_iface_methods = self.config.ignore_iface_methods - try: - for iface in node.interfaces(handler_func=iface_handler): - for imethod in iface.methods(): - name = imethod.name - if name.startswith('_') or name in ignore_iface_methods: - # don't check method beginning with an underscore, - # usually belonging to the interface implementation - continue - # get class method astng - try: - method = node_method(node, name) - except astng.NotFoundError: - self.add_message('E0222', args=(name, iface.name), - node=node) - continue - # ignore inherited methods - if method.parent.frame() is not node: - continue - # check signature - self._check_signature(method, imethod, - '%s interface' % iface.name) - except astng.InferenceError: - if e0221_hack[0]: - return - implements = Instance(node).getattr('__implements__')[0] - assignment = implements.parent - assert isinstance(assignment, astng.Assign) - # assignment.expr can be a Name or a Tuple or whatever. 
- # Use as_string() for the message - # FIXME: in case of multiple interfaces, find which one could not - # be resolved - self.add_message('F0220', node=implements, - args=(node.name, assignment.value.as_string())) - - def _check_init(self, node): - """check that the __init__ method call super or ancestors'__init__ - method - """ - if not set(('W0231', 'W0233')) & self.active_msgs: - return - klass_node = node.parent.frame() - to_call = _ancestors_to_call(klass_node) - not_called_yet = dict(to_call) - for stmt in node.nodes_of_class(astng.CallFunc): - expr = stmt.func - if not isinstance(expr, astng.Getattr) \ - or expr.attrname != '__init__': - continue - # skip the test if using super - if isinstance(expr.expr, astng.CallFunc) and \ - isinstance(expr.expr.func, astng.Name) and \ - expr.expr.func.name == 'super': - return - try: - klass = expr.expr.infer().next() - if klass is YES: - continue - try: - del not_called_yet[klass] - except KeyError: - if klass not in to_call: - self.add_message('W0233', node=expr, args=klass.name) - except astng.InferenceError: - continue - for klass in not_called_yet.keys(): - if klass.name == 'object': - continue - self.add_message('W0231', args=klass.name, node=node) - - def _check_signature(self, method1, refmethod, class_type): - """check that the signature of the two given methods match - - class_type is in 'class', 'interface' - """ - if not (isinstance(method1, astng.Function) - and isinstance(refmethod, astng.Function)): - self.add_message('F0202', args=(method1, refmethod), node=method1) - return - # don't care about functions with unknown argument (builtins) - if method1.args.args is None or refmethod.args.args is None: - return - # if we use *args, **kwargs, skip the below checks - if method1.args.vararg or method1.args.kwarg: - return - if len(method1.args.args) != len(refmethod.args.args): - self.add_message('W0221', args=class_type, node=method1) - elif len(method1.args.defaults) < len(refmethod.args.defaults): - 
self.add_message('W0222', args=class_type, node=method1) - - -def _ancestors_to_call(klass_node, method='__init__'): - """return a dictionary where keys are the list of base classes providing - the queried method, and so that should/may be called from the method node - """ - to_call = {} - for base_node in klass_node.ancestors(recurs=False): - try: - base_node.local_attr(method) - to_call[base_node] = 1 - except astng.NotFoundError: - continue - return to_call - - -def node_method(node, method_name): - """get astng for on the given class node, ensuring it - is a Function node - """ - for n in node.local_attr(method_name): - if isinstance(n, astng.Function): - return n - raise astng.NotFoundError(method_name) - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(ClassChecker(linter)) diff --git a/pylibs/pylint/checkers/exceptions.py b/pylibs/pylint/checkers/exceptions.py deleted file mode 100644 index bf814385..00000000 --- a/pylibs/pylint/checkers/exceptions.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-"""exceptions handling (raising, catching, exceptions classes) checker -""" -import sys - -from logilab.common.compat import builtins -BUILTINS_NAME = builtins.__name__ -from logilab import astng -from logilab.astng import YES, Instance, unpack_infer - -from pylint.checkers import BaseChecker -from pylint.checkers.utils import is_empty, is_raising -from pylint.interfaces import IASTNGChecker - - -OVERGENERAL_EXCEPTIONS = ('Exception',) - -MSGS = { - 'E0701': ( - 'Bad except clauses order (%s)', - 'Used when except clauses are not in the correct order (from the \ - more specific to the more generic). If you don\'t fix the order, \ - some exceptions may not be catched by the most specific handler.'), - 'E0702': ('Raising %s while only classes, instances or string are allowed', - 'Used when something which is neither a class, an instance or a \ - string is raised (i.e. a `TypeError` will be raised).'), - 'E0710': ('Raising a new style class which doesn\'t inherit from BaseException', - 'Used when a new style class which doesn\'t inherit from \ - BaseException is raised.'), - 'E0711': ('NotImplemented raised - should raise NotImplementedError', - 'Used when NotImplemented is raised instead of \ - NotImplementedError'), - - 'W0701': ('Raising a string exception', - 'Used when a string exception is raised.'), - 'W0702': ('No exception type(s) specified', - 'Used when an except clause doesn\'t specify exceptions type to \ - catch.'), - 'W0703': ('Catching too general exception %s', - 'Used when an except catches a too general exception, \ - possibly burying unrelated errors.'), - 'W0704': ('Except doesn\'t do anything', - 'Used when an except clause does nothing but "pass" and there is\ - no "else" clause.'), - 'W0710': ('Exception doesn\'t inherit from standard "Exception" class', - 'Used when a custom exception class is raised but doesn\'t \ - inherit from the builtin "Exception" class.'), - } - - -if sys.version_info < (3, 0): - EXCEPTIONS_MODULE = "exceptions" -else: 
- EXCEPTIONS_MODULE = "builtins" - -class ExceptionsChecker(BaseChecker): - """checks for - * excepts without exception filter - * type of raise argument : string, Exceptions, other values - """ - - __implements__ = IASTNGChecker - - name = 'exceptions' - msgs = MSGS - priority = -4 - options = (('overgeneral-exceptions', - {'default' : OVERGENERAL_EXCEPTIONS, - 'type' :'csv', 'metavar' : '', - 'help' : 'Exceptions that will emit a warning ' - 'when being caught. Defaults to "%s"' % ( - ', '.join(OVERGENERAL_EXCEPTIONS),)} - ), - ) - - def visit_raise(self, node): - """visit raise possibly inferring value""" - # ignore empty raise - if node.exc is None: - return - expr = node.exc - if self._check_raise_value(node, expr): - return - else: - try: - value = unpack_infer(expr).next() - except astng.InferenceError: - return - self._check_raise_value(node, value) - - def _check_raise_value(self, node, expr): - """check for bad values, string exception and class inheritance - """ - value_found = True - if isinstance(expr, astng.Const): - value = expr.value - if isinstance(value, str): - self.add_message('W0701', node=node) - else: - self.add_message('E0702', node=node, - args=value.__class__.__name__) - elif (isinstance(expr, astng.Name) and \ - expr.name in ('None', 'True', 'False')) or \ - isinstance(expr, (astng.List, astng.Dict, astng.Tuple, - astng.Module, astng.Function)): - self.add_message('E0702', node=node, args=expr.name) - elif ( (isinstance(expr, astng.Name) and expr.name == 'NotImplemented') - or (isinstance(expr, astng.CallFunc) and - isinstance(expr.func, astng.Name) and - expr.func.name == 'NotImplemented') ): - self.add_message('E0711', node=node) - elif isinstance(expr, astng.BinOp) and expr.op == '%': - self.add_message('W0701', node=node) - elif isinstance(expr, (Instance, astng.Class)): - if isinstance(expr, Instance): - expr = expr._proxied - if (isinstance(expr, astng.Class) and - not inherit_from_std_ex(expr) and - expr.root().name != 
BUILTINS_NAME): - if expr.newstyle: - self.add_message('E0710', node=node) - else: - self.add_message('W0710', node=node) - else: - value_found = False - else: - value_found = False - return value_found - - - def visit_tryexcept(self, node): - """check for empty except""" - exceptions_classes = [] - nb_handlers = len(node.handlers) - for index, handler in enumerate(node.handlers): - # single except doing nothing but "pass" without else clause - if nb_handlers == 1 and is_empty(handler.body) and not node.orelse: - self.add_message('W0704', node=handler.type or handler.body[0]) - if handler.type is None: - if nb_handlers == 1 and not is_raising(handler.body): - self.add_message('W0702', node=handler.body[0]) - # check if a "except:" is followed by some other - # except - elif index < (nb_handlers - 1): - msg = 'empty except clause should always appear last' - self.add_message('E0701', node=node, args=msg) - else: - try: - excs = list(unpack_infer(handler.type)) - except astng.InferenceError: - continue - for exc in excs: - # XXX skip other non class nodes - if exc is YES or not isinstance(exc, astng.Class): - continue - exc_ancestors = [anc for anc in exc.ancestors() - if isinstance(anc, astng.Class)] - for previous_exc in exceptions_classes: - if previous_exc in exc_ancestors: - msg = '%s is an ancestor class of %s' % ( - previous_exc.name, exc.name) - self.add_message('E0701', node=handler.type, args=msg) - if (exc.name in self.config.overgeneral_exceptions - and exc.root().name == EXCEPTIONS_MODULE - and nb_handlers == 1 and not is_raising(handler.body)): - self.add_message('W0703', args=exc.name, node=handler.type) - exceptions_classes += excs - - -def inherit_from_std_ex(node): - """return true if the given class node is subclass of - exceptions.Exception - """ - if node.name in ('Exception', 'BaseException') \ - and node.root().name == EXCEPTIONS_MODULE: - return True - for parent in node.ancestors(recurs=False): - if inherit_from_std_ex(parent): - return True 
- return False - -def register(linter): - """required method to auto register this checker""" - linter.register_checker(ExceptionsChecker(linter)) diff --git a/pylibs/pylint/checkers/format.py b/pylibs/pylint/checkers/format.py deleted file mode 100644 index 6a2d5aca..00000000 --- a/pylibs/pylint/checkers/format.py +++ /dev/null @@ -1,361 +0,0 @@ -# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com). -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""Python code format's checker. - -By default try to follow Guido's style guide : - -http://www.python.org/doc/essays/styleguide.html - -Some parts of the process_token method is based from The Tab Nanny std module. 
-""" - -import re, sys -import tokenize -if not hasattr(tokenize, 'NL'): - raise ValueError("tokenize.NL doesn't exist -- tokenize module too old") - -from logilab.common.textutils import pretty_match -from logilab.astng import nodes - -from pylint.interfaces import IRawChecker, IASTNGChecker -from pylint.checkers import BaseRawChecker -from pylint.checkers.utils import check_messages - -MSGS = { - 'C0301': ('Line too long (%s/%s)', - 'Used when a line is longer than a given number of characters.'), - 'C0302': ('Too many lines in module (%s)', # was W0302 - 'Used when a module has too much lines, reducing its readability.' - ), - - 'W0311': ('Bad indentation. Found %s %s, expected %s', - 'Used when an unexpected number of indentation\'s tabulations or ' - 'spaces has been found.'), - 'W0312': ('Found indentation with %ss instead of %ss', - 'Used when there are some mixed tabs and spaces in a module.'), - 'W0301': ('Unnecessary semicolon', # was W0106 - 'Used when a statement is ended by a semi-colon (";"), which \ - isn\'t necessary (that\'s python, not C ;).'), - 'C0321': ('More than one statement on a single line', - 'Used when more than on statement are found on the same line.'), - 'C0322': ('Operator not preceded by a space\n%s', - 'Used when one of the following operator (!= | <= | == | >= | < ' - '| > | = | \+= | -= | \*= | /= | %) is not preceded by a space.'), - 'C0323': ('Operator not followed by a space\n%s', - 'Used when one of the following operator (!= | <= | == | >= | < ' - '| > | = | \+= | -= | \*= | /= | %) is not followed by a space.'), - 'C0324': ('Comma not followed by a space\n%s', - 'Used when a comma (",") is not followed by a space.'), - } - -if sys.version_info < (3, 0): - - MSGS.update({ - 'W0331': ('Use of the <> operator', - 'Used when the deprecated "<>" operator is used instead \ - of "!=".'), - 'W0332': ('Use l as long integer identifier', - 'Used when a lower case "l" is used to mark a long integer. 
You ' - 'should use a upper case "L" since the letter "l" looks too much ' - 'like the digit "1"'), - 'W0333': ('Use of the `` operator', - 'Used when the deprecated "``" (backtick) operator is used ' - 'instead of the str() function.'), - }) - -# simple quoted string rgx -SQSTRING_RGX = r'"([^"\\]|\\.)*?"' -# simple apostrophed rgx -SASTRING_RGX = r"'([^'\\]|\\.)*?'" -# triple quoted string rgx -TQSTRING_RGX = r'"""([^"]|("(?!"")))*?(""")' -# triple apostrophed string rgx # FIXME english please -TASTRING_RGX = r"'''([^']|('(?!'')))*?(''')" - -# finally, the string regular expression -STRING_RGX = re.compile('(%s)|(%s)|(%s)|(%s)' % (TQSTRING_RGX, TASTRING_RGX, - SQSTRING_RGX, SASTRING_RGX), - re.MULTILINE|re.DOTALL) - -COMMENT_RGX = re.compile("#.*$", re.M) - -OPERATORS = r'!=|<=|==|>=|<|>|=|\+=|-=|\*=|/=|%' - -OP_RGX_MATCH_1 = r'[^(]*(?|=|\+|-|\*|/|!|%%|&|\|)(%s).*' % OPERATORS -OP_RGX_SEARCH_1 = r'(?|=|\+|-|\*|/|!|%%|&|\|)(%s)' % OPERATORS - -OP_RGX_MATCH_2 = r'[^(]*(%s)(?!\s|=|>|<).*' % OPERATORS -OP_RGX_SEARCH_2 = r'(%s)(?!\s|=|>)' % OPERATORS - -BAD_CONSTRUCT_RGXS = ( - - (re.compile(OP_RGX_MATCH_1, re.M), - re.compile(OP_RGX_SEARCH_1, re.M), - 'C0322'), - - (re.compile(OP_RGX_MATCH_2, re.M), - re.compile(OP_RGX_SEARCH_2, re.M), - 'C0323'), - - (re.compile(r'.*,[^(\s|\]|}|\))].*', re.M), - re.compile(r',[^\s)]', re.M), - 'C0324'), - ) - - -def get_string_coords(line): - """return a list of string positions (tuple (start, end)) in the line - """ - result = [] - for match in re.finditer(STRING_RGX, line): - result.append( (match.start(), match.end()) ) - return result - -def in_coords(match, string_coords): - """return true if the match is in the string coord""" - mstart = match.start() - for start, end in string_coords: - if mstart >= start and mstart < end: - return True - return False - -def check_line(line): - """check a line for a bad construction - if it founds one, return a message describing the problem - else return None - """ - cleanstr = 
COMMENT_RGX.sub('', STRING_RGX.sub('', line)) - for rgx_match, rgx_search, msg_id in BAD_CONSTRUCT_RGXS: - if rgx_match.match(cleanstr): - string_positions = get_string_coords(line) - for match in re.finditer(rgx_search, line): - if not in_coords(match, string_positions): - return msg_id, pretty_match(match, line.rstrip()) - - -class FormatChecker(BaseRawChecker): - """checks for : - * unauthorized constructions - * strict indentation - * line length - * use of <> instead of != - """ - - __implements__ = (IRawChecker, IASTNGChecker) - - # configuration section name - name = 'format' - # messages - msgs = MSGS - # configuration options - # for available dict keys/values see the optik parser 'add_option' method - options = (('max-line-length', - {'default' : 80, 'type' : "int", 'metavar' : '', - 'help' : 'Maximum number of characters on a single line.'}), - ('max-module-lines', - {'default' : 1000, 'type' : 'int', 'metavar' : '', - 'help': 'Maximum number of lines in a module'} - ), - ('indent-string', - {'default' : ' ', 'type' : "string", 'metavar' : '', - 'help' : 'String used as indentation unit. This is usually \ -" " (4 spaces) or "\\t" (1 tab).'}), - ) - def __init__(self, linter=None): - BaseRawChecker.__init__(self, linter) - self._lines = None - self._visited_lines = None - - def process_module(self, node): - """extracts encoding from the stream and decodes each line, so that - international text's length is properly calculated. 
- """ - stream = node.file_stream - stream.seek(0) - readline = stream.readline - if sys.version_info < (3, 0): - if node.file_encoding is not None: - readline = lambda: stream.readline().decode(node.file_encoding, 'replace') - self.process_tokens(tokenize.generate_tokens(readline)) - - def new_line(self, tok_type, line, line_num, junk): - """a new line has been encountered, process it if necessary""" - if not tok_type in junk: - self._lines[line_num] = line.split('\n')[0] - self.check_lines(line, line_num) - - def process_tokens(self, tokens): - """process tokens and search for : - - _ non strict indentation (i.e. not always using the parameter as - indent unit) - _ too long lines (i.e. longer than ) - _ optionally bad construct (if given, bad_construct must be a compiled - regular expression). - """ - indent = tokenize.INDENT - dedent = tokenize.DEDENT - newline = tokenize.NEWLINE - junk = (tokenize.COMMENT, tokenize.NL) - indents = [0] - check_equal = 0 - line_num = 0 - previous = None - self._lines = {} - self._visited_lines = {} - for (tok_type, token, start, _, line) in tokens: - if start[0] != line_num: - if previous is not None and previous[0] == tokenize.OP and previous[1] == ';': - self.add_message('W0301', line=previous[2]) - previous = None - line_num = start[0] - self.new_line(tok_type, line, line_num, junk) - if tok_type not in (indent, dedent, newline) + junk: - previous = tok_type, token, start[0] - - if tok_type == tokenize.OP: - if token == '<>': - self.add_message('W0331', line=line_num) - elif tok_type == tokenize.NUMBER: - if token.endswith('l'): - self.add_message('W0332', line=line_num) - - elif tok_type == newline: - # a program statement, or ENDMARKER, will eventually follow, - # after some (possibly empty) run of tokens of the form - # (NL | COMMENT)* (INDENT | DEDENT+)? - # If an INDENT appears, setting check_equal is wrong, and will - # be undone when we see the INDENT. 
- check_equal = 1 - - elif tok_type == indent: - check_equal = 0 - self.check_indent_level(token, indents[-1]+1, line_num) - indents.append(indents[-1]+1) - - elif tok_type == dedent: - # there's nothing we need to check here! what's important is - # that when the run of DEDENTs ends, the indentation of the - # program statement (or ENDMARKER) that triggered the run is - # equal to what's left at the top of the indents stack - check_equal = 1 - if len(indents) > 1: - del indents[-1] - - elif check_equal and tok_type not in junk: - # this is the first "real token" following a NEWLINE, so it - # must be the first token of the next program statement, or an - # ENDMARKER; the "line" argument exposes the leading whitespace - # for this statement; in the case of ENDMARKER, line is an empty - # string, so will properly match the empty string with which the - # "indents" stack was seeded - check_equal = 0 - self.check_indent_level(line, indents[-1], line_num) - - line_num -= 1 # to be ok with "wc -l" - if line_num > self.config.max_module_lines: - self.add_message('C0302', args=line_num, line=1) - - @check_messages('C0321' ,'C03232', 'C0323', 'C0324') - def visit_default(self, node): - """check the node line number and check it if not yet done""" - if not node.is_statement: - return - if not node.root().pure_python: - return # XXX block visit of child nodes - prev_sibl = node.previous_sibling() - if prev_sibl is not None: - prev_line = prev_sibl.fromlineno - else: - prev_line = node.parent.statement().fromlineno - line = node.fromlineno - assert line, node - if prev_line == line and self._visited_lines.get(line) != 2: - # py2.5 try: except: finally: - if not (isinstance(node, nodes.TryExcept) - and isinstance(node.parent, nodes.TryFinally) - and node.fromlineno == node.parent.fromlineno): - self.add_message('C0321', node=node) - self._visited_lines[line] = 2 - return - if line in self._visited_lines: - return - try: - tolineno = node.blockstart_tolineno - except 
AttributeError: - tolineno = node.tolineno - assert tolineno, node - lines = [] - for line in xrange(line, tolineno + 1): - self._visited_lines[line] = 1 - try: - lines.append(self._lines[line].rstrip()) - except KeyError: - lines.append('') - try: - msg_def = check_line('\n'.join(lines)) - if msg_def: - self.add_message(msg_def[0], node=node, args=msg_def[1]) - except KeyError: - # FIXME: internal error ! - pass - - @check_messages('W0333') - def visit_backquote(self, node): - self.add_message('W0333', node=node) - - def check_lines(self, lines, i): - """check lines have less than a maximum number of characters - """ - max_chars = self.config.max_line_length - for line in lines.splitlines(): - if len(line) > max_chars: - self.add_message('C0301', line=i, args=(len(line), max_chars)) - i += 1 - - def check_indent_level(self, string, expected, line_num): - """return the indent level of the string - """ - indent = self.config.indent_string - if indent == '\\t': # \t is not interpreted in the configuration file - indent = '\t' - level = 0 - unit_size = len(indent) - while string[:unit_size] == indent: - string = string[unit_size:] - level += 1 - suppl = '' - while string and string[0] in ' \t': - if string[0] != indent[0]: - if string[0] == '\t': - args = ('tab', 'space') - else: - args = ('space', 'tab') - self.add_message('W0312', args=args, line=line_num) - return level - suppl += string[0] - string = string [1:] - if level != expected or suppl: - i_type = 'spaces' - if indent[0] == '\t': - i_type = 'tabs' - self.add_message('W0311', line=line_num, - args=(level * unit_size + len(suppl), i_type, - expected * unit_size)) - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(FormatChecker(linter)) diff --git a/pylibs/pylint/checkers/imports.py b/pylibs/pylint/checkers/imports.py deleted file mode 100644 index 7e6a4f88..00000000 --- a/pylibs/pylint/checkers/imports.py +++ /dev/null @@ -1,378 +0,0 @@ -# Copyright 
(c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""imports checkers for Python code""" - -from logilab.common.graph import get_cycles, DotBackend -from logilab.common.modutils import is_standard_module -from logilab.common.ureports import VerbatimText, Paragraph - -from logilab import astng -from logilab.astng import are_exclusive - -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker, EmptyReport - - -def get_first_import(node, context, name, base, level): - """return the node where [base.] 
is imported or None if not found - """ - first = None - found = False - for first in context.values(): - if isinstance(first, astng.Import): - if name in [iname[0] for iname in first.names]: - found = True - break - elif isinstance(first, astng.From): - if base == first.modname and level == first.level and \ - name in [iname[0] for iname in first.names]: - found = True - break - if found and first is not node and not are_exclusive(first, node): - return first - -# utilities to represents import dependencies as tree and dot graph ########### - -def filter_dependencies_info(dep_info, package_dir, mode='external'): - """filter external or internal dependencies from dep_info (return a - new dictionary containing the filtered modules only) - """ - if mode == 'external': - filter_func = lambda x: not is_standard_module(x, (package_dir,)) - else: - assert mode == 'internal' - filter_func = lambda x: is_standard_module(x, (package_dir,)) - result = {} - for importee, importers in dep_info.items(): - if filter_func(importee): - result[importee] = importers - return result - -def make_tree_defs(mod_files_list): - """get a list of 2-uple (module, list_of_files_which_import_this_module), - it will return a dictionary to represent this as a tree - """ - tree_defs = {} - for mod, files in mod_files_list: - node = (tree_defs, ()) - for prefix in mod.split('.'): - node = node[0].setdefault(prefix, [{}, []]) - node[1] += files - return tree_defs - -def repr_tree_defs(data, indent_str=None): - """return a string which represents imports as a tree""" - lines = [] - nodes = data.items() - for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])): - if not files: - files = '' - else: - files = '(%s)' % ','.join(files) - if indent_str is None: - lines.append('%s %s' % (mod, files)) - sub_indent_str = ' ' - else: - lines.append('%s\-%s %s' % (indent_str, mod, files)) - if i == len(nodes)-1: - sub_indent_str = '%s ' % indent_str - else: - sub_indent_str = '%s| ' % 
indent_str - if sub: - lines.append(repr_tree_defs(sub, sub_indent_str)) - return '\n'.join(lines) - - -def dependencies_graph(filename, dep_info): - """write dependencies as a dot (graphviz) file - """ - done = {} - printer = DotBackend(filename[:-4], rankdir = "LR") - printer.emit('URL="." node[shape="box"]') - for modname, dependencies in dep_info.items(): - done[modname] = 1 - printer.emit_node(modname) - for modname in dependencies: - if modname not in done: - done[modname] = 1 - printer.emit_node(modname) - for depmodname, dependencies in dep_info.items(): - for modname in dependencies: - printer.emit_edge(modname, depmodname) - printer.generate(filename) - - -def make_graph(filename, dep_info, sect, gtype): - """generate a dependencies graph and add some information about it in the - report's section - """ - dependencies_graph(filename, dep_info) - sect.append(Paragraph('%simports graph has been written to %s' - % (gtype, filename))) - - -# the import checker itself ################################################### - -MSGS = { - 'F0401': ('Unable to import %s', - 'Used when pylint has been unable to import a module.'), - 'R0401': ('Cyclic import (%s)', - 'Used when a cyclic import between two or more modules is \ - detected.'), - - 'W0401': ('Wildcard import %s', - 'Used when `from module import *` is detected.'), - 'W0402': ('Uses of a deprecated module %r', - 'Used a module marked as deprecated is imported.'), - 'W0403': ('Relative import %r, should be %r', - 'Used when an import relative to the package directory is \ - detected.'), - 'W0404': ('Reimport %r (imported line %s)', - 'Used when a module is reimported multiple times.'), - 'W0406': ('Module import itself', - 'Used when a module is importing itself.'), - - 'W0410': ('__future__ import is not the first non docstring statement', - 'Python 2.5 and greater require __future__ import to be the \ - first non docstring statement in the module.'), - } - -class ImportsChecker(BaseChecker): - """checks 
for - * external modules dependencies - * relative / wildcard imports - * cyclic imports - * uses of deprecated modules - """ - - __implements__ = IASTNGChecker - - name = 'imports' - msgs = MSGS - priority = -2 - - options = (('deprecated-modules', - {'default' : ('regsub', 'string', 'TERMIOS', - 'Bastion', 'rexec'), - 'type' : 'csv', - 'metavar' : '', - 'help' : 'Deprecated modules which should not be used, \ -separated by a comma'} - ), - ('import-graph', - {'default' : '', - 'type' : 'string', - 'metavar' : '', - 'help' : 'Create a graph of every (i.e. internal and \ -external) dependencies in the given file (report RP0402 must not be disabled)'} - ), - ('ext-import-graph', - {'default' : '', - 'type' : 'string', - 'metavar' : '', - 'help' : 'Create a graph of external dependencies in the \ -given file (report RP0402 must not be disabled)'} - ), - ('int-import-graph', - {'default' : '', - 'type' : 'string', - 'metavar' : '', - 'help' : 'Create a graph of internal dependencies in the \ -given file (report RP0402 must not be disabled)'} - ), - - ) - - def __init__(self, linter=None): - BaseChecker.__init__(self, linter) - self.stats = None - self.import_graph = None - self.__int_dep_info = self.__ext_dep_info = None - self.reports = (('RP0401', 'External dependencies', - self.report_external_dependencies), - ('RP0402', 'Modules dependencies graph', - self.report_dependencies_graph), - ) - - def open(self): - """called before visiting project (i.e set of modules)""" - self.linter.add_stats(dependencies={}) - self.linter.add_stats(cycles=[]) - self.stats = self.linter.stats - self.import_graph = {} - - def close(self): - """called before visiting project (i.e set of modules)""" - # don't try to compute cycles if the associated message is disabled - if self.linter.is_message_enabled('R0401'): - for cycle in get_cycles(self.import_graph): - self.add_message('R0401', args=' -> '.join(cycle)) - - def visit_import(self, node): - """triggered when an import statement is 
seen""" - modnode = node.root() - for name, _ in node.names: - importedmodnode = self.get_imported_module(modnode, node, name) - if importedmodnode is None: - continue - self._check_relative_import(modnode, node, importedmodnode, name) - self._add_imported_module(node, importedmodnode.name) - self._check_deprecated_module(node, name) - self._check_reimport(node, name) - - - def visit_from(self, node): - """triggered when a from statement is seen""" - basename = node.modname - if basename == '__future__': - # check if this is the first non-docstring statement in the module - prev = node.previous_sibling() - if prev: - # consecutive future statements are possible - if not (isinstance(prev, astng.From) - and prev.modname == '__future__'): - self.add_message('W0410', node=node) - return - modnode = node.root() - importedmodnode = self.get_imported_module(modnode, node, basename) - if importedmodnode is None: - return - self._check_relative_import(modnode, node, importedmodnode, basename) - self._check_deprecated_module(node, basename) - for name, _ in node.names: - if name == '*': - self.add_message('W0401', args=basename, node=node) - continue - self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name)) - self._check_reimport(node, name, basename, node.level) - - def get_imported_module(self, modnode, importnode, modname): - try: - return importnode.do_import_module(modname) - except astng.InferenceError, ex: - if str(ex) != modname: - args = '%r (%s)' % (modname, ex) - else: - args = repr(modname) - self.add_message("F0401", args=args, node=importnode) - - def _check_relative_import(self, modnode, importnode, importedmodnode, - importedasname): - """check relative import. node is either an Import or From node, modname - the imported module name. 
- """ - if 'W0403' not in self.active_msgs: - return - if importedmodnode.file is None: - return False # built-in module - if modnode is importedmodnode: - return False # module importing itself - if modnode.absolute_import_activated() or getattr(importnode, 'level', None): - return False - if importedmodnode.name != importedasname: - # this must be a relative import... - self.add_message('W0403', args=(importedasname, importedmodnode.name), - node=importnode) - - def _add_imported_module(self, node, importedmodname): - """notify an imported module, used to analyze dependencies""" - context_name = node.root().name - if context_name == importedmodname: - # module importing itself ! - self.add_message('W0406', node=node) - elif not is_standard_module(importedmodname): - # handle dependencies - importedmodnames = self.stats['dependencies'].setdefault( - importedmodname, set()) - if not context_name in importedmodnames: - importedmodnames.add(context_name) - if is_standard_module( importedmodname, (self.package_dir(),) ): - # update import graph - mgraph = self.import_graph.setdefault(context_name, set()) - if not importedmodname in mgraph: - mgraph.add(importedmodname) - - def _check_deprecated_module(self, node, mod_path): - """check if the module is deprecated""" - for mod_name in self.config.deprecated_modules: - if mod_path == mod_name or mod_path.startswith(mod_name + '.'): - self.add_message('W0402', node=node, args=mod_path) - - def _check_reimport(self, node, name, basename=None, level=0): - """check if the import is necessary (i.e. 
not already done)""" - if 'W0404' not in self.active_msgs: - return - frame = node.frame() - root = node.root() - contexts = [(frame, level)] - if root is not frame: - contexts.append((root, 0)) - for context, level in contexts: - first = get_first_import(node, context, name, basename, level) - if first is not None: - self.add_message('W0404', node=node, - args=(name, first.fromlineno)) - - - def report_external_dependencies(self, sect, _, dummy): - """return a verbatim layout for displaying dependencies""" - dep_info = make_tree_defs(self._external_dependencies_info().items()) - if not dep_info: - raise EmptyReport() - tree_str = repr_tree_defs(dep_info) - sect.append(VerbatimText(tree_str)) - - def report_dependencies_graph(self, sect, _, dummy): - """write dependencies as a dot (graphviz) file""" - dep_info = self.stats['dependencies'] - if not dep_info or not (self.config.import_graph - or self.config.ext_import_graph - or self.config.int_import_graph): - raise EmptyReport() - filename = self.config.import_graph - if filename: - make_graph(filename, dep_info, sect, '') - filename = self.config.ext_import_graph - if filename: - make_graph(filename, self._external_dependencies_info(), - sect, 'external ') - filename = self.config.int_import_graph - if filename: - make_graph(filename, self._internal_dependencies_info(), - sect, 'internal ') - - def _external_dependencies_info(self): - """return cached external dependencies information or build and - cache them - """ - if self.__ext_dep_info is None: - self.__ext_dep_info = filter_dependencies_info( - self.stats['dependencies'], self.package_dir(), 'external') - return self.__ext_dep_info - - def _internal_dependencies_info(self): - """return cached internal dependencies information or build and - cache them - """ - if self.__int_dep_info is None: - self.__int_dep_info = filter_dependencies_info( - self.stats['dependencies'], self.package_dir(), 'internal') - return self.__int_dep_info - - -def register(linter): - 
"""required method to auto register this checker """ - linter.register_checker(ImportsChecker(linter)) diff --git a/pylibs/pylint/checkers/logging.py b/pylibs/pylint/checkers/logging.py deleted file mode 100644 index 89899b65..00000000 --- a/pylibs/pylint/checkers/logging.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (c) 2009-2010 Google, Inc. -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""checker for use of Python logging -""" - -from logilab import astng -from pylint import checkers -from pylint import interfaces -from pylint.checkers import utils - - -MSGS = { - 'W1201': ('Specify string format arguments as logging function parameters', - 'Used when a logging statement has a call form of ' - '"logging.(format_string % (format_args...))". ' - 'Such calls should leave string interpolation to the logging ' - 'method itself and be written ' - '"logging.(format_string, format_args...)" ' - 'so that the program may avoid incurring the cost of the ' - 'interpolation in those cases in which no message will be ' - 'logged. 
For more, see ' - 'http://www.python.org/dev/peps/pep-0282/.'), - 'E1200': ('Unsupported logging format character %r (%#02x) at index %d', - 'Used when an unsupported format character is used in a logging\ - statement format string.'), - 'E1201': ('Logging format string ends in middle of conversion specifier', - 'Used when a logging statement format string terminates before\ - the end of a conversion specifier.'), - 'E1205': ('Too many arguments for logging format string', - 'Used when a logging format string is given too few arguments.'), - 'E1206': ('Not enough arguments for logging format string', - 'Used when a logging format string is given too many arguments'), - } - - -CHECKED_CONVENIENCE_FUNCTIONS = set([ - 'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn', - 'warning']) - - -class LoggingChecker(checkers.BaseChecker): - """Checks use of the logging module.""" - - __implements__ = interfaces.IASTNGChecker - name = 'logging' - msgs = MSGS - - def visit_module(self, unused_node): - """Clears any state left in this checker from last module checked.""" - # The code being checked can just as easily "import logging as foo", - # so it is necessary to process the imports and store in this field - # what name the logging module is actually given. 
- self._logging_name = None - - def visit_import(self, node): - """Checks to see if this module uses Python's built-in logging.""" - for module, as_name in node.names: - if module == 'logging': - if as_name: - self._logging_name = as_name - else: - self._logging_name = 'logging' - - def visit_callfunc(self, node): - """Checks calls to (simple forms of) logging methods.""" - if (not isinstance(node.func, astng.Getattr) - or not isinstance(node.func.expr, astng.Name) - or node.func.expr.name != self._logging_name): - return - self._check_convenience_methods(node) - self._check_log_methods(node) - - def _check_convenience_methods(self, node): - """Checks calls to logging convenience methods (like logging.warn).""" - if node.func.attrname not in CHECKED_CONVENIENCE_FUNCTIONS: - return - if node.starargs or node.kwargs or not node.args: - # Either no args, star args, or double-star args. Beyond the - # scope of this checker. - return - if isinstance(node.args[0], astng.BinOp) and node.args[0].op == '%': - self.add_message('W1201', node=node) - elif isinstance(node.args[0], astng.Const): - self._check_format_string(node, 0) - - def _check_log_methods(self, node): - """Checks calls to logging.log(level, format, *format_args).""" - if node.func.attrname != 'log': - return - if node.starargs or node.kwargs or len(node.args) < 2: - # Either a malformed call, star args, or double-star args. Beyond - # the scope of this checker. - return - if isinstance(node.args[1], astng.BinOp) and node.args[1].op == '%': - self.add_message('W1201', node=node) - elif isinstance(node.args[1], astng.Const): - self._check_format_string(node, 1) - - def _check_format_string(self, node, format_arg): - """Checks that format string tokens match the supplied arguments. - - Args: - node: AST node to be checked. - format_arg: Index of the format string in the node arguments. 
- """ - num_args = self._count_supplied_tokens(node.args[format_arg + 1:]) - if not num_args: - # If no args were supplied, then all format strings are valid - - # don't check any further. - return - format_string = node.args[format_arg].value - if not isinstance(format_string, basestring): - # If the log format is constant non-string (e.g. logging.debug(5)), - # ensure there are no arguments. - required_num_args = 0 - else: - try: - keyword_args, required_num_args = \ - utils.parse_format_string(format_string) - if keyword_args: - # Keyword checking on logging strings is complicated by - # special keywords - out of scope. - return - except utils.UnsupportedFormatCharacter, e: - c = format_string[e.index] - self.add_message('E1200', node=node, args=(c, ord(c), e.index)) - return - except utils.IncompleteFormatString: - self.add_message('E1201', node=node) - return - if num_args > required_num_args: - self.add_message('E1205', node=node) - elif num_args < required_num_args: - self.add_message('E1206', node=node) - - def _count_supplied_tokens(self, args): - """Counts the number of tokens in an args list. - - The Python log functions allow for special keyword arguments: func, - exc_info and extra. To handle these cases correctly, we only count - arguments that aren't keywords. - - Args: - args: List of AST nodes that are arguments for a log format string. - - Returns: - Number of AST nodes that aren't keywords. 
- """ - return sum(1 for arg in args if not isinstance(arg, astng.Keyword)) - - -def register(linter): - """Required method to auto-register this checker.""" - linter.register_checker(LoggingChecker(linter)) diff --git a/pylibs/pylint/checkers/misc.py b/pylibs/pylint/checkers/misc.py deleted file mode 100644 index 8f6ad2dc..00000000 --- a/pylibs/pylint/checkers/misc.py +++ /dev/null @@ -1,77 +0,0 @@ -# pylint: disable=W0511 -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE). 
- http://www.logilab.fr/ -- mailto:contact@logilab.fr - -Check source code is ascii only or has an encoding declaration (PEP 263) -""" - -import re, sys - -from pylint.interfaces import IRawChecker -from pylint.checkers import BaseChecker - - -MSGS = { - 'W0511': ('%s', - 'Used when a warning note as FIXME or XXX is detected.'), - } - -class EncodingChecker(BaseChecker): - """checks for: - * warning notes in the code like FIXME, XXX - * PEP 263: source code with non ascii character but no encoding declaration - """ - __implements__ = IRawChecker - - # configuration section name - name = 'miscellaneous' - msgs = MSGS - - options = (('notes', - {'type' : 'csv', 'metavar' : '', - 'default' : ('FIXME', 'XXX', 'TODO'), - 'help' : 'List of note tags to take in consideration, \ -separated by a comma.' - }), - ) - - def __init__(self, linter=None): - BaseChecker.__init__(self, linter) - - def process_module(self, node): - """inspect the source file to found encoding problem or fixmes like - notes - """ - stream = node.file_stream - stream.seek(0) - # warning notes in the code - notes = [] - for note in self.config.notes: - notes.append(re.compile(note)) - linenum = 1 - for line in stream.readlines(): - for note in notes: - match = note.search(line) - if match: - self.add_message('W0511', args=line[match.start():-1], - line=linenum) - break - linenum += 1 - - - -def register(linter): - """required method to auto register this checker""" - linter.register_checker(EncodingChecker(linter)) diff --git a/pylibs/pylint/checkers/newstyle.py b/pylibs/pylint/checkers/newstyle.py deleted file mode 100644 index 7bb146da..00000000 --- a/pylibs/pylint/checkers/newstyle.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (c) 2005-2006 LOGILAB S.A. (Paris, FRANCE). 
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""check for new / old style related problems -""" - -from logilab import astng - -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import check_messages - -MSGS = { - 'E1001': ('Use of __slots__ on an old style class', - 'Used when an old style class uses the __slots__ attribute.'), - 'E1002': ('Use of super on an old style class', - 'Used when an old style class uses the super builtin.'), - 'E1003': ('Bad first argument %r given to super class', - 'Used when another argument than the current class is given as \ - first argument of the super builtin.'), - 'W1001': ('Use of "property" on an old style class', - 'Used when PyLint detect the use of the builtin "property" \ - on an old style class while this is relying on new style \ - classes features'), - } - - -class NewStyleConflictChecker(BaseChecker): - """checks for usage of new style capabilities on old style classes and - other new/old styles conflicts problems - * use of property, __slots__, super - * "super" usage - """ - - __implements__ = (IASTNGChecker,) - - # configuration section name - name = 'newstyle' - # messages - msgs = MSGS - priority = -2 - # configuration options - options = () 
- - @check_messages('E1001') - def visit_class(self, node): - """check __slots__ usage - """ - if '__slots__' in node and not node.newstyle: - self.add_message('E1001', node=node) - - @check_messages('W1001') - def visit_callfunc(self, node): - """check property usage""" - parent = node.parent.frame() - if (isinstance(parent, astng.Class) and - not parent.newstyle and - isinstance(node.func, astng.Name)): - name = node.func.name - if name == 'property': - self.add_message('W1001', node=node) - - @check_messages('E1002', 'E1003') - def visit_function(self, node): - """check use of super""" - # ignore actual functions or method within a new style class - if not node.is_method(): - return - klass = node.parent.frame() - for stmt in node.nodes_of_class(astng.CallFunc): - expr = stmt.func - if not isinstance(expr, astng.Getattr): - continue - call = expr.expr - # skip the test if using super - if isinstance(call, astng.CallFunc) and \ - isinstance(call.func, astng.Name) and \ - call.func.name == 'super': - if not klass.newstyle: - # super should not be used on an old style class - self.add_message('E1002', node=node) - else: - # super first arg should be the class - try: - supcls = (call.args and call.args[0].infer().next() - or None) - except astng.InferenceError: - continue - if klass is not supcls: - supcls = getattr(supcls, 'name', supcls) - self.add_message('E1003', node=node, args=supcls) - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(NewStyleConflictChecker(linter)) diff --git a/pylibs/pylint/checkers/string_format.py b/pylibs/pylint/checkers/string_format.py deleted file mode 100644 index c420a604..00000000 --- a/pylibs/pylint/checkers/string_format.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard -# Copyright (c) 2004-2010 LOGILAB S.A. (Paris, FRANCE). 
-# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. - - -"""Checker for string formatting operations. -""" - -import string -from logilab import astng -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker -from pylint.checkers import utils - - -MSGS = { - 'E1300': ("Unsupported format character %r (%#02x) at index %d", - "Used when a unsupported format character is used in a format\ - string."), - 'E1301': ("Format string ends in middle of conversion specifier", - "Used when a format string terminates before the end of a \ - conversion specifier."), - 'E1302': ("Mixing named and unnamed conversion specifiers in format string", - "Used when a format string contains both named (e.g. '%(foo)d') \ - and unnamed (e.g. '%d') conversion specifiers. 
This is also \ - used when a named conversion specifier contains * for the \ - minimum field width and/or precision."), - 'E1303': ("Expected mapping for format string, not %s", - "Used when a format string that uses named conversion specifiers \ - is used with an argument that is not a mapping."), - 'W1300': ("Format string dictionary key should be a string, not %s", - "Used when a format string that uses named conversion specifiers \ - is used with a dictionary whose keys are not all strings."), - 'W1301': ("Unused key %r in format string dictionary", - "Used when a format string that uses named conversion specifiers \ - is used with a dictionary that conWtains keys not required by the \ - format string."), - 'E1304': ("Missing key %r in format string dictionary", - "Used when a format string that uses named conversion specifiers \ - is used with a dictionary that doesn't contain all the keys \ - required by the format string."), - 'E1305': ("Too many arguments for format string", - "Used when a format string that uses unnamed conversion \ - specifiers is given too few arguments."), - 'E1306': ("Not enough arguments for format string", - "Used when a format string that uses unnamed conversion \ - specifiers is given too many arguments"), - } - -OTHER_NODES = (astng.Const, astng.List, astng.Backquote, - astng.Lambda, astng.Function, - astng.ListComp, astng.SetComp, astng.GenExpr) - -class StringFormatChecker(BaseChecker): - """Checks string formatting operations to ensure that the format string - is valid and the arguments match the format string. 
- """ - - __implements__ = (IASTNGChecker,) - name = 'string_format' - msgs = MSGS - - def visit_binop(self, node): - if node.op != '%': - return - left = node.left - args = node.right - - if not (isinstance(left, astng.Const) - and isinstance(left.value, basestring)): - return - format_string = left.value - try: - required_keys, required_num_args = \ - utils.parse_format_string(format_string) - except utils.UnsupportedFormatCharacter, e: - c = format_string[e.index] - self.add_message('E1300', node=node, args=(c, ord(c), e.index)) - return - except utils.IncompleteFormatString: - self.add_message('E1301', node=node) - return - if required_keys and required_num_args: - # The format string uses both named and unnamed format - # specifiers. - self.add_message('E1302', node=node) - elif required_keys: - # The format string uses only named format specifiers. - # Check that the RHS of the % operator is a mapping object - # that contains precisely the set of keys required by the - # format string. - if isinstance(args, astng.Dict): - keys = set() - unknown_keys = False - for k, v in args.items: - if isinstance(k, astng.Const): - key = k.value - if isinstance(key, basestring): - keys.add(key) - else: - self.add_message('W1300', node=node, args=key) - else: - # One of the keys was something other than a - # constant. Since we can't tell what it is, - # supress checks for missing keys in the - # dictionary. - unknown_keys = True - if not unknown_keys: - for key in required_keys: - if key not in keys: - self.add_message('E1304', node=node, args=key) - for key in keys: - if key not in required_keys: - self.add_message('W1301', node=node, args=key) - elif isinstance(args, OTHER_NODES + (astng.Tuple,)): - type_name = type(args).__name__ - self.add_message('E1303', node=node, args=type_name) - # else: - # The RHS of the format specifier is a name or - # expression. It may be a mapping object, so - # there's nothing we can check. 
- else: - # The format string uses only unnamed format specifiers. - # Check that the number of arguments passed to the RHS of - # the % operator matches the number required by the format - # string. - if isinstance(args, astng.Tuple): - num_args = len(args.elts) - elif isinstance(args, OTHER_NODES + (astng.Dict, astng.DictComp)): - num_args = 1 - else: - # The RHS of the format specifier is a name or - # expression. It could be a tuple of unknown size, so - # there's nothing we can check. - num_args = None - if num_args is not None: - if num_args > required_num_args: - self.add_message('E1305', node=node) - elif num_args < required_num_args: - self.add_message('E1306', node=node) - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(StringFormatChecker(linter)) diff --git a/pylibs/pylint/checkers/typecheck.py b/pylibs/pylint/checkers/typecheck.py deleted file mode 100644 index 8f00ca2b..00000000 --- a/pylibs/pylint/checkers/typecheck.py +++ /dev/null @@ -1,374 +0,0 @@ -# Copyright (c) 2006-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-"""try to find more bugs in the code using astng inference capabilities -""" - -import re -import shlex - -from logilab import astng -from logilab.astng import InferenceError, NotFoundError, YES, Instance - -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import safe_infer, is_super, check_messages - -MSGS = { - 'E1101': ('%s %r has no %r member', - 'Used when a variable is accessed for an unexistent member.'), - 'E1102': ('%s is not callable', - 'Used when an object being called has been inferred to a non \ - callable object'), - 'E1103': ('%s %r has no %r member (but some types could not be inferred)', - 'Used when a variable is accessed for an unexistent member, but \ - astng was not able to interpret all possible types of this \ - variable.'), - 'E1111': ('Assigning to function call which doesn\'t return', - 'Used when an assignment is done on a function call but the \ - inferred function doesn\'t return anything.'), - 'W1111': ('Assigning to function call which only returns None', - 'Used when an assignment is done on a function call but the \ - inferred function returns nothing but None.'), - - 'E1120': ('No value passed for parameter %s in function call', - 'Used when a function call passes too few arguments.'), - 'E1121': ('Too many positional arguments for function call', - 'Used when a function call passes too many positional \ - arguments.'), - 'E1122': ('Duplicate keyword argument %r in function call', - 'Used when a function call passes the same keyword argument \ - multiple times.'), - 'E1123': ('Passing unexpected keyword argument %r in function call', - 'Used when a function call passes a keyword argument that \ - doesn\'t correspond to one of the function\'s parameter names.'), - 'E1124': ('Multiple values passed for parameter %r in function call', - 'Used when a function call would result in assigning multiple \ - values to a function parameter, one value from a positional \ - 
argument and one from a keyword argument.'), - } - -class TypeChecker(BaseChecker): - """try to find bugs in the code using type inference - """ - - __implements__ = (IASTNGChecker,) - - # configuration section name - name = 'typecheck' - # messages - msgs = MSGS - priority = -1 - # configuration options - options = (('ignore-mixin-members', - {'default' : True, 'type' : 'yn', 'metavar': '', - 'help' : 'Tells whether missing members accessed in mixin \ -class should be ignored. A mixin class is detected if its name ends with \ -"mixin" (case insensitive).'} - ), - - ('ignored-classes', - {'default' : ('SQLObject',), - 'type' : 'csv', - 'metavar' : '', - 'help' : 'List of classes names for which member attributes \ -should not be checked (useful for classes with attributes dynamically set).'} - ), - - ('zope', - {'default' : False, 'type' : 'yn', 'metavar': '', - 'help' : 'When zope mode is activated, add a predefined set \ -of Zope acquired attributes to generated-members.'} - ), - ('generated-members', - {'default' : ( - 'REQUEST', 'acl_users', 'aq_parent'), - 'type' : 'string', - 'metavar' : '', - 'help' : 'List of members which are set dynamically and \ -missed by pylint inference system, and so shouldn\'t trigger E0201 when \ -accessed. Python regular expressions are accepted.'} - ), - ) - - def open(self): - # do this in open since config not fully initialized in __init__ - self.generated_members = list(self.config.generated_members) - if self.config.zope: - self.generated_members.extend(('REQUEST', 'acl_users', 'aq_parent')) - - def visit_assattr(self, node): - if isinstance(node.ass_type(), astng.AugAssign): - self.visit_getattr(node) - - def visit_delattr(self, node): - self.visit_getattr(node) - - @check_messages('E1101', 'E1103') - def visit_getattr(self, node): - """check that the accessed attribute exists - - to avoid to much false positives for now, we'll consider the code as - correct if a single of the inferred nodes has the accessed attribute. 
- - function/method, super call and metaclasses are ignored - """ - # generated_members may containt regular expressions - # (surrounded by quote `"` and followed by a comma `,`) - # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' => - # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}') - if isinstance(self.config.generated_members, str): - gen = shlex.shlex(self.config.generated_members) - gen.whitespace += ',' - self.config.generated_members = tuple(tok.strip('"') for tok in gen) - for pattern in self.config.generated_members: - # attribute is marked as generated, stop here - if re.match(pattern, node.attrname): - return - try: - infered = list(node.expr.infer()) - except InferenceError: - return - # list of (node, nodename) which are missing the attribute - missingattr = set() - ignoremim = self.config.ignore_mixin_members - inference_failure = False - for owner in infered: - # skip yes object - if owner is YES: - inference_failure = True - continue - # skip None anyway - if isinstance(owner, astng.Const) and owner.value is None: - continue - # XXX "super" / metaclass call - if is_super(owner) or getattr(owner, 'type', None) == 'metaclass': - continue - name = getattr(owner, 'name', 'None') - if name in self.config.ignored_classes: - continue - if ignoremim and name[-5:].lower() == 'mixin': - continue - try: - if not [n for n in owner.getattr(node.attrname) - if not isinstance(n.statement(), astng.AugAssign)]: - missingattr.add((owner, name)) - continue - except AttributeError: - # XXX method / function - continue - except NotFoundError: - if isinstance(owner, Instance) and owner.has_dynamic_getattr(): - continue - # explicit skipping of optparse'Values class - if owner.name == 'Values' and owner.root().name == 'optparse': - continue - missingattr.add((owner, name)) - continue - # stop on the first found - break - else: - # we have not found any node with the attributes, display the - # message for infered nodes - done = set() - for owner, name in missingattr: - if 
isinstance(owner, Instance): - actual = owner._proxied - else: - actual = owner - if actual in done: - continue - done.add(actual) - if inference_failure: - msgid = 'E1103' - else: - msgid = 'E1101' - self.add_message(msgid, node=node, - args=(owner.display_type(), name, - node.attrname)) - - - def visit_assign(self, node): - """check that if assigning to a function call, the function is - possibly returning something valuable - """ - if not isinstance(node.value, astng.CallFunc): - return - function_node = safe_infer(node.value.func) - # skip class, generator and incomplete function definition - if not (isinstance(function_node, astng.Function) and - function_node.root().fully_defined()): - return - if function_node.is_generator() \ - or function_node.is_abstract(pass_is_abstract=False): - return - returns = list(function_node.nodes_of_class(astng.Return, - skip_klass=astng.Function)) - if len(returns) == 0: - self.add_message('E1111', node=node) - else: - for rnode in returns: - if not (isinstance(rnode.value, astng.Const) - and rnode.value.value is None): - break - else: - self.add_message('W1111', node=node) - - def visit_callfunc(self, node): - """check that called functions/methods are inferred to callable objects, - and that the arguments passed to the function match the parameters in - the inferred function's definition - """ - - # Build the set of keyword arguments, checking for duplicate keywords, - # and count the positional arguments. 
- keyword_args = set() - num_positional_args = 0 - for arg in node.args: - if isinstance(arg, astng.Keyword): - keyword = arg.arg - if keyword in keyword_args: - self.add_message('E1122', node=node, args=keyword) - keyword_args.add(keyword) - else: - num_positional_args += 1 - - called = safe_infer(node.func) - # only function, generator and object defining __call__ are allowed - if called is not None and not called.callable(): - self.add_message('E1102', node=node, args=node.func.as_string()) - - # Note that BoundMethod is a subclass of UnboundMethod (huh?), so must - # come first in this 'if..else'. - if isinstance(called, astng.BoundMethod): - # Bound methods have an extra implicit 'self' argument. - num_positional_args += 1 - elif isinstance(called, astng.UnboundMethod): - if called.decorators is not None: - for d in called.decorators.nodes: - if isinstance(d, astng.Name) and (d.name == 'classmethod'): - # Class methods have an extra implicit 'cls' argument. - num_positional_args += 1 - break - elif (isinstance(called, astng.Function) or - isinstance(called, astng.Lambda)): - pass - else: - return - - if called.args.args is None: - # Built-in functions have no argument information. - return - - if len( called.argnames() ) != len( set( called.argnames() ) ): - # Duplicate parameter name (see E9801). We can't really make sense - # of the function call in this case, so just return. - return - - # Analyze the list of formal parameters. - num_mandatory_parameters = len(called.args.args) - len(called.args.defaults) - parameters = [] - parameter_name_to_index = {} - for i, arg in enumerate(called.args.args): - if isinstance(arg, astng.Tuple): - name = None - # Don't store any parameter names within the tuple, since those - # are not assignable from keyword arguments. 
- else: - if isinstance(arg, astng.Keyword): - name = arg.arg - else: - assert isinstance(arg, astng.AssName) - # This occurs with: - # def f( (a), (b) ): pass - name = arg.name - parameter_name_to_index[name] = i - if i >= num_mandatory_parameters: - defval = called.args.defaults[i - num_mandatory_parameters] - else: - defval = None - parameters.append([(name, defval), False]) - - # Match the supplied arguments against the function parameters. - - # 1. Match the positional arguments. - for i in range(num_positional_args): - if i < len(parameters): - parameters[i][1] = True - elif called.args.vararg is not None: - # The remaining positional arguments get assigned to the *args - # parameter. - break - else: - # Too many positional arguments. - self.add_message('E1121', node=node) - break - - # 2. Match the keyword arguments. - for keyword in keyword_args: - if keyword in parameter_name_to_index: - i = parameter_name_to_index[keyword] - if parameters[i][1]: - # Duplicate definition of function parameter. - self.add_message('E1124', node=node, args=keyword) - else: - parameters[i][1] = True - elif called.args.kwarg is not None: - # The keyword argument gets assigned to the **kwargs parameter. - pass - else: - # Unexpected keyword argument. - self.add_message('E1123', node=node, args=keyword) - - # 3. Match the *args, if any. Note that Python actually processes - # *args _before_ any keyword arguments, but we wait until after - # looking at the keyword arguments so as to make a more conservative - # guess at how many values are in the *args sequence. - if node.starargs is not None: - for i in range(num_positional_args, len(parameters)): - [(name, defval), assigned] = parameters[i] - # Assume that *args provides just enough values for all - # non-default parameters after the last parameter assigned by - # the positional arguments but before the first parameter - # assigned by the keyword arguments. This is the best we can - # get without generating any false positives. 
- if (defval is not None) or assigned: - break - parameters[i][1] = True - - # 4. Match the **kwargs, if any. - if node.kwargs is not None: - for i, [(name, defval), assigned] in enumerate(parameters): - # Assume that *kwargs provides values for all remaining - # unassigned named parameters. - if name is not None: - parameters[i][1] = True - else: - # **kwargs can't assign to tuples. - pass - - # Check that any parameters without a default have been assigned - # values. - for [(name, defval), assigned] in parameters: - if (defval is None) and not assigned: - if name is None: - display = '' - else: - display_name = repr(name) - self.add_message('E1120', node=node, args=display_name) - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(TypeChecker(linter)) diff --git a/pylibs/pylint/checkers/utils.py b/pylibs/pylint/checkers/utils.py deleted file mode 100644 index 43d619f6..00000000 --- a/pylibs/pylint/checkers/utils.py +++ /dev/null @@ -1,289 +0,0 @@ -# pylint: disable=W0611 -# -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-"""some functions that may be useful for various checkers -""" - -import string -from logilab import astng -from logilab.common.compat import builtins -BUILTINS_NAME = builtins.__name__ - -COMP_NODE_TYPES = astng.ListComp, astng.SetComp, astng.DictComp, astng.GenExpr - -def safe_infer(node): - """return the inferred value for the given node. - Return None if inference failed or if there is some ambiguity (more than - one node has been inferred) - """ - try: - inferit = node.infer() - value = inferit.next() - except astng.InferenceError: - return - try: - inferit.next() - return # None if there is ambiguity on the inferred node - except StopIteration: - return value - -def is_super(node): - """return True if the node is referencing the "super" builtin function - """ - if getattr(node, 'name', None) == 'super' and \ - node.root().name == BUILTINS_NAME: - return True - return False - -def is_error(node): - """return true if the function does nothing but raising an exception""" - for child_node in node.get_children(): - if isinstance(child_node, astng.Raise): - return True - return False - -def is_raising(body): - """return true if the given statement node raise an exception""" - for node in body: - if isinstance(node, astng.Raise): - return True - return False - -def is_empty(body): - """return true if the given node does nothing but 'pass'""" - return len(body) == 1 and isinstance(body[0], astng.Pass) - -builtins = __builtins__.copy() -SPECIAL_BUILTINS = ('__builtins__',) # '__path__', '__file__') - -def is_builtin(name): # was is_native_builtin - """return true if could be considered as a builtin defined by python - """ - if name in builtins: - return True - if name in SPECIAL_BUILTINS: - return True - return False - -def is_defined_before(var_node): - """return True if the variable node is defined by a parent node (list, - set, dict, or generator comprehension, lambda) or in a previous sibling - node on the same line (statement_defining ; statement_using) - """ - 
varname = var_node.name - _node = var_node.parent - while _node: - if isinstance(_node, COMP_NODE_TYPES): - for ass_node in _node.nodes_of_class(astng.AssName): - if ass_node.name == varname: - return True - elif isinstance(_node, astng.For): - for ass_node in _node.target.nodes_of_class(astng.AssName): - if ass_node.name == varname: - return True - elif isinstance(_node, astng.With): - if _node.vars is None: - # quickfix : case in which 'with' is used without 'as' - return False - if _node.vars.name == varname: - return True - elif isinstance(_node, (astng.Lambda, astng.Function)): - if _node.args.is_argument(varname): - return True - if getattr(_node, 'name', None) == varname: - return True - break - _node = _node.parent - # possibly multiple statements on the same line using semi colon separator - stmt = var_node.statement() - _node = stmt.previous_sibling() - lineno = stmt.fromlineno - while _node and _node.fromlineno == lineno: - for ass_node in _node.nodes_of_class(astng.AssName): - if ass_node.name == varname: - return True - for imp_node in _node.nodes_of_class( (astng.From, astng.Import)): - if varname in [name[1] or name[0] for name in imp_node.names]: - return True - _node = _node.previous_sibling() - return False - -def is_func_default(node): - """return true if the given Name node is used in function default argument's - value - """ - parent = node.scope() - if isinstance(parent, astng.Function): - for default_node in parent.args.defaults: - for default_name_node in default_node.nodes_of_class(astng.Name): - if default_name_node is node: - return True - return False - -def is_func_decorator(node): - """return true if the name is used in function decorator""" - parent = node.parent - while parent is not None: - if isinstance(parent, astng.Decorators): - return True - if parent.is_statement or isinstance(parent, astng.Lambda): - break - parent = parent.parent - return False - -def is_ancestor_name(frame, node): - """return True if `frame` is a 
astng.Class node with `node` in the - subtree of its bases attribute - """ - try: - bases = frame.bases - except AttributeError: - return False - for base in bases: - if node in base.nodes_of_class(astng.Name): - return True - return False - -def assign_parent(node): - """return the higher parent which is not an AssName, Tuple or List node - """ - while node and isinstance(node, (astng.AssName, - astng.Tuple, - astng.List)): - node = node.parent - return node - -def overrides_an_abstract_method(class_node, name): - """return True if pnode is a parent of node""" - for ancestor in class_node.ancestors(): - if name in ancestor and isinstance(ancestor[name], astng.Function) and \ - ancestor[name].is_abstract(pass_is_abstract=False): - return True - return False - -def overrides_a_method(class_node, name): - """return True if is a method overridden from an ancestor""" - for ancestor in class_node.ancestors(): - if name in ancestor and isinstance(ancestor[name], astng.Function): - return True - return False - -PYMETHODS = set(('__new__', '__init__', '__del__', '__hash__', - '__str__', '__repr__', - '__len__', '__iter__', - '__delete__', '__get__', '__set__', - '__getitem__', '__setitem__', '__delitem__', '__contains__', - '__getattribute__', '__getattr__', '__setattr__', '__delattr__', - '__call__', - '__enter__', '__exit__', - '__cmp__', '__ge__', '__gt__', '__le__', '__lt__', '__eq__', - '__nonzero__', '__neg__', '__invert__', - '__mul__', '__imul__', '__rmul__', - '__div__', '__idiv__', '__rdiv__', - '__add__', '__iadd__', '__radd__', - '__sub__', '__isub__', '__rsub__', - '__pow__', '__ipow__', '__rpow__', - '__mod__', '__imod__', '__rmod__', - '__and__', '__iand__', '__rand__', - '__or__', '__ior__', '__ror__', - '__xor__', '__ixor__', '__rxor__', - # XXX To be continued - )) - -def check_messages(*messages): - """decorator to store messages that are handled by a checker method""" - - def store_messages(func): - func.checks_msgs = messages - return func - return 
store_messages - -class IncompleteFormatString(Exception): - """A format string ended in the middle of a format specifier.""" - pass - -class UnsupportedFormatCharacter(Exception): - """A format character in a format string is not one of the supported - format characters.""" - def __init__(self, index): - Exception.__init__(self, index) - self.index = index - -def parse_format_string(format_string): - """Parses a format string, returning a tuple of (keys, num_args), where keys - is the set of mapping keys in the format string, and num_args is the number - of arguments required by the format string. Raises - IncompleteFormatString or UnsupportedFormatCharacter if a - parse error occurs.""" - keys = set() - num_args = 0 - def next_char(i): - i += 1 - if i == len(format_string): - raise IncompleteFormatString - return (i, format_string[i]) - i = 0 - while i < len(format_string): - c = format_string[i] - if c == '%': - i, c = next_char(i) - # Parse the mapping key (optional). - key = None - if c == '(': - depth = 1 - i, c = next_char(i) - key_start = i - while depth != 0: - if c == '(': - depth += 1 - elif c == ')': - depth -= 1 - i, c = next_char(i) - key_end = i - 1 - key = format_string[key_start:key_end] - - # Parse the conversion flags (optional). - while c in '#0- +': - i, c = next_char(i) - # Parse the minimum field width (optional). - if c == '*': - num_args += 1 - i, c = next_char(i) - else: - while c in string.digits: - i, c = next_char(i) - # Parse the precision (optional). - if c == '.': - i, c = next_char(i) - if c == '*': - num_args += 1 - i, c = next_char(i) - else: - while c in string.digits: - i, c = next_char(i) - # Parse the length modifier (optional). - if c in 'hlL': - i, c = next_char(i) - # Parse the conversion type (mandatory). 
- if c not in 'diouxXeEfFgGcrs%': - raise UnsupportedFormatCharacter(i) - if key: - keys.add(key) - elif c != '%': - num_args += 1 - i += 1 - return keys, num_args diff --git a/pylibs/pylint/checkers/variables.py b/pylibs/pylint/checkers/variables.py deleted file mode 100644 index ce98e04b..00000000 --- a/pylibs/pylint/checkers/variables.py +++ /dev/null @@ -1,533 +0,0 @@ -# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""variables checkers for Python code -""" - -import sys -from copy import copy - -from logilab import astng -from logilab.astng import are_exclusive, builtin_lookup, ASTNGBuildingException - -from pylint.interfaces import IASTNGChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin, - is_defined_before, is_error, is_func_default, is_func_decorator, - assign_parent, check_messages) - -def overridden_method(klass, name): - """get overridden method if any""" - try: - parent = klass.local_attr_ancestors(name).next() - except (StopIteration, KeyError): - return None - try: - meth_node = parent[name] - except KeyError: - # We have found an ancestor defining but it's not in the local - # dictionary. 
This may happen with astng built from living objects. - return None - if isinstance(meth_node, astng.Function): - return meth_node - return None - - -MSGS = { - 'E0601': ('Using variable %r before assignment', - 'Used when a local variable is accessed before it\'s \ - assignment.'), - 'E0602': ('Undefined variable %r', - 'Used when an undefined variable is accessed.'), - - 'E0611': ('No name %r in module %r', - 'Used when a name cannot be found in a module.'), - - 'W0601': ('Global variable %r undefined at the module level', - 'Used when a variable is defined through the "global" statement \ - but the variable is not defined in the module scope.'), - 'W0602': ('Using global for %r but no assignment is done', - 'Used when a variable is defined through the "global" statement \ - but no assignment to this variable is done.'), - 'W0603': ('Using the global statement', # W0121 - 'Used when you use the "global" statement to update a global \ - variable. PyLint just try to discourage this \ - usage. That doesn\'t mean you can not use it !'), - 'W0604': ('Using the global statement at the module level', # W0103 - 'Used when you use the "global" statement at the module level \ - since it has no effect'), - 'W0611': ('Unused import %s', - 'Used when an imported module or variable is not used.'), - 'W0612': ('Unused variable %r', - 'Used when a variable is defined but not used.'), - 'W0613': ('Unused argument %r', - 'Used when a function or method argument is not used.'), - 'W0614': ('Unused import %s from wildcard import', - 'Used when an imported module or variable is not used from a \ - \'from X import *\' style import.'), - - 'W0621': ('Redefining name %r from outer scope (line %s)', - 'Used when a variable\'s name hide a name defined in the outer \ - scope.'), - 'W0622': ('Redefining built-in %r', - 'Used when a variable or function override a built-in.'), - - 'W0631': ('Using possibly undefined loop variable %r', - 'Used when an loop variable (i.e. 
defined by a for loop or \ - a list comprehension or a generator expression) is used outside \ - the loop.'), - } - -class VariablesChecker(BaseChecker): - """checks for - * unused variables / imports - * undefined variables - * redefinition of variable from builtins or from an outer scope - * use of variable before assignment - """ - - __implements__ = IASTNGChecker - - name = 'variables' - msgs = MSGS - priority = -1 - options = ( - ("init-import", - {'default': 0, 'type' : 'yn', 'metavar' : '', - 'help' : 'Tells whether we should check for unused import in \ -__init__ files.'}), - ("dummy-variables-rgx", - {'default': ('_|dummy'), - 'type' :'regexp', 'metavar' : '', - 'help' : 'A regular expression matching the beginning of \ - the name of dummy variables (i.e. not used).'}), - ("additional-builtins", - {'default': (), 'type' : 'csv', - 'metavar' : '', - 'help' : 'List of additional names supposed to be defined in \ -builtins. Remember that you should avoid to define new builtins when possible.' 
- }), - ) - def __init__(self, linter=None): - BaseChecker.__init__(self, linter) - self._to_consume = None - self._checking_mod_attr = None - self._vars = None - - def visit_module(self, node): - """visit module : update consumption analysis variable - checks globals doesn't overrides builtins - """ - self._to_consume = [(copy(node.locals), {}, 'module')] - self._vars = [] - for name, stmts in node.locals.items(): - if is_builtin(name): - # do not print Redefining builtin for additional builtins - self.add_message('W0622', args=name, node=stmts[0]) - - @check_messages('W0611', 'W0614') - def leave_module(self, node): - """leave module: check globals - """ - assert len(self._to_consume) == 1 - not_consumed = self._to_consume.pop()[0] - # don't check unused imports in __init__ files - if not self.config.init_import and node.package: - return - for name, stmts in not_consumed.items(): - stmt = stmts[0] - if isinstance(stmt, astng.Import): - self.add_message('W0611', args=name, node=stmt) - elif isinstance(stmt, astng.From) and stmt.modname != '__future__': - if stmt.names[0][0] == '*': - self.add_message('W0614', args=name, node=stmt) - else: - self.add_message('W0611', args=name, node=stmt) - del self._to_consume - del self._vars - - def visit_class(self, node): - """visit class: update consumption analysis variable - """ - self._to_consume.append((copy(node.locals), {}, 'class')) - - def leave_class(self, _): - """leave class: update consumption analysis variable - """ - # do not check for not used locals here (no sense) - self._to_consume.pop() - - def visit_lambda(self, node): - """visit lambda: update consumption analysis variable - """ - self._to_consume.append((copy(node.locals), {}, 'lambda')) - - def leave_lambda(self, _): - """leave lambda: update consumption analysis variable - """ - # do not check for not used locals here - self._to_consume.pop() - - def visit_genexpr(self, node): - """visit genexpr: update consumption analysis variable - """ - 
self._to_consume.append((copy(node.locals), {}, 'comprehension')) - - def leave_genexpr(self, _): - """leave genexpr: update consumption analysis variable - """ - # do not check for not used locals here - self._to_consume.pop() - - def visit_dictcomp(self, node): - """visit dictcomp: update consumption analysis variable - """ - self._to_consume.append((copy(node.locals), {}, 'comprehension')) - - def leave_dictcomp(self, _): - """leave dictcomp: update consumption analysis variable - """ - # do not check for not used locals here - self._to_consume.pop() - - def visit_setcomp(self, node): - """visit setcomp: update consumption analysis variable - """ - self._to_consume.append((copy(node.locals), {}, 'comprehension')) - - def leave_setcomp(self, _): - """leave setcomp: update consumption analysis variable - """ - # do not check for not used locals here - self._to_consume.pop() - - def visit_function(self, node): - """visit function: update consumption analysis variable and check locals - """ - self._to_consume.append((copy(node.locals), {}, 'function')) - self._vars.append({}) - if not set(('W0621', 'W0622')) & self.active_msgs: - return - globs = node.root().globals - for name, stmt in node.items(): - if name in globs and not isinstance(stmt, astng.Global): - line = globs[name][0].lineno - self.add_message('W0621', args=(name, line), node=stmt) - elif is_builtin(name): - # do not print Redefining builtin for additional builtins - self.add_message('W0622', args=name, node=stmt) - - def leave_function(self, node): - """leave function: check function's locals are consumed""" - not_consumed = self._to_consume.pop()[0] - self._vars.pop(0) - if not set(('W0612', 'W0613')) & self.active_msgs: - return - # don't check arguments of function which are only raising an exception - if is_error(node): - return - # don't check arguments of abstract methods or within an interface - is_method = node.is_method() - klass = node.parent.frame() - if is_method and (klass.type == 
'interface' or node.is_abstract()): - return - authorized_rgx = self.config.dummy_variables_rgx - called_overridden = False - argnames = node.argnames() - for name, stmts in not_consumed.iteritems(): - # ignore some special names specified by user configuration - if authorized_rgx.match(name): - continue - # ignore names imported by the global statement - # FIXME: should only ignore them if it's assigned latter - stmt = stmts[0] - if isinstance(stmt, astng.Global): - continue - # care about functions with unknown argument (builtins) - if name in argnames: - if is_method: - # don't warn for the first argument of a (non static) method - if node.type != 'staticmethod' and name == argnames[0]: - continue - # don't warn for argument of an overridden method - if not called_overridden: - overridden = overridden_method(klass, node.name) - called_overridden = True - if overridden is not None and name in overridden.argnames(): - continue - if node.name in PYMETHODS and node.name not in ('__init__', '__new__'): - continue - # don't check callback arguments XXX should be configurable - if node.name.startswith('cb_') or node.name.endswith('_cb'): - continue - self.add_message('W0613', args=name, node=stmt) - else: - self.add_message('W0612', args=name, node=stmt) - - @check_messages('W0601', 'W0602', 'W0603', 'W0604', 'W0622') - def visit_global(self, node): - """check names imported exists in the global scope""" - frame = node.frame() - if isinstance(frame, astng.Module): - self.add_message('W0604', node=node) - return - module = frame.root() - default_message = True - for name in node.names: - try: - assign_nodes = module.getattr(name) - except astng.NotFoundError: - # unassigned global, skip - assign_nodes = [] - for anode in assign_nodes: - if anode.parent is None: - # node returned for builtin attribute such as __file__, - # __doc__, etc... 
- continue - if anode.frame() is frame: - # same scope level assignment - break - else: - # global but no assignment - self.add_message('W0602', args=name, node=node) - default_message = False - if not assign_nodes: - continue - for anode in assign_nodes: - if anode.parent is None: - self.add_message('W0622', args=name, node=node) - break - if anode.frame() is module: - # module level assignment - break - else: - # global undefined at the module scope - self.add_message('W0601', args=name, node=node) - default_message = False - if default_message: - self.add_message('W0603', node=node) - - def _loopvar_name(self, node, name): - # filter variables according to node's scope - # XXX used to filter parents but don't remember why, and removing this - # fixes a W0631 false positive reported by Paul Hachmann on 2008/12 on - # python-projects (added to func_use_for_or_listcomp_var test) - #astmts = [stmt for stmt in node.lookup(name)[1] - # if hasattr(stmt, 'ass_type')] and - # not stmt.statement().parent_of(node)] - if 'W0631' not in self.active_msgs: - return - astmts = [stmt for stmt in node.lookup(name)[1] - if hasattr(stmt, 'ass_type')] - # filter variables according their respective scope test is_statement - # and parent to avoid #74747. This is not a total fix, which would - # introduce a mechanism similar to special attribute lookup in - # modules. Also, in order to get correct inference in this case, the - # scope lookup rules would need to be changed to return the initial - # assignment (which does not exist in code per se) as well as any later - # modifications. 
- if not astmts or (astmts[0].is_statement or astmts[0].parent) \ - and astmts[0].statement().parent_of(node): - _astmts = [] - else: - _astmts = astmts[:1] - for i, stmt in enumerate(astmts[1:]): - if astmts[i].statement().parent_of(stmt): - continue - _astmts.append(stmt) - astmts = _astmts - if len(astmts) == 1: - ass = astmts[0].ass_type() - if isinstance(ass, (astng.For, astng.Comprehension, astng.GenExpr)) \ - and not ass.statement() is node.statement(): - self.add_message('W0631', args=name, node=node) - - def visit_assname(self, node): - if isinstance(node.ass_type(), astng.AugAssign): - self.visit_name(node) - - def visit_delname(self, node): - self.visit_name(node) - - def visit_name(self, node): - """check that a name is defined if the current scope and doesn't - redefine a built-in - """ - stmt = node.statement() - if stmt.fromlineno is None: - # name node from a astng built from live code, skip - assert not stmt.root().file.endswith('.py') - return - name = node.name - frame = stmt.scope() - # if the name node is used as a function default argument's value or as - # a decorator, then start from the parent frame of the function instead - # of the function frame - and thus open an inner class scope - if (is_func_default(node) or is_func_decorator(node) - or is_ancestor_name(frame, node)): - start_index = len(self._to_consume) - 2 - else: - start_index = len(self._to_consume) - 1 - # iterates through parent scopes, from the inner to the outer - base_scope_type = self._to_consume[start_index][-1] - for i in range(start_index, -1, -1): - to_consume, consumed, scope_type = self._to_consume[i] - # if the current scope is a class scope but it's not the inner - # scope, ignore it. This prevents to access this scope instead of - # the globals one in function members when there are some common - # names. 
The only exception is when the starting scope is a - # comprehension and its direct outer scope is a class - if scope_type == 'class' and i != start_index and not ( - base_scope_type == 'comprehension' and i == start_index-1): - # XXX find a way to handle class scope in a smoother way - continue - # the name has already been consumed, only check it's not a loop - # variable used outside the loop - if name in consumed: - self._loopvar_name(node, name) - break - # mark the name as consumed if it's defined in this scope - # (i.e. no KeyError is raised by "to_consume[name]") - try: - consumed[name] = to_consume[name] - except KeyError: - continue - # checks for use before assignment - defnode = assign_parent(to_consume[name][0]) - if defnode is not None: - defstmt = defnode.statement() - defframe = defstmt.frame() - maybee0601 = True - if not frame is defframe: - maybee0601 = False - elif defframe.parent is None: - # we are at the module level, check the name is not - # defined in builtins - if name in defframe.scope_attrs or builtin_lookup(name)[1]: - maybee0601 = False - else: - # we are in a local scope, check the name is not - # defined in global or builtin scope - if defframe.root().lookup(name)[1]: - maybee0601 = False - if (maybee0601 - and stmt.fromlineno <= defstmt.fromlineno - and not is_defined_before(node) - and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))): - if defstmt is stmt and isinstance(node, (astng.DelName, - astng.AssName)): - self.add_message('E0602', args=name, node=node) - elif self._to_consume[-1][-1] != 'lambda': - # E0601 may *not* occurs in lambda scope - self.add_message('E0601', args=name, node=node) - if not isinstance(node, astng.AssName): # Aug AssName - del to_consume[name] - else: - del consumed[name] - # check it's not a loop variable used outside the loop - self._loopvar_name(node, name) - break - else: - # we have not found the name, if it isn't a builtin, that's an - # undefined name ! 
- if not (name in astng.Module.scope_attrs or is_builtin(name) - or name in self.config.additional_builtins): - self.add_message('E0602', args=name, node=node) - - @check_messages('E0611') - def visit_import(self, node): - """check modules attribute accesses""" - for name, _ in node.names: - parts = name.split('.') - try: - module = node.infer_name_module(parts[0]).next() - except astng.ResolveError: - continue - self._check_module_attrs(node, module, parts[1:]) - - @check_messages('E0611') - def visit_from(self, node): - """check modules attribute accesses""" - name_parts = node.modname.split('.') - level = getattr(node, 'level', None) - try: - module = node.root().import_module(name_parts[0], level=level) - except ASTNGBuildingException: - return - except Exception, exc: - print 'Unhandled exception in VariablesChecker:', exc - return - module = self._check_module_attrs(node, module, name_parts[1:]) - if not module: - return - for name, _ in node.names: - if name == '*': - continue - self._check_module_attrs(node, module, name.split('.')) - - def _check_module_attrs(self, node, module, module_names): - """check that module_names (list of string) are accessible through the - given module - if the latest access name corresponds to a module, return it - """ - assert isinstance(module, astng.Module), module - while module_names: - name = module_names.pop(0) - if name == '__dict__': - module = None - break - try: - module = module.getattr(name)[0].infer().next() - if module is astng.YES: - return None - except astng.NotFoundError: - self.add_message('E0611', args=(name, module.name), node=node) - return None - except astng.InferenceError: - return None - if module_names: - # FIXME: other message if name is not the latest part of - # module_names ? 
- modname = module and module.name or '__dict__' - self.add_message('E0611', node=node, - args=('.'.join(module_names), modname)) - return None - if isinstance(module, astng.Module): - return module - return None - - -class VariablesChecker3k(VariablesChecker): - '''Modified variables checker for 3k''' - # listcomp have now also their scope - - def visit_listcomp(self, node): - """visit dictcomp: update consumption analysis variable - """ - self._to_consume.append((copy(node.locals), {}, 'comprehension')) - - def leave_listcomp(self, _): - """leave dictcomp: update consumption analysis variable - """ - # do not check for not used locals here - self._to_consume.pop() - -if sys.version_info >= (3, 0): - VariablesChecker = VariablesChecker3k - - -def register(linter): - """required method to auto register this checker""" - linter.register_checker(VariablesChecker(linter)) diff --git a/pylibs/pylint/config.py b/pylibs/pylint/config.py deleted file mode 100644 index 60b51ee9..00000000 --- a/pylibs/pylint/config.py +++ /dev/null @@ -1,153 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" Copyright (c) 2003-2006 LOGILAB S.A. (Paris, FRANCE). 
- http://www.logilab.fr/ -- mailto:contact@logilab.fr - - utilities for PyLint configuration : - _ pylintrc - _ pylint.d (PYLINT_HOME) -""" - -import pickle -import os -import sys -from os.path import exists, isfile, join, expanduser, abspath, dirname - -# pylint home is used to save old runs results ################################ - -USER_HOME = expanduser('~') -if 'PYLINTHOME' in os.environ: - PYLINT_HOME = os.environ['PYLINTHOME'] - if USER_HOME == '~': - USER_HOME = dirname(PYLINT_HOME) -elif USER_HOME == '~': - PYLINT_HOME = ".pylint.d" -else: - PYLINT_HOME = join(USER_HOME, '.pylint.d') - -if not exists(PYLINT_HOME): - try: - os.mkdir(PYLINT_HOME) - except OSError: - print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME - -def get_pdata_path(base_name, recurs): - """return the path of the file which should contain old search data for the - given base_name with the given options values - """ - base_name = base_name.replace(os.sep, '_') - return join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats')) - -def load_results(base): - """try to unpickle and return data from file if it exists and is not - corrupted - - return an empty dictionary if it doesn't exists - """ - data_file = get_pdata_path(base, 1) - try: - return pickle.load(open(data_file)) - except: - return {} - -if sys.version_info < (3, 0): - _PICK_MOD = 'w' -else: - _PICK_MOD = 'wb' - -def save_results(results, base): - """pickle results""" - data_file = get_pdata_path(base, 1) - try: - pickle.dump(results, open(data_file, _PICK_MOD)) - except (IOError, OSError), ex: - print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex) - -# location of the configuration file ########################################## - - -def find_pylintrc(): - """search the pylint rc file and return its path if it find it, else None - """ - # is there a pylint rc file in the current directory ? 
- if exists('pylintrc'): - return abspath('pylintrc') - if isfile('__init__.py'): - curdir = abspath(os.getcwd()) - while isfile(join(curdir, '__init__.py')): - curdir = abspath(join(curdir, '..')) - if isfile(join(curdir, 'pylintrc')): - return join(curdir, 'pylintrc') - if 'PYLINTRC' in os.environ and exists(os.environ['PYLINTRC']): - pylintrc = os.environ['PYLINTRC'] - else: - user_home = expanduser('~') - if user_home == '~' or user_home == '/root': - pylintrc = ".pylintrc" - else: - pylintrc = join(user_home, '.pylintrc') - if not isfile(pylintrc): - if isfile('/etc/pylintrc'): - pylintrc = '/etc/pylintrc' - else: - pylintrc = None - return pylintrc - -PYLINTRC = find_pylintrc() - -ENV_HELP = ''' -The following environment variables are used : - * PYLINTHOME - path to the directory where data of persistent run will be stored. If not -found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working -directory). - * PYLINTRC - path to the configuration file. If not found, it will use the first -existent file in ~/.pylintrc, /etc/pylintrc. -''' % globals() - -# evaluation messages ######################################################### - -def get_note_message(note): - """return a message according to note - note is a float < 10 (10 is the highest note) - """ - assert note <= 10, "Note is %.2f. Either you cheated, or pylint's \ -broken!" % note - if note < 0: - msg = 'You have to do something quick !' - elif note < 1: - msg = 'Hey! This is really dreadful. Or maybe pylint is buggy?' - elif note < 2: - msg = "Come on! You can't be proud of this code" - elif note < 3: - msg = 'Hum... Needs work.' - elif note < 4: - msg = 'Wouldn\'t you be a bit lazy?' - elif note < 5: - msg = 'A little more work would make it acceptable.' - elif note < 6: - msg = 'Just the bare minimum. Give it a bit more polish. ' - elif note < 7: - msg = 'This is okay-ish, but I\'m sure you can do better.' 
- elif note < 8: - msg = 'If you commit now, people should not be making nasty \ -comments about you on c.l.py' - elif note < 9: - msg = 'That\'s pretty good. Good work mate.' - elif note < 10: - msg = 'So close to being perfect...' - else: - msg = 'Wow ! Now this deserves our uttermost respect.\nPlease send \ -your code to python-projects@logilab.org' - return msg diff --git a/pylibs/pylint/epylint.py b/pylibs/pylint/epylint.py deleted file mode 100644 index f6b16e7a..00000000 --- a/pylibs/pylint/epylint.py +++ /dev/null @@ -1,149 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4 -"""Emacs and Flymake compatible Pylint. - -This script is for integration with emacs and is compatible with flymake mode. - -epylint walks out of python packages before invoking pylint. This avoids -reporting import errors that occur when a module within a package uses the -absolute import path to get another module within this package. - -For example: - - Suppose a package is structured as - - a/__init__.py - a/b/x.py - a/c/y.py - - - Then if y.py imports x as "from a.b import x" the following produces pylint errors - - cd a/c; pylint y.py - - - The following obviously doesn't - - pylint a/c/y.py - - - As this script will be invoked by emacs within the directory of the file - we are checking we need to go out of it to avoid these false positives. - - -You may also use py_run to run pylint with desired options and get back (or not) its output. -""" - -import sys, os, re -from subprocess import Popen, PIPE - - -def lint(filename): - """Pylint the given file. - - When run from emacs we will be in the directory of a file, and passed its filename. - If this file is part of a package and is trying to import other modules from within - its own package or another package rooted in a directory below it, pylint will classify - it as a failed import. 
- - To get around this, we traverse down the directory tree to find the root of the package this - module is in. We then invoke pylint from this directory. - - Finally, we must correct the filenames in the output generated by pylint so Emacs doesn't - become confused (it will expect just the original filename, while pylint may extend it with - extra directories if we've traversed down the tree) - """ - # traverse downwards until we are out of a python package - fullPath = os.path.abspath(filename) - parentPath, childPath = os.path.dirname(fullPath), os.path.basename(fullPath) - - while parentPath != "/" and os.path.exists(os.path.join(parentPath, '__init__.py')): - childPath = os.path.join(os.path.basename(parentPath), childPath) - parentPath = os.path.dirname(parentPath) - - # Start pylint - process = Popen('pylint -f parseable -r n --disable=C,R,I "%s"' % - childPath, shell=True, stdout=PIPE, stderr=PIPE, - cwd=parentPath) - p = process.stdout - - # The parseable line format is '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s' - # NOTE: This would be cleaner if we added an Emacs reporter to pylint.reporters.text .. 
- regex = re.compile(r"\[(?P[WE])(?P.*?)\]") - - def _replacement(mObj): - "Alter to include 'Error' or 'Warning'" - if mObj.group("type") == "W": - replacement = "Warning" - else: - replacement = "Error" - # replace as "Warning (W0511, funcName): Warning Text" - return "%s (%s%s):" % (replacement, mObj.group("type"), mObj.group("remainder")) - - for line in p: - # remove pylintrc warning - if line.startswith("No config file found"): - continue - line = regex.sub(_replacement, line, 1) - # modify the file name thats output to reverse the path traversal we made - parts = line.split(":") - if parts and parts[0] == childPath: - line = ":".join([filename] + parts[1:]) - print line, - - p.close() - -def Run(): - lint(sys.argv[1]) - - -def py_run(command_options='', return_std=False, stdout=None, stderr=None, - script='epylint'): - """Run pylint from python (needs Python >= 2.4). - - ``command_options`` is a string containing ``pylint`` command line options; - ``return_std`` (boolean) indicates return of created standart output - and error (see below); - ``stdout`` and ``stderr`` are 'file-like' objects in which standart output - could be written. - - Calling agent is responsible for stdout/err management (creation, close). - Default standart output and error are those from sys, - or standalone ones (``subprocess.PIPE``) are used - if they are not set and ``return_std``. - - If ``return_std`` is set to ``True``, this function returns a 2-uple - containing standart output and error related to created process, - as follows: ``(stdout, stderr)``. - - A trivial usage could be as follows: - >>> py_run( '--version') - No config file found, using default configuration - pylint 0.18.1, - ... 
- - To silently run Pylint on a module, and get its standart output and error: - >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True) - """ - # Create command line to call pylint - if os.name == 'nt': - script += '.bat' - command_line = script + ' ' + command_options - # Providing standart output and/or error if not set - if stdout is None: - if return_std: - stdout = PIPE - else: - stdout = sys.stdout - if stderr is None: - if return_std: - stderr = PIPE - else: - stderr = sys.stderr - # Call pylint in a subprocess - p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr) - p.wait() - # Return standart output and error - if return_std: - return (p.stdout, p.stderr) - - -if __name__ == '__main__': - lint(sys.argv[1]) - diff --git a/pylibs/pylint/interfaces.py b/pylibs/pylint/interfaces.py deleted file mode 100644 index 3d7bdad6..00000000 --- a/pylibs/pylint/interfaces.py +++ /dev/null @@ -1,98 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" Copyright (c) 2002-2003 LOGILAB S.A. (Paris, FRANCE). 
- http://www.logilab.fr/ -- mailto:contact@logilab.fr - -Interfaces for PyLint objects -""" - -__revision__ = "$Id: interfaces.py,v 1.9 2004-04-24 12:14:53 syt Exp $" - -from logilab.common.interface import Interface - - -class IChecker(Interface): - """This is an base interface, not designed to be used elsewhere than for - sub interfaces definition. - """ - - def open(self): - """called before visiting project (i.e set of modules)""" - - def close(self): - """called after visiting project (i.e set of modules)""" - -## def open_module(self): -## """called before visiting a module""" - -## def close_module(self): -## """called after visiting a module""" - - -class IRawChecker(IChecker): - """interface for checker which need to parse the raw file - """ - - def process_module(self, astng): - """ process a module - - the module's content is accessible via astng.file_stream - """ - - -class IASTNGChecker(IChecker): - """ interface for checker which prefers receive events according to - statement type - """ - - -class ILinter(Interface): - """interface for the linter class - - the linter class will generate events to its registered checkers. - Each checker may interact with the linter instance using this API - """ - - def register_checker(self, checker): - """register a new checker class - - checker is a class implementing IrawChecker or / and IASTNGChecker - """ - - def add_message(self, msg_id, line=None, node=None, args=None): - """add the message corresponding to the given id. - - If provided, msg is expanded using args - - astng checkers should provide the node argument, - raw checkers should provide the line argument. 
- """ - - -class IReporter(Interface): - """ reporter collect messages and display results encapsulated in a layout - """ - def add_message(self, msg_id, location, msg): - """add a message of a given type - - msg_id is a message identifier - location is a 3-uple (module, object, line) - msg is the actual message - """ - - def display_results(self, layout): - """display results encapsulated in the layout tree - """ - - -__all__ = ('IRawChecker', 'IStatable', 'ILinter', 'IReporter') diff --git a/pylibs/pylint/lint.py b/pylibs/pylint/lint.py deleted file mode 100644 index d11d7167..00000000 --- a/pylibs/pylint/lint.py +++ /dev/null @@ -1,929 +0,0 @@ -# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com). -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" %prog [options] module_or_package - - Check that a module satisfy a coding standard (and more !). - - %prog --help - - Display this help message and exit. - - %prog --help-msg [,] - - Display help messages about given message identifiers and exit. 
-""" - -# import this first to avoid builtin namespace pollution -from pylint.checkers import utils - -import sys -import os -import re -import tokenize -from warnings import warn - -from logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn -from logilab.common.optik_ext import check_csv -from logilab.common.modutils import load_module_from_name -from logilab.common.interface import implements -from logilab.common.textutils import splitstrip -from logilab.common.ureports import Table, Text, Section -from logilab.common.__pkginfo__ import version as common_version - -from logilab.astng import MANAGER, nodes, ASTNGBuildingException -from logilab.astng.__pkginfo__ import version as astng_version - -from pylint.utils import PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn,\ - ReportsHandlerMixIn, MSG_TYPES, expand_modules -from pylint.interfaces import ILinter, IRawChecker, IASTNGChecker -from pylint.checkers import BaseRawChecker, EmptyReport, \ - table_lines_from_stats -from pylint.reporters.text import TextReporter, ParseableTextReporter, \ - VSTextReporter, ColorizedTextReporter -from pylint.reporters.html import HTMLReporter -from pylint import config - -from pylint.__pkginfo__ import version - - -OPTION_RGX = re.compile('\s*#*\s*pylint:(.*)') -REPORTER_OPT_MAP = {'text': TextReporter, - 'parseable': ParseableTextReporter, - 'msvs': VSTextReporter, - 'colorized': ColorizedTextReporter, - 'html': HTMLReporter,} - - -# Python Linter class ######################################################### - -MSGS = { - 'F0001': ('%s', - 'Used when an error occurred preventing the analysis of a \ - module (unable to find it for instance).'), - 'F0002': ('%s: %s', - 'Used when an unexpected error occurred while building the ASTNG \ - representation. This is usually accompanied by a traceback. 
\ - Please report such errors !'), - 'F0003': ('ignored builtin module %s', - 'Used to indicate that the user asked to analyze a builtin module\ - which has been skipped.'), - 'F0004': ('unexpected inferred value %s', - 'Used to indicate that some value of an unexpected type has been \ - inferred.'), - 'F0010': ('error while code parsing: %s', - 'Used when an exception occured while building the ASTNG \ - representation which could be handled by astng.'), - - - 'I0001': ('Unable to run raw checkers on built-in module %s', - 'Used to inform that a built-in module has not been checked \ - using the raw checkers.'), - - 'I0010': ('Unable to consider inline option %r', - 'Used when an inline option is either badly formatted or can\'t \ - be used inside modules.'), - - 'I0011': ('Locally disabling %s', - 'Used when an inline option disables a message or a messages \ - category.'), - 'I0012': ('Locally enabling %s', - 'Used when an inline option enables a message or a messages \ - category.'), - 'I0013': ('Ignoring entire file', - 'Used to inform that the file will not be checked'), - - - 'E0001': ('%s', - 'Used when a syntax error is raised for a module.'), - - 'E0011': ('Unrecognized file option %r', - 'Used when an unknown inline option is encountered.'), - 'E0012': ('Bad option value %r', - 'Used when a bad value for an inline option is encountered.'), - } - - -class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn, - BaseRawChecker): - """lint Python modules using external checkers. - - This is the main checker controlling the other ones and the reports - generation. It is itself both a raw checker and an astng checker in order - to: - * handle message activation / deactivation at the module level - * handle some basic but necessary stats'data (number of classes, methods...) 
- - IDE plugins developpers: you may have to call - `logilab.astng.builder.MANAGER.astng_cache.clear()` accross run if you want - to ensure the latest code version is actually checked. - """ - - __implements__ = (ILinter, IRawChecker) - - name = 'master' - priority = 0 - level = 0 - msgs = MSGS - may_be_disabled = False - - options = (('ignore', - {'type' : 'csv', 'metavar' : '[,...]', - 'dest' : 'black_list', 'default' : ('CVS',), - 'help' : 'Add files or directories to the blacklist. \ -They should be base names, not paths.'}), - ('persistent', - {'default': True, 'type' : 'yn', 'metavar' : '', - 'level': 1, - 'help' : 'Pickle collected data for later comparisons.'}), - - ('load-plugins', - {'type' : 'csv', 'metavar' : '', 'default' : (), - 'level': 1, - 'help' : 'List of plugins (as comma separated values of \ -python modules names) to load, usually to register additional checkers.'}), - - ('output-format', - {'default': 'text', 'type': 'choice', 'metavar' : '', - 'choices': ('text', 'parseable', 'msvs', 'colorized', 'html'), - 'short': 'f', - 'group': 'Reports', - 'help' : 'Set the output format. Available formats are text,\ - parseable, colorized, msvs (visual studio) and html'}), - - ('include-ids', - {'type' : 'yn', 'metavar' : '', 'default' : 0, - 'short': 'i', - 'group': 'Reports', - 'help' : 'Include message\'s id in output'}), - - ('files-output', - {'default': 0, 'type' : 'yn', 'metavar' : '', - 'group': 'Reports', 'level': 1, - 'help' : 'Put messages in a separate file for each module / \ -package specified on the command line instead of printing them on stdout. 
\ -Reports (if any) will be written in a file name "pylint_global.[txt|html]".'}), - - ('reports', - {'default': 1, 'type' : 'yn', 'metavar' : '', - 'short': 'r', - 'group': 'Reports', - 'help' : 'Tells whether to display a full report or only the\ - messages'}), - - ('evaluation', - {'type' : 'string', 'metavar' : '', - 'group': 'Reports', 'level': 1, - 'default': '10.0 - ((float(5 * error + warning + refactor + \ -convention) / statement) * 10)', - 'help' : 'Python expression which should return a note less \ -than 10 (10 is the highest note). You have access to the variables errors \ -warning, statement which respectively contain the number of errors / warnings\ - messages and the total number of statements analyzed. This is used by the \ - global evaluation report (RP0004).'}), - - ('comment', - {'default': 0, 'type' : 'yn', 'metavar' : '', - 'group': 'Reports', 'level': 1, - 'help' : 'Add a comment according to your evaluation note. \ -This is used by the global evaluation report (RP0004).'}), - - ('enable', - {'type' : 'csv', 'metavar': '', - 'short': 'e', - 'group': 'Messages control', - 'help' : 'Enable the message, report, category or checker with the ' - 'given id(s). You can either give multiple identifier ' - 'separated by comma (,) or put this option multiple time.'}), - - ('disable', - {'type' : 'csv', 'metavar': '', - 'short': 'd', - 'group': 'Messages control', - 'help' : 'Disable the message, report, category or checker ' - 'with the given id(s). 
You can either give multiple identifier' - ' separated by comma (,) or put this option multiple time ' - '(only on the command line, not in the configuration file ' - 'where it should appear only once).'}), - ) - - option_groups = ( - ('Messages control', 'Options controling analysis messages'), - ('Reports', 'Options related to output formating and reporting'), - ) - - def __init__(self, options=(), reporter=None, option_groups=(), - pylintrc=None): - # some stuff has to be done before ancestors initialization... - # - # checkers / reporter / astng manager - self.reporter = None - self._checkers = {} - self._ignore_file = False - # visit variables - self.base_name = None - self.base_file = None - self.current_name = None - self.current_file = None - self.stats = None - # init options - self.options = options + PyLinter.options - self.option_groups = option_groups + PyLinter.option_groups - self._options_methods = { - 'enable': self.enable, - 'disable': self.disable} - self._bw_options_methods = {'disable-msg': self.disable, - 'enable-msg': self.enable} - full_version = '%%prog %s, \nastng %s, common %s\nPython %s' % ( - version, astng_version, common_version, sys.version) - OptionsManagerMixIn.__init__(self, usage=__doc__, - version=full_version, - config_file=pylintrc or config.PYLINTRC) - MessagesHandlerMixIn.__init__(self) - ReportsHandlerMixIn.__init__(self) - BaseRawChecker.__init__(self) - # provided reports - self.reports = (('RP0001', 'Messages by category', - report_total_messages_stats), - ('RP0002', '% errors / warnings by module', - report_messages_by_module_stats), - ('RP0003', 'Messages', - report_messages_stats), - ('RP0004', 'Global evaluation', - self.report_evaluation), - ) - self.register_checker(self) - self._dynamic_plugins = [] - self.load_provider_defaults() - self.set_reporter(reporter or TextReporter(sys.stdout)) - - def load_plugin_modules(self, modnames): - """take a list of module names which are pylint plugins and load - and register 
them - """ - for modname in modnames: - if modname in self._dynamic_plugins: - continue - self._dynamic_plugins.append(modname) - module = load_module_from_name(modname) - module.register(self) - - def set_reporter(self, reporter): - """set the reporter used to display messages and reports""" - self.reporter = reporter - reporter.linter = self - - def set_option(self, optname, value, action=None, optdict=None): - """overridden from configuration.OptionsProviderMixin to handle some - special options - """ - if optname in self._options_methods or optname in self._bw_options_methods: - if value: - try: - meth = self._options_methods[optname] - except KeyError: - meth = self._bw_options_methods[optname] - warn('%s is deprecated, replace it by %s' % ( - optname, optname.split('-')[0]), DeprecationWarning) - value = check_csv(None, optname, value) - if isinstance(value, (list, tuple)): - for _id in value : - meth(_id) - else : - meth(value) - elif optname == 'output-format': - self.set_reporter(REPORTER_OPT_MAP[value.lower()]()) - try: - BaseRawChecker.set_option(self, optname, value, action, optdict) - except UnsupportedAction: - print >> sys.stderr, 'option %s can\'t be read from config file' % \ - optname - - # checkers manipulation methods ############################################ - - def register_checker(self, checker): - """register a new checker - - checker is an object implementing IRawChecker or / and IASTNGChecker - """ - assert checker.priority <= 0, 'checker priority can\'t be >= 0' - self._checkers.setdefault(checker.name, []).append(checker) - for r_id, r_title, r_cb in checker.reports: - self.register_report(r_id, r_title, r_cb, checker) - self.register_options_provider(checker) - if hasattr(checker, 'msgs'): - self.register_messages(checker) - checker.load_defaults() - - def disable_noerror_messages(self): - for msgcat, msgids in self._msgs_by_category.iteritems(): - if msgcat == 'E': - for msgid in msgids: - self.enable(msgid) - else: - for msgid in 
msgids: - self.disable(msgid) - - def disable_reporters(self): - """disable all reporters""" - for reporters in self._reports.values(): - for report_id, _title, _cb in reporters: - self.disable_report(report_id) - - def error_mode(self): - """error mode: enable only errors; no reports, no persistent""" - self.disable_noerror_messages() - self.disable('miscellaneous') - self.set_option('reports', False) - self.set_option('persistent', False) - - # block level option handling ############################################# - # - # see func_block_disable_msg.py test case for expected behaviour - - def process_tokens(self, tokens): - """process tokens from the current module to search for module/block - level options - """ - comment = tokenize.COMMENT - newline = tokenize.NEWLINE - for (tok_type, _, start, _, line) in tokens: - if tok_type not in (comment, newline): - continue - match = OPTION_RGX.search(line) - if match is None: - continue - if match.group(1).strip() == "disable-all": - self.add_message('I0013', line=start[0]) - self._ignore_file = True - return - try: - opt, value = match.group(1).split('=', 1) - except ValueError: - self.add_message('I0010', args=match.group(1).strip(), - line=start[0]) - continue - opt = opt.strip() - if opt in self._options_methods or opt in self._bw_options_methods: - try: - meth = self._options_methods[opt] - except KeyError: - meth = self._bw_options_methods[opt] - warn('%s is deprecated, replace it by %s (%s, line %s)' % ( - opt, opt.split('-')[0], self.current_file, line), - DeprecationWarning) - for msgid in splitstrip(value): - try: - meth(msgid, 'module', start[0]) - except UnknownMessage: - self.add_message('E0012', args=msgid, line=start[0]) - else: - self.add_message('E0011', args=opt, line=start[0]) - - def collect_block_lines(self, node, msg_state): - """walk ast to collect block level options line numbers""" - # recurse on children (depth first) - for child in node.get_children(): - self.collect_block_lines(child, 
msg_state) - first = node.fromlineno - last = node.tolineno - # first child line number used to distinguish between disable - # which are the first child of scoped node with those defined later. - # For instance in the code below: - # - # 1. def meth8(self): - # 2. """test late disabling""" - # 3. # pylint: disable=E1102 - # 4. print self.blip - # 5. # pylint: disable=E1101 - # 6. print self.bla - # - # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6 - # - # this is necessary to disable locally messages applying to class / - # function using their fromlineno - if isinstance(node, (nodes.Module, nodes.Class, nodes.Function)) and node.body: - firstchildlineno = node.body[0].fromlineno - else: - firstchildlineno = last - for msgid, lines in msg_state.iteritems(): - for lineno, state in lines.items(): - if first <= lineno <= last: - if lineno > firstchildlineno: - state = True - # set state for all lines for this block - first, last = node.block_range(lineno) - for line in xrange(first, last+1): - # do not override existing entries - if not line in self._module_msgs_state.get(msgid, ()): - if line in lines: # state change in the same block - state = lines[line] - try: - self._module_msgs_state[msgid][line] = state - except KeyError: - self._module_msgs_state[msgid] = {line: state} - del lines[lineno] - - - # code checking methods ################################################### - - def get_checkers(self): - """return all available checkers as a list""" - return [self] + [c for checkers in self._checkers.values() - for c in checkers if c is not self] - - def prepare_checkers(self): - """return checkers needed for activated messages and reports""" - if not self.config.reports: - self.disable_reporters() - # get needed checkers - neededcheckers = [self] - for checker in self.get_checkers()[1:]: - messages = set(msg for msg in checker.msgs - if self.is_message_enabled(msg)) - if (messages or - any(self.report_is_enabled(r[0]) for r in 
checker.reports)): - neededcheckers.append(checker) - checker.active_msgs = messages - return neededcheckers - - def check(self, files_or_modules): - """main checking entry: check a list of files or modules from their - name. - """ - self.reporter.include_ids = self.config.include_ids - if not isinstance(files_or_modules, (list, tuple)): - files_or_modules = (files_or_modules,) - walker = PyLintASTWalker(self) - checkers = self.prepare_checkers() - rawcheckers = [c for c in checkers if implements(c, IRawChecker) - and c is not self] - # notify global begin - for checker in checkers: - checker.open() - if implements(checker, IASTNGChecker): - walker.add_checker(checker) - # build ast and check modules or packages - for descr in self.expand_files(files_or_modules): - modname, filepath = descr['name'], descr['path'] - self.set_current_module(modname, filepath) - # get the module representation - astng = self.get_astng(filepath, modname) - if astng is None: - continue - self.base_name = descr['basename'] - self.base_file = descr['basepath'] - if self.config.files_output: - reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension) - self.reporter.set_output(open(reportfile, 'w')) - self._ignore_file = False - # fix the current file (if the source file was not available or - # if it's actually a c extension) - self.current_file = astng.file - self.check_astng_module(astng, walker, rawcheckers) - # notify global end - self.set_current_module('') - self.stats['statement'] = walker.nbstatements - checkers.reverse() - for checker in checkers: - checker.close() - - def expand_files(self, modules): - """get modules and errors from a list of modules and handle errors - """ - result, errors = expand_modules(modules, self.config.black_list) - for error in errors: - message = modname = error["mod"] - key = error["key"] - self.set_current_module(modname) - if key == "F0001": - message = str(error["ex"]).replace(os.getcwd() + os.sep, '') - self.add_message(key, args=message) 
- return result - - def set_current_module(self, modname, filepath=None): - """set the name of the currently analyzed module and - init statistics for it - """ - if not modname and filepath is None: - return - self.current_name = modname - self.current_file = filepath or modname - self.stats['by_module'][modname] = {} - self.stats['by_module'][modname]['statement'] = 0 - for msg_cat in MSG_TYPES.values(): - self.stats['by_module'][modname][msg_cat] = 0 - # XXX hack, to be correct we need to keep module_msgs_state - # for every analyzed module (the problem stands with localized - # messages which are only detected in the .close step) - if modname: - self._module_msgs_state = {} - self._module_msg_cats_state = {} - - def get_astng(self, filepath, modname): - """return a astng representation for a module""" - try: - return MANAGER.astng_from_file(filepath, modname, source=True) - except SyntaxError, ex: - self.add_message('E0001', line=ex.lineno, args=ex.msg) - except ASTNGBuildingException, ex: - self.add_message('F0010', args=ex) - except Exception, ex: - import traceback - traceback.print_exc() - self.add_message('F0002', args=(ex.__class__, ex)) - - def check_astng_module(self, astng, walker, rawcheckers): - """check a module from its astng representation, real work""" - # call raw checkers if possible - if not astng.pure_python: - self.add_message('I0001', args=astng.name) - else: - #assert astng.file.endswith('.py') - # invoke IRawChecker interface on self to fetch module/block - # level options - self.process_module(astng) - if self._ignore_file: - return False - # walk ast to collect line numbers - orig_state = self._module_msgs_state.copy() - self._module_msgs_state = {} - self.collect_block_lines(astng, orig_state) - for checker in rawcheckers: - checker.process_module(astng) - # generate events to astng checkers - walker.walk(astng) - return True - - # IASTNGChecker interface ################################################# - - def open(self): - 
"""initialize counters""" - self.stats = { 'by_module' : {}, - 'by_msg' : {}, - } - for msg_cat in MSG_TYPES.values(): - self.stats[msg_cat] = 0 - - def close(self): - """close the whole package /module, it's time to make reports ! - - if persistent run, pickle results for later comparison - """ - if self.base_name is not None: - # load old results if any - old_stats = config.load_results(self.base_name) - if self.config.reports: - self.make_reports(self.stats, old_stats) - elif self.config.output_format == 'html': - self.reporter.display_results(Section()) - # save results if persistent run - if self.config.persistent: - config.save_results(self.stats, self.base_name) - - # specific reports ######################################################## - - def report_evaluation(self, sect, stats, old_stats): - """make the global evaluation report""" - # check with at least check 1 statements (usually 0 when there is a - # syntax error preventing pylint from further processing) - if stats['statement'] == 0: - raise EmptyReport() - # get a global note for the code - evaluation = self.config.evaluation - try: - note = eval(evaluation, {}, self.stats) - except Exception, ex: - msg = 'An exception occurred while rating: %s' % ex - else: - stats['global_note'] = note - msg = 'Your code has been rated at %.2f/10' % note - if 'global_note' in old_stats: - msg += ' (previous run: %.2f/10)' % old_stats['global_note'] - if self.config.comment: - msg = '%s\n%s' % (msg, config.get_note_message(note)) - sect.append(Text(msg)) - -# some reporting functions #################################################### - -def report_total_messages_stats(sect, stats, old_stats): - """make total errors / warnings report""" - lines = ['type', 'number', 'previous', 'difference'] - lines += table_lines_from_stats(stats, old_stats, - ('convention', 'refactor', - 'warning', 'error')) - sect.append(Table(children=lines, cols=4, rheaders=1)) - -def report_messages_stats(sect, stats, _): - """make 
messages type report""" - if not stats['by_msg']: - # don't print this report when we didn't detected any errors - raise EmptyReport() - in_order = sorted([(value, msg_id) - for msg_id, value in stats['by_msg'].items() - if not msg_id.startswith('I')]) - in_order.reverse() - lines = ('message id', 'occurrences') - for value, msg_id in in_order: - lines += (msg_id, str(value)) - sect.append(Table(children=lines, cols=2, rheaders=1)) - -def report_messages_by_module_stats(sect, stats, _): - """make errors / warnings by modules report""" - if len(stats['by_module']) == 1: - # don't print this report when we are analysing a single module - raise EmptyReport() - by_mod = {} - for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'): - total = stats[m_type] - for module in stats['by_module'].keys(): - mod_total = stats['by_module'][module][m_type] - if total == 0: - percent = 0 - else: - percent = float((mod_total)*100) / total - by_mod.setdefault(module, {})[m_type] = percent - sorted_result = [] - for module, mod_info in by_mod.items(): - sorted_result.append((mod_info['error'], - mod_info['warning'], - mod_info['refactor'], - mod_info['convention'], - module)) - sorted_result.sort() - sorted_result.reverse() - lines = ['module', 'error', 'warning', 'refactor', 'convention'] - for line in sorted_result: - if line[0] == 0 and line[1] == 0: - break - lines.append(line[-1]) - for val in line[:-1]: - lines.append('%.2f' % val) - if len(lines) == 5: - raise EmptyReport() - sect.append(Table(children=lines, cols=5, rheaders=1)) - - -# utilities ################################################################### - -# this may help to import modules using gettext - -try: - __builtins__._ = str -except AttributeError: - __builtins__['_'] = str - - -class ArgumentPreprocessingError(Exception): - """Raised if an error occurs during argument preprocessing.""" - - -def preprocess_options(args, search_for): - """look for some options (keys of ) which have to be 
processed - before others - - values of are callback functions to call when the option is - found - """ - i = 0 - while i < len(args): - arg = args[i] - if arg.startswith('--'): - try: - option, val = arg[2:].split('=', 1) - except ValueError: - option, val = arg[2:], None - try: - cb, takearg = search_for[option] - del args[i] - if takearg and val is None: - if i >= len(args) or args[i].startswith('-'): - raise ArgumentPreprocessingError(arg) - val = args[i] - del args[i] - cb(option, val) - except KeyError: - i += 1 - else: - i += 1 - -class Run: - """helper class to use as main for pylint : - - run(*sys.argv[1:]) - """ - LinterClass = PyLinter - option_groups = ( - ('Commands', 'Options which are actually commands. Options in this \ -group are mutually exclusive.'), - ) - - def __init__(self, args, reporter=None, exit=True): - self._rcfile = None - self._plugins = [] - try: - preprocess_options(args, { - # option: (callback, takearg) - 'rcfile': (self.cb_set_rcfile, True), - 'load-plugins': (self.cb_add_plugins, True), - }) - except ArgumentPreprocessingError, e: - print >> sys.stderr, 'Argument %s expects a value.' % (e.args[0],) - sys.exit(32) - - self.linter = linter = self.LinterClass(( - ('rcfile', - {'action' : 'callback', 'callback' : lambda *args: 1, - 'type': 'string', 'metavar': '', - 'help' : 'Specify a configuration file.'}), - - ('init-hook', - {'action' : 'callback', 'type' : 'string', 'metavar': '', - 'callback' : cb_init_hook, 'level': 1, - 'help' : 'Python code to execute, usually for sys.path \ -manipulation such as pygtk.require().'}), - - ('help-msg', - {'action' : 'callback', 'type' : 'string', 'metavar': '', - 'callback' : self.cb_help_message, - 'group': 'Commands', - 'help' : '''Display a help message for the given message id and \ -exit. 
The value may be a comma separated list of message ids.'''}), - - ('list-msgs', - {'action' : 'callback', 'metavar': '', - 'callback' : self.cb_list_messages, - 'group': 'Commands', 'level': 1, - 'help' : "Generate pylint's messages."}), - - ('full-documentation', - {'action' : 'callback', 'metavar': '', - 'callback' : self.cb_full_documentation, - 'group': 'Commands', 'level': 1, - 'help' : "Generate pylint's full documentation."}), - - ('generate-rcfile', - {'action' : 'callback', 'callback' : self.cb_generate_config, - 'group': 'Commands', - 'help' : '''Generate a sample configuration file according to \ -the current configuration. You can put other options before this one to get \ -them in the generated configuration.'''}), - - ('generate-man', - {'action' : 'callback', 'callback' : self.cb_generate_manpage, - 'group': 'Commands', - 'help' : "Generate pylint's man page.",'hide': True}), - - ('errors-only', - {'action' : 'callback', 'callback' : self.cb_error_mode, - 'short': 'E', - 'help' : '''In error mode, checkers without error messages are \ -disabled and for others, only the ERROR messages are displayed, and no reports \ -are done by default'''}), - - ('profile', - {'type' : 'yn', 'metavar' : '', - 'default': False, 'hide': True, - 'help' : 'Profiled execution.'}), - - ), option_groups=self.option_groups, - reporter=reporter, pylintrc=self._rcfile) - # register standard checkers - from pylint import checkers - checkers.initialize(linter) - # load command line plugins - linter.load_plugin_modules(self._plugins) - # add some help section - linter.add_help_section('Environment variables', config.ENV_HELP, level=1) - linter.add_help_section('Output', ''' -Using the default text output, the message format is : - - MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE - -There are 5 kind of message types : - * (C) convention, for programming standard violation - * (R) refactor, for bad code smell - * (W) warning, for python specific problems - * (E) error, for probable bugs 
in the code - * (F) fatal, if an error occurred which prevented pylint from doing further -processing. - ''', level=1) - linter.add_help_section('Output status code', ''' -Pylint should leave with following status code: - * 0 if everything went fine - * 1 if a fatal message was issued - * 2 if an error message was issued - * 4 if a warning message was issued - * 8 if a refactor message was issued - * 16 if a convention message was issued - * 32 on usage error - -status 1 to 16 will be bit-ORed so you can know which different categories has -been issued by analysing pylint output status code - ''', level=1) - # read configuration - linter.disable('W0704') - linter.read_config_file() - # is there some additional plugins in the file configuration, in - config_parser = linter.cfgfile_parser - if config_parser.has_option('MASTER', 'load-plugins'): - plugins = splitstrip(config_parser.get('MASTER', 'load-plugins')) - linter.load_plugin_modules(plugins) - # now we can load file config and command line, plugins (which can - # provide options) have been registered - linter.load_config_file() - if reporter: - # if a custom reporter is provided as argument, it may be overridden - # by file parameters, so re-set it here, but before command line - # parsing so it's still overrideable by command line option - linter.set_reporter(reporter) - try: - args = linter.load_command_line_configuration(args) - except SystemExit, exc: - if exc.code == 2: # bad options - exc.code = 32 - raise - if not args: - print linter.help() - sys.exit(32) - # insert current working directory to the python path to have a correct - # behaviour - sys.path.insert(0, os.getcwd()) - if self.linter.config.profile: - print >> sys.stderr, '** profiled run' - import cProfile, pstats - cProfile.runctx('linter.check(%r)' % args, globals(), locals(), 'stones.prof' ) - data = pstats.Stats('stones.prof') - data.strip_dirs() - data.sort_stats('time', 'calls') - data.print_stats(30) - else: - linter.check(args) - 
sys.path.pop(0) - if exit: - sys.exit(self.linter.msg_status) - - def cb_set_rcfile(self, name, value): - """callback for option preprocessing (i.e. before optik parsing)""" - self._rcfile = value - - def cb_add_plugins(self, name, value): - """callback for option preprocessing (i.e. before optik parsing)""" - self._plugins.extend(splitstrip(value)) - - def cb_error_mode(self, *args, **kwargs): - """error mode: - * disable all but error messages - * disable the 'miscellaneous' checker which can be safely deactivated in - debug - * disable reports - * do not save execution information - """ - self.linter.error_mode() - - def cb_generate_config(self, *args, **kwargs): - """optik callback for sample config file generation""" - self.linter.generate_config(skipsections=('COMMANDS',)) - sys.exit(0) - - def cb_generate_manpage(self, *args, **kwargs): - """optik callback for sample config file generation""" - from pylint import __pkginfo__ - self.linter.generate_manpage(__pkginfo__) - sys.exit(0) - - def cb_help_message(self, option, optname, value, parser): - """optik callback for printing some help about a particular message""" - self.linter.help_message(splitstrip(value)) - sys.exit(0) - - def cb_full_documentation(self, option, optname, value, parser): - """optik callback for printing full documentation""" - self.linter.print_full_documentation() - sys.exit(0) - - def cb_list_messages(self, option, optname, value, parser): # FIXME - """optik callback for printing available messages""" - self.linter.list_messages() - sys.exit(0) - -def cb_init_hook(option, optname, value, parser): - """exec arbitrary code to set sys.path for instance""" - exec value - - -if __name__ == '__main__': - Run(sys.argv[1:]) diff --git a/pylibs/pylint/pyreverse/__init__.py b/pylibs/pylint/pyreverse/__init__.py deleted file mode 100644 index 8c32ad96..00000000 --- a/pylibs/pylint/pyreverse/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -pyreverse.extensions -""" - -__revision__ = "$Id $" diff 
--git a/pylibs/pylint/pyreverse/main.py b/pylibs/pylint/pyreverse/main.py deleted file mode 100644 index da80bd63..00000000 --- a/pylibs/pylint/pyreverse/main.py +++ /dev/null @@ -1,129 +0,0 @@ -# # Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" - %prog [options] - - create UML diagrams for classes and modules in -""" - -import sys, os -from logilab.common.configuration import ConfigurationMixIn -from logilab.astng.manager import ASTNGManager -from logilab.astng.inspector import Linker - -from pylint.pyreverse.diadefslib import DiadefsHandler -from pylint.pyreverse import writer -from pylint.pyreverse.utils import insert_default_options - -OPTIONS = ( -("filter-mode", - dict(short='f', default='PUB_ONLY', dest='mode', type='string', - action='store', metavar='', - help="""filter attributes and functions according to - . 
Correct modes are : - 'PUB_ONLY' filter all non public attributes - [DEFAULT], equivalent to PRIVATE+SPECIAL_A - 'ALL' no filter - 'SPECIAL' filter Python special functions - except constructor - 'OTHER' filter protected and private - attributes""")), - -("class", -dict(short='c', action="append", metavar="", dest="classes", default=[], - help="create a class diagram with all classes related to ;\ - this uses by default the options -ASmy")), - -("show-ancestors", -dict(short="a", action="store", metavar='', type='int', - help='show generations of ancestor classes not in ')), -("all-ancestors", -dict(short="A", default=None, - help="show all ancestors off all classes in ") ), -("show-associated", -dict(short='s', action="store", metavar='', type='int', - help='show levels of associated classes not in ')), -("all-associated", -dict(short='S', default=None, - help='show recursively all associated off all associated classes')), - -("show-builtin", -dict(short="b", action="store_true", default=False, - help='include builtin objects in representation of classes')), - -("module-names", -dict(short="m", default=None, type='yn', metavar='[yn]', - help='include module name in representation of classes')), -# TODO : generate dependencies like in pylint -#("package-dependencies", -#dict(short="M", action="store", metavar='', type='int', - #help='show module dependencies beyond modules in \ -# (for the package diagram)')), -("only-classnames", -dict(short='k', action="store_true", default=False, - help="don't show attributes and methods in the class boxes; \ -this disables -f values")), -("output", dict(short="o", dest="output_format", action="store", - default="dot", metavar="", - help="create a *. 
output file if format available.")), -) -# FIXME : quiet mode -#( ('quiet', - #dict(help='run quietly', action='store_true', short='q')), ) - -class PyreverseCommand(ConfigurationMixIn): - """base class providing common behaviour for pyreverse commands""" - - options = OPTIONS - - def __init__(self, args): - ConfigurationMixIn.__init__(self, usage=__doc__) - insert_default_options() - self.manager = ASTNGManager() - self.register_options_provider(self.manager) - args = self.load_command_line_configuration() - self.run(args) - - def run(self, args): - """checking arguments and run project""" - if not args: - print self.help() - return - # insert current working directory to the python path to recognize - # dependencies to local modules even if cwd is not in the PYTHONPATH - sys.path.insert(0, os.getcwd()) - try: - project = self.manager.project_from_files(args) - linker = Linker(project, tag=True) - handler = DiadefsHandler(self.config) - diadefs = handler.get_diadefs(project, linker) - finally: - sys.path.pop(0) - - if self.config.output_format == "vcg": - writer.VCGWriter(self.config).write(diadefs) - else: - writer.DotWriter(self.config).write(diadefs) - - -class Run: - """pyreverse main class""" - def __init__(self, args): - """run pyreverse""" - PyreverseCommand(args) - -if __name__ == '__main__': - Run(sys.argv[1:]) diff --git a/pylibs/pylint/pyreverse/utils.py b/pylibs/pylint/pyreverse/utils.py deleted file mode 100644 index ea8b67cc..00000000 --- a/pylibs/pylint/pyreverse/utils.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright (c) 2002-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. 
-# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -""" -generic classes/functions for pyreverse core/extensions -""" - -import sys -import re -import os - -########### pyreverse option utils ############################## - - -RCFILE = '.pyreverserc' - -def get_default_options(): - """ - Read config file and return list of options - """ - options = [] - home = os.environ.get('HOME', '') - if home: - rcfile = os.path.join(home, RCFILE) - try: - options = open(rcfile).read().split() - except IOError: - pass # ignore if no config file found - return options - -def insert_default_options(): - """insert default options to sys.argv - """ - options = get_default_options() - options.reverse() - for arg in options: - sys.argv.insert(1, arg) - - - -# astng utilities ########################################################### - -SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$') -PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$') -PROTECTED = re.compile('^_[_A-Za-z0-9]*$') - -def get_visibility(name): - """return the visibility from a name: public, protected, private or special - """ - if SPECIAL.match(name): - visibility = 'special' - elif PRIVATE.match(name): - visibility = 'private' - elif PROTECTED.match(name): - visibility = 'protected' - - else: - visibility = 'public' - return visibility - -ABSTRACT = re.compile('^.*Abstract.*') -FINAL = re.compile('^[A-Z_]*$') - -def is_abstract(node): - """return true if the given class node correspond to an abstract class - definition - """ - return ABSTRACT.match(node.name) - -def is_final(node): - """return true if the 
given class/function node correspond to final - definition - """ - return FINAL.match(node.name) - -def is_interface(node): - # bw compat - return node.type == 'interface' - -def is_exception(node): - # bw compat - return node.type == 'exception' - - -# Helpers ##################################################################### - -_CONSTRUCTOR = 1 -_SPECIAL = 2 -_PROTECTED = 4 -_PRIVATE = 8 -MODES = { - 'ALL' : 0, - 'PUB_ONLY' : _SPECIAL + _PROTECTED + _PRIVATE, - 'SPECIAL' : _SPECIAL, - 'OTHER' : _PROTECTED + _PRIVATE, -} -VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED, \ - 'private': _PRIVATE, 'public': 0 } - -class FilterMixIn: - """filter nodes according to a mode and nodes' visibility - """ - def __init__(self, mode): - "init filter modes" - __mode = 0 - for nummod in mode.split('+'): - try: - __mode += MODES[nummod] - except KeyError, ex: - print >> sys.stderr, 'Unknown filter mode %s' % ex - self.__mode = __mode - - - def show_attr(self, node): - """return true if the node should be treated - """ - visibility = get_visibility(getattr(node, 'name', node)) - return not (self.__mode & VIS_MOD[visibility] ) - diff --git a/pylibs/pylint/reporters/__init__.py b/pylibs/pylint/reporters/__init__.py deleted file mode 100644 index 0582a6f0..00000000 --- a/pylibs/pylint/reporters/__init__.py +++ /dev/null @@ -1,79 +0,0 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""utilities methods and classes for reporters - -Copyright (c) 2000-2003 LOGILAB S.A. (Paris, FRANCE). -http://www.logilab.fr/ -- mailto:contact@logilab.fr -""" - -import sys, locale - -CMPS = ['=', '-', '+'] - -def diff_string(old, new): - """given a old and new int value, return a string representing the - difference - """ - diff = abs(old - new) - diff_str = "%s%s" % (CMPS[cmp(old, new)], diff and ('%.2f' % diff) or '') - return diff_str - - -class EmptyReport(Exception): - """raised when a report is empty and so should not be displayed""" - -class BaseReporter: - """base class for reporters""" - - extension = '' - - def __init__(self, output=None): - self.linter = None - self.include_ids = None - self.section = 0 - self.out = None - self.out_encoding = None - self.set_output(output) - - def set_output(self, output=None): - """set output stream""" - self.out = output or sys.stdout - # py3k streams handle their encoding : - if sys.version_info >= (3, 0): - self.encode = lambda x: x - return - - def encode(string): - if not isinstance(string, unicode): - return string - encoding = (getattr(self.out, 'encoding', None) or - locale.getdefaultlocale()[1] or - sys.getdefaultencoding()) - return string.encode(encoding) - self.encode = encode - - def writeln(self, string=''): - """write a line in the output buffer""" - print >> self.out, self.encode(string) - - def display_results(self, layout): - """display results encapsulated in the layout tree""" - self.section = 0 - if self.include_ids and hasattr(layout, 'report_id'): - layout.children[0].children[0].data += ' (%s)' % layout.report_id - self._display(layout) - - def _display(self, layout): - """display the layout""" - raise NotImplementedError() - diff --git a/pylibs/pylint/reporters/html.py 
b/pylibs/pylint/reporters/html.py deleted file mode 100644 index 56efcd6d..00000000 --- a/pylibs/pylint/reporters/html.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) 2003-2006 Sylvain Thenault (thenault@gmail.com). -# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE). -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""HTML reporter""" - -import sys -from cgi import escape - -from logilab.common.ureports import HTMLWriter, Section, Table - -from pylint.interfaces import IReporter -from pylint.reporters import BaseReporter - - -class HTMLReporter(BaseReporter): - """report messages and layouts in HTML""" - - __implements__ = IReporter - extension = 'html' - - def __init__(self, output=sys.stdout): - BaseReporter.__init__(self, output) - self.msgs = [] - - def add_message(self, msg_id, location, msg): - """manage message of different type and in the context of path""" - module, obj, line, col_offset = location[1:] - if self.include_ids: - sigle = msg_id - else: - sigle = msg_id[0] - self.msgs += [sigle, module, obj, str(line), str(col_offset), escape(msg)] - - def set_output(self, output=None): - """set output stream - - messages buffered for old output is processed first""" - if self.out and self.msgs: - self._display(Section()) - BaseReporter.set_output(self, output) - - def _display(self, layout): - 
"""launch layouts display - - overridden from BaseReporter to add insert the messages section - (in add_message, message is not displayed, just collected so it - can be displayed in an html table) - """ - if self.msgs: - # add stored messages to the layout - msgs = ['type', 'module', 'object', 'line', 'col_offset', 'message'] - msgs += self.msgs - sect = Section('Messages') - layout.append(sect) - sect.append(Table(cols=6, children=msgs, rheaders=1)) - self.msgs = [] - HTMLWriter().format(layout, self.out) - diff --git a/pylibs/pylint/reporters/text.py b/pylibs/pylint/reporters/text.py deleted file mode 100644 index 269a519f..00000000 --- a/pylibs/pylint/reporters/text.py +++ /dev/null @@ -1,155 +0,0 @@ -# Copyright (c) 2003-2007 Sylvain Thenault (thenault@gmail.com). -# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE). -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
-"""Plain text reporters: - -:text: the default one grouping messages by module -:parseable: - standard parseable output with full module path on each message (for - editor integration) -:colorized: an ANSI colorized text reporter - -""" - -import os -import sys - -from logilab.common.ureports import TextWriter -from logilab.common.textutils import colorize_ansi - -from pylint.interfaces import IReporter -from pylint.reporters import BaseReporter - -TITLE_UNDERLINES = ['', '=', '-', '.'] - - -class TextReporter(BaseReporter): - """reports messages and layouts in plain text - """ - - __implements__ = IReporter - extension = 'txt' - - def __init__(self, output=sys.stdout): - BaseReporter.__init__(self, output) - self._modules = {} - - def add_message(self, msg_id, location, msg): - """manage message of different type and in the context of path""" - module, obj, line, col_offset = location[1:] - if module not in self._modules: - if module: - self.writeln('************* Module %s' % module) - self._modules[module] = 1 - else: - self.writeln('************* %s' % module) - if obj: - obj = ':%s' % obj - if self.include_ids: - sigle = msg_id - else: - sigle = msg_id[0] - self.writeln('%s:%3s,%s%s: %s' % (sigle, line, col_offset, obj, msg)) - - def _display(self, layout): - """launch layouts display""" - print >> self.out - TextWriter().format(layout, self.out) - - -class ParseableTextReporter(TextReporter): - """a reporter very similar to TextReporter, but display messages in a form - recognized by most text editors : - - :: - """ - line_format = '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s' - - def __init__(self, output=sys.stdout, relative=True): - TextReporter.__init__(self, output) - if relative: - self._prefix = os.getcwd() + os.sep - else: - self._prefix = '' - - def add_message(self, msg_id, location, msg): - """manage message of different type and in the context of path""" - path, _, obj, line, _ = location - if obj: - obj = ', %s' % obj - if self.include_ids: - 
sigle = msg_id - else: - sigle = msg_id[0] - if self._prefix: - path = path.replace(self._prefix, '') - self.writeln(self.line_format % locals()) - -class VSTextReporter(ParseableTextReporter): - """Visual studio text reporter""" - line_format = '%(path)s(%(line)s): [%(sigle)s%(obj)s] %(msg)s' - -class ColorizedTextReporter(TextReporter): - """Simple TextReporter that colorizes text output""" - - COLOR_MAPPING = { - "I" : ("green", None), - 'C' : (None, "bold"), - 'R' : ("magenta", "bold, italic"), - 'W' : ("blue", None), - 'E' : ("red", "bold"), - 'F' : ("red", "bold, underline"), - 'S' : ("yellow", "inverse"), # S stands for module Separator - } - - def __init__(self, output=sys.stdout, color_mapping = None): - TextReporter.__init__(self, output) - self.color_mapping = color_mapping or \ - dict(ColorizedTextReporter.COLOR_MAPPING) - - - def _get_decoration(self, msg_id): - """Returns the tuple color, style associated with msg_id as defined - in self.color_mapping - """ - try: - return self.color_mapping[msg_id[0]] - except KeyError: - return None, None - - def add_message(self, msg_id, location, msg): - """manage message of different types, and colorize output - using ansi escape codes - """ - module, obj, line, _ = location[1:] - if module not in self._modules: - color, style = self._get_decoration('S') - if module: - modsep = colorize_ansi('************* Module %s' % module, - color, style) - else: - modsep = colorize_ansi('************* %s' % module, - color, style) - self.writeln(modsep) - self._modules[module] = 1 - if obj: - obj = ':%s' % obj - if self.include_ids: - sigle = msg_id - else: - sigle = msg_id[0] - color, style = self._get_decoration(sigle) - msg = colorize_ansi(msg, color, style) - sigle = colorize_ansi(sigle, color, style) - self.writeln('%s:%3s%s: %s' % (sigle, line, obj, msg)) diff --git a/pylibs/pylint/utils.py b/pylibs/pylint/utils.py deleted file mode 100644 index 9c8e8f4e..00000000 --- a/pylibs/pylint/utils.py +++ /dev/null @@ -1,531 
+0,0 @@ -# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com). -# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). -# http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -"""some various utilities and helper classes, most of them used in the -main pylint class -""" - -import sys -from os import linesep -from os.path import dirname, basename, splitext, exists, isdir, join, normpath - -from logilab.common.modutils import modpath_from_file, get_module_files, \ - file_from_modpath -from logilab.common.textutils import normalize_text -from logilab.common.configuration import rest_format_section -from logilab.common.ureports import Section - -from logilab.astng import nodes, Module - -from pylint.checkers import EmptyReport - - -class UnknownMessage(Exception): - """raised when a unregistered message id is encountered""" - - -MSG_TYPES = { - 'I' : 'info', - 'C' : 'convention', - 'R' : 'refactor', - 'W' : 'warning', - 'E' : 'error', - 'F' : 'fatal' - } -MSG_TYPES_LONG = dict([(v, k) for k, v in MSG_TYPES.iteritems()]) - -MSG_TYPES_STATUS = { - 'I' : 0, - 'C' : 16, - 'R' : 8, - 'W' : 4, - 'E' : 2, - 'F' : 1 - } - -_MSG_ORDER = 'EWRCIF' - -def sort_msgs(msgids): - """sort message identifiers according to their category first""" - msgs = {} - for msg in msgids: 
- msgs.setdefault(msg[0], []).append(msg) - result = [] - for m_id in _MSG_ORDER: - if m_id in msgs: - result.extend( sorted(msgs[m_id]) ) - return result - -def get_module_and_frameid(node): - """return the module name and the frame id in the module""" - frame = node.frame() - module, obj = '', [] - while frame: - if isinstance(frame, Module): - module = frame.name - else: - obj.append(getattr(frame, 'name', '')) - try: - frame = frame.parent.frame() - except AttributeError: - frame = None - obj.reverse() - return module, '.'.join(obj) - -def category_id(id): - id = id.upper() - if id in MSG_TYPES: - return id - return MSG_TYPES_LONG.get(id) - - -class Message: - def __init__(self, checker, msgid, msg, descr): - assert len(msgid) == 5, 'Invalid message id %s' % msgid - assert msgid[0] in MSG_TYPES, \ - 'Bad message type %s in %r' % (msgid[0], msgid) - self.msgid = msgid - self.msg = msg - self.descr = descr - self.checker = checker - -class MessagesHandlerMixIn: - """a mix-in class containing all the messages related methods for the main - lint class - """ - - def __init__(self): - # dictionary of registered messages - self._messages = {} - self._msgs_state = {} - self._module_msgs_state = {} # None - self._msgs_by_category = {} - self.msg_status = 0 - - def register_messages(self, checker): - """register a dictionary of messages - - Keys are message ids, values are a 2-uple with the message type and the - message itself - - message ids should be a string of len 4, where the two first characters - are the checker id and the two last the message id in this checker - """ - msgs_dict = checker.msgs - chkid = None - for msgid, (msg, msgdescr) in msgs_dict.items(): - # avoid duplicate / malformed ids - assert msgid not in self._messages, \ - 'Message id %r is already defined' % msgid - assert chkid is None or chkid == msgid[1:3], \ - 'Inconsistent checker part in message id %r' % msgid - chkid = msgid[1:3] - self._messages[msgid] = Message(checker, msgid, msg, 
msgdescr) - self._msgs_by_category.setdefault(msgid[0], []).append(msgid) - - def get_message_help(self, msgid, checkerref=False): - """return the help string for the given message id""" - msg = self.check_message_id(msgid) - desc = normalize_text(' '.join(msg.descr.split()), indent=' ') - if checkerref: - desc += ' This message belongs to the %s checker.' % \ - msg.checker.name - title = msg.msg - if title != '%s': - title = title.splitlines()[0] - return ':%s: *%s*\n%s' % (msg.msgid, title, desc) - return ':%s:\n%s' % (msg.msgid, desc) - - def disable(self, msgid, scope='package', line=None): - """don't output message of the given id""" - assert scope in ('package', 'module') - # msgid is a category? - catid = category_id(msgid) - if catid is not None: - for msgid in self._msgs_by_category.get(catid): - self.disable(msgid, scope, line) - return - # msgid is a checker name? - if msgid.lower() in self._checkers: - for checker in self._checkers[msgid.lower()]: - for msgid in checker.msgs: - self.disable(msgid, scope, line) - return - # msgid is report id? - if msgid.lower().startswith('rp'): - self.disable_report(msgid) - return - # msgid is a msgid. - msg = self.check_message_id(msgid) - if scope == 'module': - assert line > 0 - try: - self._module_msgs_state[msg.msgid][line] = False - except KeyError: - self._module_msgs_state[msg.msgid] = {line: False} - if msgid != 'I0011': - self.add_message('I0011', line=line, args=msg.msgid) - - else: - msgs = self._msgs_state - msgs[msg.msgid] = False - # sync configuration object - self.config.disable_msg = [mid for mid, val in msgs.items() - if not val] - - def enable(self, msgid, scope='package', line=None): - """reenable message of the given id""" - assert scope in ('package', 'module') - catid = category_id(msgid) - # msgid is a category? - if catid is not None: - for msgid in self._msgs_by_category.get(catid): - self.enable(msgid, scope, line) - return - # msgid is a checker name? 
- if msgid.lower() in self._checkers: - for checker in self._checkers[msgid.lower()]: - for msgid in checker.msgs: - self.enable(msgid, scope, line) - return - # msgid is report id? - if msgid.lower().startswith('rp'): - self.enable_report(msgid) - return - # msgid is a msgid. - msg = self.check_message_id(msgid) - if scope == 'module': - assert line > 0 - try: - self._module_msgs_state[msg.msgid][line] = True - except KeyError: - self._module_msgs_state[msg.msgid] = {line: True} - self.add_message('I0012', line=line, args=msg.msgid) - else: - msgs = self._msgs_state - msgs[msg.msgid] = True - # sync configuration object - self.config.enable = [mid for mid, val in msgs.items() if val] - - def check_message_id(self, msgid): - """raise UnknownMessage if the message id is not defined""" - msgid = msgid.upper() - try: - return self._messages[msgid] - except KeyError: - raise UnknownMessage('No such message id %s' % msgid) - - def is_message_enabled(self, msgid, line=None): - """return true if the message associated to the given message id is - enabled - """ - if line is None: - return self._msgs_state.get(msgid, True) - try: - return self._module_msgs_state[msgid][line] - except (KeyError, TypeError): - return self._msgs_state.get(msgid, True) - - def add_message(self, msgid, line=None, node=None, args=None): - """add the message corresponding to the given id. - - If provided, msg is expanded using args - - astng checkers should provide the node argument, raw checkers should - provide the line argument. - """ - if line is None and node is not None: - line = node.fromlineno - if hasattr(node, 'col_offset'): - col_offset = node.col_offset # XXX measured in bytes for utf-8, divide by two for chars? 
- else: - col_offset = None - # should this message be displayed - if not self.is_message_enabled(msgid, line): - return - # update stats - msg_cat = MSG_TYPES[msgid[0]] - self.msg_status |= MSG_TYPES_STATUS[msgid[0]] - self.stats[msg_cat] += 1 - self.stats['by_module'][self.current_name][msg_cat] += 1 - try: - self.stats['by_msg'][msgid] += 1 - except KeyError: - self.stats['by_msg'][msgid] = 1 - msg = self._messages[msgid].msg - # expand message ? - if args: - msg %= args - # get module and object - if node is None: - module, obj = self.current_name, '' - path = self.current_file - else: - module, obj = get_module_and_frameid(node) - path = node.root().file - # add the message - self.reporter.add_message(msgid, (path, module, obj, line or 1, col_offset or 0), msg) - - def help_message(self, msgids): - """display help messages for the given message identifiers""" - for msgid in msgids: - try: - print self.get_message_help(msgid, True) - print - except UnknownMessage, ex: - print ex - print - continue - - def print_full_documentation(self): - """output a full documentation in ReST format""" - by_checker = {} - for checker in self.get_checkers(): - if checker.name == 'master': - prefix = 'Main ' - print "Options" - print '-------\n' - if checker.options: - for section, options in checker.options_by_section(): - if section is None: - title = 'General options' - else: - title = '%s options' % section.capitalize() - print title - print '~' * len(title) - rest_format_section(sys.stdout, None, options) - print - else: - try: - by_checker[checker.name][0] += checker.options_and_values() - by_checker[checker.name][1].update(checker.msgs) - by_checker[checker.name][2] += checker.reports - except KeyError: - by_checker[checker.name] = [list(checker.options_and_values()), - dict(checker.msgs), - list(checker.reports)] - for checker, (options, msgs, reports) in by_checker.items(): - prefix = '' - title = '%s checker' % checker - print title - print '-' * len(title) - print - 
if options: - title = 'Options' - print title - print '~' * len(title) - rest_format_section(sys.stdout, None, options) - print - if msgs: - title = ('%smessages' % prefix).capitalize() - print title - print '~' * len(title) - for msgid in sort_msgs(msgs.keys()): - print self.get_message_help(msgid, False) - print - if reports: - title = ('%sreports' % prefix).capitalize() - print title - print '~' * len(title) - for report in reports: - print ':%s: %s' % report[:2] - print - print - - def list_messages(self): - """output full messages list documentation in ReST format""" - msgids = [] - for checker in self.get_checkers(): - for msgid in checker.msgs.keys(): - msgids.append(msgid) - msgids.sort() - for msgid in msgids: - print self.get_message_help(msgid, False) - print - - -class ReportsHandlerMixIn: - """a mix-in class containing all the reports and stats manipulation - related methods for the main lint class - """ - def __init__(self): - self._reports = {} - self._reports_state = {} - - def register_report(self, reportid, r_title, r_cb, checker): - """register a report - - reportid is the unique identifier for the report - r_title the report's title - r_cb the method to call to make the report - checker is the checker defining the report - """ - reportid = reportid.upper() - self._reports.setdefault(checker, []).append( (reportid, r_title, r_cb) ) - - def enable_report(self, reportid): - """disable the report of the given id""" - reportid = reportid.upper() - self._reports_state[reportid] = True - - def disable_report(self, reportid): - """disable the report of the given id""" - reportid = reportid.upper() - self._reports_state[reportid] = False - - def report_is_enabled(self, reportid): - """return true if the report associated to the given identifier is - enabled - """ - return self._reports_state.get(reportid, True) - - def make_reports(self, stats, old_stats): - """render registered reports""" - if self.config.files_output: - filename = 'pylint_global.' 
+ self.reporter.extension - self.reporter.set_output(open(filename, 'w')) - sect = Section('Report', - '%s statements analysed.'% (self.stats['statement'])) - for checker in self._reports: - for reportid, r_title, r_cb in self._reports[checker]: - if not self.report_is_enabled(reportid): - continue - report_sect = Section(r_title) - try: - r_cb(report_sect, stats, old_stats) - except EmptyReport: - continue - report_sect.report_id = reportid - sect.append(report_sect) - self.reporter.display_results(sect) - - def add_stats(self, **kwargs): - """add some stats entries to the statistic dictionary - raise an AssertionError if there is a key conflict - """ - for key, value in kwargs.items(): - if key[-1] == '_': - key = key[:-1] - assert key not in self.stats - self.stats[key] = value - return self.stats - - -def expand_modules(files_or_modules, black_list): - """take a list of files/modules/packages and return the list of tuple - (file, module name) which have to be actually checked - """ - result = [] - errors = [] - for something in files_or_modules: - if exists(something): - # this is a file or a directory - try: - modname = '.'.join(modpath_from_file(something)) - except ImportError: - modname = splitext(basename(something))[0] - if isdir(something): - filepath = join(something, '__init__.py') - else: - filepath = something - else: - # suppose it's a module or package - modname = something - try: - filepath = file_from_modpath(modname.split('.')) - if filepath is None: - errors.append( {'key' : 'F0003', 'mod': modname} ) - continue - except (ImportError, SyntaxError), ex: - # FIXME p3k : the SyntaxError is a Python bug and should be - # removed as soon as possible http://bugs.python.org/issue10588 - errors.append( {'key': 'F0001', 'mod': modname, 'ex': ex} ) - continue - filepath = normpath(filepath) - result.append( {'path': filepath, 'name': modname, - 'basepath': filepath, 'basename': modname} ) - if not (modname.endswith('.__init__') or modname == '__init__') 
\ - and '__init__.py' in filepath: - for subfilepath in get_module_files(dirname(filepath), black_list): - if filepath == subfilepath: - continue - submodname = '.'.join(modpath_from_file(subfilepath)) - result.append( {'path': subfilepath, 'name': submodname, - 'basepath': filepath, 'basename': modname} ) - return result, errors - - -class PyLintASTWalker(object): - - def __init__(self, linter): - # callbacks per node types - self.nbstatements = 1 - self.visit_events = {} - self.leave_events = {} - self.linter = linter - - def add_checker(self, checker): - """walk to the checker's dir and collect visit and leave methods""" - # XXX : should be possible to merge needed_checkers and add_checker - vcids = set() - lcids = set() - visits = self.visit_events - leaves = self.leave_events - msgs = self.linter._msgs_state - for member in dir(checker): - cid = member[6:] - if cid == 'default': - continue - if member.startswith('visit_'): - v_meth = getattr(checker, member) - # don't use visit_methods with no activated message: - if hasattr(v_meth, 'checks_msgs'): - if not any(msgs.get(m, True) for m in v_meth.checks_msgs): - continue - visits.setdefault(cid, []).append(v_meth) - vcids.add(cid) - elif member.startswith('leave_'): - l_meth = getattr(checker, member) - # don't use leave_methods with no activated message: - if hasattr(l_meth, 'checks_msgs'): - if not any(msgs.get(m, True) for m in l_meth.checks_msgs): - continue - leaves.setdefault(cid, []).append(l_meth) - lcids.add(cid) - visit_default = getattr(checker, 'visit_default', None) - if visit_default: - for cls in nodes.ALL_NODE_CLASSES: - cid = cls.__name__.lower() - if cid not in vcids: - visits.setdefault(cid, []).append(visit_default) - # for now we have no "leave_default" method in Pylint - - def walk(self, astng): - """call visit events of astng checkers for the given node, recurse on - its children, then leave events. 
- """ - cid = astng.__class__.__name__.lower() - if astng.is_statement: - self.nbstatements += 1 - # generate events for this node on each checker - for cb in self.visit_events.get(cid, ()): - cb(astng) - # recurse on children - for child in astng.get_children(): - self.walk(child) - for cb in self.leave_events.get(cid, ()): - cb(astng) - diff --git a/pylibs/ropemode/__init__.py b/pylibs/ropemode/__init__.py deleted file mode 100644 index c1b3cb56..00000000 --- a/pylibs/ropemode/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -"""ropemode, a helper for using rope refactoring library in IDEs""" - -INFO = __doc__ -VERSION = '0.1-rc2' -COPYRIGHT = """\ -Copyright (C) 2007-2008 Ali Gholami Rudi - -This program is free software; you can redistribute it and/or modify it -under the terms of GNU General Public License as published by the -Free Software Foundation; either version 2 of the license, or (at your -opinion) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -GNU General Public License for more details.""" diff --git a/pylibs/ropemode/decorators.py b/pylibs/ropemode/decorators.py deleted file mode 100644 index 68eab4f1..00000000 --- a/pylibs/ropemode/decorators.py +++ /dev/null @@ -1,102 +0,0 @@ -import traceback - -from rope.base import exceptions - - -class Logger(object): - - message = None - only_short = False - - def __call__(self, message, short=None): - if short is None or not self.only_short: - self._show(message) - if short is not None: - self._show(short) - - def _show(self, message): - if message is None: - print message - else: - self.message(message) - - -logger = Logger() - - -def lisphook(func): - def newfunc(*args, **kwds): - try: - func(*args, **kwds) - except Exception, e: - trace = str(traceback.format_exc()) - short = 'Ignored an exception in ropemode hook: %s' % \ - _exception_message(e) - logger(trace, short) - newfunc.lisp = None - newfunc.__name__ = func.__name__ - newfunc.__doc__ = func.__doc__ - return newfunc - - -def lispfunction(func): - func.lisp = None - return func - - -input_exceptions = (exceptions.RefactoringError, - exceptions.ModuleSyntaxError, - exceptions.BadIdentifierError) - - -def _exception_handler(func): - def newfunc(*args, **kwds): - try: - return func(*args, **kwds) - except exceptions.RopeError, e: - short = None - if isinstance(e, input_exceptions): - short = _exception_message(e) - logger(str(traceback.format_exc()), short) - newfunc.__name__ = func.__name__ - newfunc.__doc__ = func.__doc__ - return newfunc - - -def _exception_message(e): - return '%s: %s' % (e.__class__.__name__, str(e)) - - -def rope_hook(hook): - def decorator(func): - func = lisphook(func) - func.name = func.__name__ - func.kind = 'hook' - func.hook = hook - return func - return decorator - - -def local_command(key=None, prefix=False, shortcut=None, name=None): - def decorator(func, name=name): - func = _exception_handler(func) - func.kind = 'local' - func.prefix = prefix - func.local_key = 
key - func.shortcut_key = shortcut - if name is None: - name = func.__name__ - func.name = name - return func - return decorator - - -def global_command(key=None, prefix=False): - def decorator(func): - func = _exception_handler(func) - func.kind = 'global' - func.prefix = prefix - func.global_key = key - func.name = func.__name__ - return func - return decorator diff --git a/pylibs/ropemode/dialog.py b/pylibs/ropemode/dialog.py deleted file mode 100644 index 5dac796c..00000000 --- a/pylibs/ropemode/dialog.py +++ /dev/null @@ -1,105 +0,0 @@ -class Data(object): - - def __init__(self, prompt=None, default=None, values=None, - kind=None, decode=None): - self.prompt = prompt - self.default = default - self.values = values - self.kind = kind - self._decode = decode - - def decode(self, value): - if self._decode: - return self._decode(value) - return value - - -class Boolean(Data): - - def __init__(self, prompt=None, default=False): - Data.__init__(self, prompt, self._encode(default), - [self._encode(True), self._encode(False)]) - - @staticmethod - def _encode(value): - if value: - return 'yes' - return 'no' - - def decode(self, value): - if value.lower() in ('yes', '1', 'true'): - return True - return False - - -def show_dialog(askdata, actions, confs=None, optionals=None, initial_asking=True): - result = {} - - if confs is None: - confs = dict() - - if optionals is None: - optionals = dict() - - if initial_asking: - for name, conf in confs.items(): - result[name] = askdata(conf) - - actions.append('batchset') - names = list(actions) - names.extend(optionals.keys()) - names.extend(confs.keys()) - base_question = Data('Choose what to do: ', - default=actions[0], values=names) - batchset_question = Data('Batch sets: ') - while True: - response = askdata(base_question) - if response == '': - response = base_question.default - elif response == 'batchset': - sets = askdata(batchset_question) - for key, value in _parse_batchset(sets).items(): - if key.endswith(':'): - key = 
key[:-1] - if key in names: - conf = confs.get(key, optionals.get(key)) - result[key] = value - elif response in actions: - break - else: - if response in confs: - conf = confs[response] - else: - conf = optionals[response] - oldvalue = result.get(response, None) - result[response] = askdata(conf, starting=oldvalue) - decoded = {} - all_confs = dict(confs) - all_confs.update(optionals) - for key in all_confs: - conf = all_confs.get(key) - if key in result: - decoded[key] = conf.decode(result[key]) - else: - decoded[key] = conf.decode(conf.default) - return response, decoded - - -def _parse_batchset(sets): - result = [] - multiline = False - for line in sets.splitlines(True): - if line[0].isspace(): - if multiline: - result[-1][1] += line[1:] - else: - if not line.strip(): - continue - multiline = False - tokens = line.split(None, 1) - if len(tokens) > 1: - result.append([tokens[0], tokens[1].rstrip('\r\n')]) - else: - multiline = True - result.append([tokens[0], '']) - return dict(result) diff --git a/pylibs/ropemode/environment.py b/pylibs/ropemode/environment.py deleted file mode 100644 index ea8fe32f..00000000 --- a/pylibs/ropemode/environment.py +++ /dev/null @@ -1,104 +0,0 @@ -class Environment(object): - - def ask(self, prompt, default=None, starting=None): - pass - - def ask_values(self, prompt, values, default=None, starting=None): - pass - - def ask_directory(self, prompt, default=None, starting=None): - pass - - def ask_completion(self, prompt, values, starting=None): - pass - - def message(self, message): - pass - - def yes_or_no(self, prompt): - pass - - def y_or_n(self, prompt): - pass - - def get(self, name, default=None): - pass - - def get_offset(self): - pass - - def get_text(self): - pass - - def get_region(self): - pass - - def filename(self): - pass - - def is_modified(self): - pass - - def goto_line(self, lineno): - pass - - def insert_line(self, line, lineno): - pass - - def insert(self, text): - pass - - def delete(self, start, end): - pass - 
- def filenames(self): - pass - - def save_files(self, filenames): - pass - - def reload_files(self, filenames, moves=None): - pass - - def find_file(self, filename, readonly=False, other=False): - pass - - def create_progress(self, name): - pass - - def current_word(self): - pass - - def push_mark(self): - pass - - def pop_mark(self): - pass - - def prefix_value(self, prefix): - pass - - def show_occurrences(self, locations): - pass - - def show_doc(self, docs, altview=False): - pass - - def preview_changes(self, diffs): - pass - - def local_command(self, name, callback, key=None, prefix=False): - pass - - def global_command(self, name, callback, key=None, prefix=False): - pass - - def add_hook(self, name, callback, hook): - pass - - @staticmethod - def _completion_text(proposal): - return proposal.name.partition(':')[0].strip() - - def _completion_data(self, proposal): - return self._completion_text(proposal) diff --git a/pylibs/ropemode/filter.py b/pylibs/ropemode/filter.py deleted file mode 100644 index 40c49d06..00000000 --- a/pylibs/ropemode/filter.py +++ /dev/null @@ -1,39 +0,0 @@ -from rope.base import exceptions - - -def resources(project, rules): - """Find python files in the `project` matching `rules` - - `rules` is a multi-line `str`; each line starts with either a '+' - or '-'. Each '+' means include the file (or its children if it's - a folder) that comes after it. '-' has the same meaning for - exclusion. 
- - """ - all_files = set(project.pycore.get_python_files()) - files = None - for line in rules.splitlines(): - if not line.strip(): - continue - first, path = (line[0], line[1:]) - if first not in '+-': - continue - try: - resource = project.get_resource(path.strip()) - except exceptions.ResourceNotFoundError: - continue - if resource.is_folder(): - matches = set(filter(resource.contains, all_files)) - else: - matches = set([resource]) - if first == '+': - if files is None: - files = set() - files.update(matches) - if first == '-': - if files is None: - files = set(all_files) - files -= matches - if files is None: - return all_files - return files diff --git a/pylibs/ropemode/interface.py b/pylibs/ropemode/interface.py deleted file mode 100644 index 05c562e3..00000000 --- a/pylibs/ropemode/interface.py +++ /dev/null @@ -1,697 +0,0 @@ -import os - -import rope.base.change -from rope.base import libutils, utils, exceptions -from rope.contrib import codeassist, generate, autoimport, findit - -from ropemode import refactor, decorators, dialog - - -class RopeMode(object): - - def __init__(self, env): - self.project = None - self.old_content = None - self.env = env - - self._prepare_refactorings() - self.autoimport = None - - for attrname in dir(self): - attr = getattr(self, attrname) - if not callable(attr): - continue - kind = getattr(attr, 'kind', None) - if kind == 'local': - key = getattr(attr, 'local_key', None) - prefix = getattr(attr, 'prefix', None) - self.env.local_command(attrname, attr, key, prefix) - if kind == 'global': - key = getattr(attr, 'global_key', None) - prefix = getattr(attr, 'prefix', None) - self.env.global_command(attrname, attr, key, prefix) - if kind == 'hook': - hook = getattr(attr, 'hook', None) - self.env.add_hook(attrname, attr, hook) - - def _prepare_refactorings(self): - for name in dir(refactor): - if not name.startswith('_') and name != 'Refactoring': - attr = getattr(refactor, name) - if isinstance(attr, type) and \ - 
issubclass(attr, refactor.Refactoring): - refname = self._refactoring_name(attr) - - @decorators.local_command(attr.key, 'P', None, refname) - def do_refactor(prefix, self=self, refactoring=attr): - initial_asking = prefix is None - refactoring(self, self.env).show(initial_asking=initial_asking) - setattr(self, refname, do_refactor) - - @staticmethod - def _refactoring_name(refactoring): - return refactor.refactoring_name(refactoring) - - @decorators.rope_hook('before_save') - def before_save_actions(self): - if self.project is not None: - if not self._is_python_file(self.env.filename()): - return - resource = self._get_resource() - if resource.exists(): - self.old_content = resource.read() - else: - self.old_content = '' - - @decorators.rope_hook('after_save') - def after_save_actions(self): - if self.project is not None and self.old_content is not None: - libutils.report_change(self.project, self.env.filename(), - self.old_content) - self.old_content = None - - @decorators.rope_hook('exit') - def exiting_actions(self): - if self.project is not None: - self.close_project() - - @decorators.global_command('o') - def open_project(self, root=None): - if not root: - if self.env.get('auto_project'): - root = self.env.get_cur_dir() - else: - root = self.env.ask_directory('Rope project root folder: ') - if self.project is not None: - self.close_project() - address = rope.base.project._realpath(os.path.join(root, - '.ropeproject')) - if not os.path.exists(address) and not self.env.get('auto_project'): - if not self.env.y_or_n('Project not exists in %s, create one?' 
% root): - self.env.message("Project creation aborted") - return - - progress = self.env.create_progress('Opening [%s] project' % root) - self.project = rope.base.project.Project(root) - - if self.env.get('enable_autoimport'): - underlined = self.env.get('autoimport_underlineds') - self.autoimport = autoimport.AutoImport(self.project, - underlined=underlined) - progress.done() - - @decorators.global_command('k') - def close_project(self): - if self.project is not None: - progress = self.env.create_progress('Closing [%s] project' % - self.project.address) - self.project.close() - self.project = None - progress.done() - - @decorators.global_command() - def write_project(self): - if self.project is not None: - progress = self.env.create_progress( - 'Writing [%s] project data to disk' % self.project.address) - self.project.sync() - progress.done() - - @decorators.global_command('u') - def undo(self): - self._check_project() - change = self.project.history.tobe_undone - if change is None: - self.env.message('Nothing to undo!') - return - if self.env.y_or_n('Undo [%s]? ' % str(change)): - def undo(handle): - for changes in self.project.history.undo(task_handle=handle): - self._reload_buffers(changes, undo=True) - refactor.runtask(self.env, undo, 'Undo refactoring', - interrupts=False) - - @decorators.global_command('r') - def redo(self): - self._check_project() - change = self.project.history.tobe_redone - if change is None: - self.env.message('Nothing to redo!') - return - if self.env.y_or_n('Redo [%s]? 
' % str(change)): - def redo(handle): - for changes in self.project.history.redo(task_handle=handle): - self._reload_buffers(changes) - refactor.runtask(self.env, redo, 'Redo refactoring', - interrupts=False) - - @decorators.local_command('a g', shortcut='C-c g') - def goto_definition(self): - definition = self._base_definition_location() - if definition: - self.env.push_mark() - self._goto_location(definition[0], definition[1]) - else: - self.env.message('Cannot find the definition!') - - @decorators.local_command() - def pop_mark(self): - self.env.pop_mark() - - @decorators.local_command() - def definition_location(self): - definition = self._base_definition_location() - if definition: - return str(definition[0].real_path), definition[1] - return None - - def _base_definition_location(self): - self._check_project() - resource, offset = self._get_location() - maxfixes = self.env.get('codeassist_maxfixes') - try: - definition = codeassist.get_definition_location( - self.project, self._get_text(), offset, resource, maxfixes) - except exceptions.BadIdentifierError: - return None - if tuple(definition) != (None, None): - return definition - return None - - @decorators.local_command('a d', 'P', 'C-c d') - def show_doc(self, prefix): - self._check_project() - self._base_show_doc(prefix, codeassist.get_doc) - - @decorators.local_command('a c', 'P') - def show_calltip(self, prefix): - self._check_project() - def _get_doc(project, text, offset, *args, **kwds): - try: - offset = text.rindex('(', 0, offset) - 1 - except ValueError: - return None - return codeassist.get_calltip(project, text, offset, *args, **kwds) - self._base_show_doc(prefix, _get_doc) - - def _base_show_doc(self, prefix, get_doc): - docs = self._base_get_doc(get_doc) - if docs: - self.env.show_doc(docs, prefix) - else: - self.env.message('No docs available!') - - @decorators.local_command() - def get_doc(self): - self._check_project() - return self._base_get_doc(codeassist.get_doc) - - def 
_base_get_doc(self, get_doc): - maxfixes = self.env.get('codeassist_maxfixes') - text = self._get_text() - offset = self.env.get_offset() - try: - return get_doc(self.project, text, offset, - self.resource, maxfixes) - except exceptions.BadIdentifierError: - return None - - def _get_text(self): - resource = self.resource - if not self.env.is_modified() and resource is not None: - return resource.read() - return self.env.get_text() - - def _base_findit(self, do_find, optionals, get_kwds): - self._check_project() - self._save_buffers() - resource, offset = self._get_location() - - action, values = dialog.show_dialog( - self._askdata, ['search', 'cancel'], optionals=optionals) - if action == 'search': - kwds = get_kwds(values) - def calculate(handle): - resources = refactor._resources(self.project, - values.get('resources')) - return do_find(self.project, resource, offset, - resources=resources, task_handle=handle, **kwds) - result = refactor.runtask(self.env, calculate, 'Find Occurrences') - locations = [Location(location) for location in result] - self.env.show_occurrences(locations) - - @decorators.local_command('a f', shortcut='C-c f') - def find_occurrences(self): - optionals = { - 'unsure': dialog.Data('Find uncertain occurrences: ', - default='no', values=['yes', 'no']), - 'resources': dialog.Data('Files to search: '), - 'in_hierarchy': dialog.Data( - 'Rename methods in class hierarchy: ', - default='no', values=['yes', 'no'])} - def get_kwds(values): - return {'unsure': values.get('unsure') == 'yes', - 'in_hierarchy': values.get('in_hierarchy') == 'yes'} - self._base_findit(findit.find_occurrences, optionals, get_kwds) - - @decorators.local_command('a i') - def find_implementations(self): - optionals = {'resources': dialog.Data('Files to search: ')} - def get_kwds(values): - return {} - self._base_findit(findit.find_implementations, optionals, get_kwds) - - @decorators.local_command('a /', 'P', 'M-/') - def code_assist(self, prefix): - _CodeAssist(self, 
self.env).code_assist(prefix) - - @decorators.local_command('a ?', 'P', 'M-?') - def lucky_assist(self, prefix): - _CodeAssist(self, self.env).lucky_assist(prefix) - - @decorators.local_command(prefix='P') - def omni_complete(self, prefix): - _CodeAssist(self, self.env).omni_complete(prefix) - - @decorators.local_command('a') - def auto_import(self): - _CodeAssist(self, self.env).auto_import() - - @decorators.local_command() - def completions(self): - return _CodeAssist(self, self.env).completions() - - @decorators.local_command() - def extended_completions(self): - return _CodeAssist(self, self.env).extended_completions() - - def _check_autoimport(self): - self._check_project() - if self.autoimport is None: - self.env.message('autoimport is disabled; ' - 'see `enable_autoimport\' variable') - return False - return True - - @decorators.global_command('g') - def generate_autoimport_cache(self): - - if not self._check_autoimport(): - return - - modules = self.env.get('autoimport_modules') - modules = [ m if isinstance(m, basestring) else m.value() for m in modules ] - - def gen(handle): - self.autoimport.generate_cache(task_handle=handle) - self.autoimport.generate_modules_cache(modules, task_handle=handle) - - refactor.runtask(self.env, gen, 'Generate autoimport cache') - self.write_project() - - @decorators.global_command('f', 'P') - def find_file(self, prefix): - f = self._base_find_file(prefix) - if f is not None: - self.env.find_file(f.real_path) - - @decorators.global_command('4 f', 'P') - def find_file_other_window(self, prefix): - f = self._base_find_file(prefix) - if f is not None: - self.env.find_file(f.real_path, other=True) - - def _base_find_file(self, prefix): - self._check_project() - if prefix: - files = self.project.pycore.get_python_files() - else: - files = self.project.get_files() - return self._ask_file(files) - - def _ask_file(self, files): - names = [] - for f in files: - names.append('<'.join(reversed(f.path.split('/')))) - result = 
self.env.ask_values('Rope Find File: ', names) - if result is not None: - path = '/'.join(reversed(result.split('<'))) - f = self.project.get_file(path) - return f - self.env.message('No f selected') - - @decorators.local_command('a j') - def jump_to_global(self): - if not self._check_autoimport(): - return - all_names = list(self.autoimport.get_all_names()) - name = self.env.ask_values('Global name: ', all_names) - result = dict(self.autoimport.get_name_locations(name)) - if len(result) == 1: - resource = list(result.keys())[0] - else: - resource = self._ask_file(result.keys()) - if resource: - self._goto_location(resource, result[resource]) - - @decorators.global_command('c') - def project_config(self): - self._check_project() - if self.project.ropefolder is not None: - config = self.project.ropefolder.get_child('config.py') - self.env.find_file(config.real_path) - else: - self.env.message('No rope project folder found') - - @decorators.global_command('n m') - def create_module(self): - def callback(sourcefolder, name): - return generate.create_module(self.project, name, sourcefolder) - self._create('module', callback) - - @decorators.global_command('n p') - def create_package(self): - def callback(sourcefolder, name): - folder = generate.create_package(self.project, name, sourcefolder) - return folder.get_child('__init__.py') - self._create('package', callback) - - @decorators.global_command('n f') - def create_file(self): - def callback(parent, name): - return parent.create_file(name) - self._create('file', callback, 'parent') - - @decorators.global_command('n d') - def create_directory(self): - def callback(parent, name): - parent.create_folder(name) - self._create('directory', callback, 'parent') - - @decorators.local_command() - def analyze_module(self): - """Perform static object analysis on this module""" - self._check_project() - self.project.pycore.analyze_module(self.resource) - - @decorators.global_command() - def analyze_modules(self): - """Perform 
static object analysis on all project modules""" - self._check_project() - def _analyze_modules(handle): - libutils.analyze_modules(self.project, task_handle=handle) - refactor.runtask(self.env, _analyze_modules, 'Analyze project modules') - - @decorators.local_command() - def run_module(self): - """Run and perform dynamic object analysis on this module""" - self._check_project() - process = self.project.pycore.run_module(self.resource) - try: - process.wait_process() - finally: - process.kill_process() - - def _create(self, name, callback, parentname='source'): - self._check_project() - confs = {'name': dialog.Data(name.title() + ' name: ')} - parentname = parentname + 'folder' - optionals = {parentname: dialog.Data( - parentname.title() + ' Folder: ', - default=self.project.address, kind='directory')} - action, values = dialog.show_dialog( - self._askdata, ['perform', 'cancel'], confs, optionals) - if action == 'perform': - parent = libutils.path_to_resource( - self.project, values.get(parentname, self.project.address)) - resource = callback(parent, values['name']) - if resource: - self.env.find_file(resource.real_path) - - def _goto_location(self, resource, lineno): - if resource: - self.env.find_file(str(resource.real_path), - other=self.env.get('goto_def_newwin')) - if lineno: - self.env.goto_line(lineno) - - def _get_location(self): - offset = self.env.get_offset() - return self.resource, offset - - def _get_resource(self, filename=None): - if filename is None: - filename = self.env.filename() - if filename is None or self.project is None: - return - resource = libutils.path_to_resource(self.project, filename, 'file') - return resource - - @property - def resource(self): - """the current resource - - Returns `None` when file does not exist. 
- """ - resource = self._get_resource() - if resource and resource.exists(): - return resource - - def _check_project(self): - if self.project is None: - if self.env.get('guess_project'): - self.open_project(self._guess_project()) - else: - self.open_project() - else: - self.project.validate(self.project.root) - - def _guess_project(self): - cwd = self.env.filename() - if cwd is not None: - while True: - ropefolder = os.path.join(cwd, '.ropeproject') - if os.path.exists(ropefolder) and os.path.isdir(ropefolder): - return cwd - newcwd = os.path.dirname(cwd) - if newcwd == cwd: - break - cwd = newcwd - - def _reload_buffers(self, changes, undo=False): - self._reload_buffers_for_changes( - changes.get_changed_resources(), - self._get_moved_resources(changes, undo)) - - def _reload_buffers_for_changes(self, changed, moved=None): - if moved is None: - moved = dict() - - filenames = [resource.real_path for resource in changed] - moved = dict((resource.real_path, moved[resource].real_path) for resource in moved) - self.env.reload_files(filenames, moved) - - def _get_moved_resources(self, changes, undo=False): - result = {} - if isinstance(changes, rope.base.change.ChangeSet): - for change in changes.changes: - result.update(self._get_moved_resources(change)) - if isinstance(changes, rope.base.change.MoveResource): - result[changes.resource] = changes.new_resource - if undo: - return dict([(value, key) for key, value in result.items()]) - return result - - def _save_buffers(self, only_current=False): - if only_current: - filenames = [self.env.filename()] - else: - filenames = self.env.filenames() - pythons = [] - for filename in filenames: - if self._is_python_file(filename): - pythons.append(filename) - self.env.save_files(pythons) - - def _is_python_file(self, path): - resource = self._get_resource(path) - return (resource is not None and - resource.project == self.project and - self.project.pycore.is_python_file(resource)) - - def _askdata(self, data, starting=None): - 
ask_func = self.env.ask - ask_args = {'prompt': data.prompt, 'starting': starting, - 'default': data.default} - if data.values: - ask_func = self.env.ask_values - ask_args['values'] = data.values - elif data.kind == 'directory': - ask_func = self.env.ask_directory - return ask_func(**ask_args) - - -class Location(object): - def __init__(self, location): - self.location = location - self.filename = location.resource.real_path - self.offset = location.offset - self.note = '' - if location.unsure: - self.note = '?' - - @property - def lineno(self): - if hasattr(self.location, 'lineno'): - return self.location.lineno - return self.location.resource.read().count('\n', 0, self.offset) + 1 - - -class _CodeAssist(object): - - def __init__(self, interface, env): - self.interface = interface - self.env = env - - def code_assist(self, prefix): - proposals = self._calculate_proposals() - if prefix is not None: - arg = self.env.prefix_value(prefix) - if arg == 0: - arg = len(proposals) - common_start = self._calculate_prefix(proposals[:arg]) - self.env.insert(common_start[self.offset - self.starting_offset:]) - self._starting = common_start - self._offset = self.starting_offset + len(common_start) - prompt = 'Completion for %s: ' % self.expression - proposals = map(self.env._completion_data, proposals) - result = self.env.ask_completion(prompt, proposals, self.starting) - if result is not None: - self._apply_assist(result) - - def omni_complete(self, prefix): - proposals = self._calculate_proposals() - proposals = self.env._update_proposals(proposals) - command = u'let g:pythoncomplete_completions = [%s]' % proposals - self.env._command(command, encode=True) - - def lucky_assist(self, prefix): - proposals = self._calculate_proposals() - selected = 0 - if prefix is not None: - selected = self.env.prefix_value(prefix) - if 0 <= selected < len(proposals): - result = self.env._completion_text(proposals[selected]) - else: - self.env.message('Not enough proposals!') - return - 
self._apply_assist(result) - - def auto_import(self): - - if not self.interface._check_autoimport(): - return - - if not self.autoimport.names and self.env.get('autoimport_generate'): - self.interface.generate_autoimport_cache() - - name = self.env.current_word() - modules = self.autoimport.get_modules(name) - - if modules: - if len(modules) == 1: - module = modules[0] - else: - module = self.env.ask_values( - 'Which module to import: ', modules) - self._insert_import(name, module) - else: - self.env.message('Global name %s not found!' % name) - - def completions(self): - proposals = self._calculate_proposals() - prefix = self.offset - self.starting_offset - return [self.env._completion_text(proposal)[prefix:] - for proposal in proposals] - - def extended_completions(self): - proposals = self._calculate_proposals() - prefix = self.offset - self.starting_offset - return [[proposal.name[prefix:], proposal.get_doc(), - proposal.type] for proposal in proposals] - - def _apply_assist(self, assist): - if ' : ' in assist: - name, module = assist.rsplit(' : ', 1) - self.env.delete(self.starting_offset + 1, self.offset + 1) - self.env.insert(name) - self._insert_import(name, module) - else: - self.env.delete(self.starting_offset + 1, self.offset + 1) - self.env.insert(assist) - - def _calculate_proposals(self): - self.interface._check_project() - resource = self.interface.resource - maxfixes = self.env.get('codeassist_maxfixes') - proposals = codeassist.code_assist( - self.interface.project, self.source, self.offset, - resource, maxfixes=maxfixes) - if self.env.get('sorted_completions'): - proposals = codeassist.sorted_proposals(proposals) - if self.autoimport is not None: - if self.starting.strip() and '.' 
not in self.expression: - import_assists = self.autoimport.import_assist(self.starting) - for assist in import_assists: - p = codeassist.CompletionProposal(' : '.join(assist), - 'autoimport') - proposals.append(p) - return proposals - - def _insert_import(self, name, module): - lineno = self.autoimport.find_insertion_line(self.source) - line = 'from %s import %s' % (module, name) - self.env.insert_line(line, lineno) - - def _calculate_prefix(self, proposals): - if not proposals: - return '' - prefix = self.env._completion_text(proposals[0]) - for proposal in proposals: - common = 0 - name = self.env._completion_text(proposal) - for c1, c2 in zip(prefix, name): - if c1 != c2 or ' ' in (c1, c2): - break - common += 1 - prefix = prefix[:common] - return prefix - - @property - @utils.cacheit - def offset(self): - return self.env.get_offset() - - @property - @utils.cacheit - def source(self): - return self.interface._get_text() - - @property - @utils.cacheit - def starting_offset(self): - return codeassist.starting_offset(self.source, self.offset) - - @property - @utils.cacheit - def starting(self): - return self.source[self.starting_offset:self.offset] - - @property - @utils.cacheit - def expression(self): - return codeassist.starting_expression(self.source, self.offset) - - @property - def autoimport(self): - return self.interface.autoimport diff --git a/pylibs/ropemode/refactor.py b/pylibs/ropemode/refactor.py deleted file mode 100644 index 738c1201..00000000 --- a/pylibs/ropemode/refactor.py +++ /dev/null @@ -1,533 +0,0 @@ -import re - -import rope.base.change -import rope.contrib.generate -import rope.refactor.change_signature -import rope.refactor.extract -import rope.refactor.inline -import rope.refactor.introduce_factory -import rope.refactor.method_object -import rope.refactor.move -import rope.refactor.rename -import rope.refactor.restructure -import rope.refactor.usefunction -from rope.base import taskhandle - -from ropemode import dialog, filter as 
file_filter - - -class Refactoring(object): - key = None - confs = {} - optionals = {} - saveall = True - - def __init__(self, interface, env): - self.interface = interface - self.env = env - - def show(self, initial_asking=True): - self.interface._check_project() - self.interface._save_buffers(only_current=not self.saveall) - self._create_refactoring() - action, result = dialog.show_dialog( - self.interface._askdata, ['perform', 'preview', 'cancel'], - self._get_confs(), self._get_optionals(), - initial_asking=initial_asking) - if action == 'cancel': - self.env.message('Cancelled!') - return - def calculate(handle): - return self._calculate_changes(result, handle) - name = 'Calculating %s changes' % self.name - changes = runtask(self.env, calculate, name=name) - if action == 'perform': - self._perform(changes) - if action == 'preview': - if changes is not None: - diffs = changes.get_description() - if self.env.preview_changes(diffs): - self._perform(changes) - else: - self.env.message('Thrown away!') - else: - self.env.message('No changes!') - - @property - def project(self): - return self.interface.project - - @property - def resource(self): - return self.interface._get_resource() - - @property - def offset(self): - return self.env.get_offset() - - @property - def region(self): - return self.env.get_region() - - @property - def name(self): - return refactoring_name(self.__class__) - - def _calculate_changes(self, option_values, task_handle): - pass - - def _create_refactoring(self): - pass - - def _done(self): - pass - - def _perform(self, changes): - if changes is None: - self.env.message('No changes!') - return - def perform(handle, self=self, changes=changes): - self.project.do(changes, task_handle=handle) - self.interface._reload_buffers(changes) - self._done() - runtask(self.env, perform, 'Making %s changes' % self.name, - interrupts=False) - self.env.message(str(changes.description) + ' finished') - - def _get_confs(self): - return self.confs - - def 
_get_optionals(self): - return self.optionals - - @property - def resources_option(self): - return dialog.Data('Files to apply this refactoring on: ', - decode=self._decode_resources) - - def _decode_resources(self, value): - return _resources(self.project, value) - - -class Rename(Refactoring): - key = 'r' - saveall = True - - def __init__(self, *args): - self.renamer = None - super(Rename, self).__init__(*args) - - def _create_refactoring(self): - self.renamer = rope.refactor.rename.Rename( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - return self.renamer.get_changes(task_handle=task_handle, **values) - - def _get_optionals(self): - opts = {} - opts['docs'] = dialog.Boolean('Search comments and docs: ', True) - if self.renamer.is_method(): - opts['in_hierarchy'] = dialog.Boolean('Rename methods in ' - 'class hierarchy: ') - opts['resources'] = self.resources_option - opts['unsure'] = dialog.Data('Unsure occurrences: ', - decode=self._decode_unsure, - values=['ignore', 'match'], - default='ignore') - return opts - - def _get_confs(self): - oldname = str(self.renamer.get_old_name()) - return {'new_name': dialog.Data('New name: ', default=oldname)} - - @staticmethod - def _decode_unsure(value): - unsure = value == 'match' - return lambda occurrence: unsure - - -class RenameCurrentModule(Rename): - key = '1 r' - offset = None - - -class Restructure(Refactoring): - key = 'x' - confs = {'pattern': dialog.Data('Restructuring pattern: '), - 'goal': dialog.Data('Restructuring goal: ')} - - def _calculate_changes(self, values, task_handle): - restructuring = rope.refactor.restructure.Restructure( - self.project, values['pattern'], values['goal'], - args=values['args'], imports=values['imports']) - return restructuring.get_changes(resources=values['resources'], - task_handle=task_handle) - - def _get_optionals(self): - return { - 'args': dialog.Data('Arguments: ', decode=self._decode_args), - 'imports': 
dialog.Data('Imports: ', decode=self._decode_imports), - 'resources': self.resources_option} - - @staticmethod - def _decode_args(value): - if value: - args = {} - for raw_check in value.split('\n'): - if raw_check: - key, value = raw_check.split(':', 1) - args[key.strip()] = value.strip() - return args - - @staticmethod - def _decode_imports(value): - if value: - return [line.strip() for line in value.split('\n')] - - -class UseFunction(Refactoring): - key = 'u' - - def __init__(self, *args): - super(UseFunction, self).__init__(*args) - self.user = None - - def _create_refactoring(self): - self.user = rope.refactor.usefunction.UseFunction( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - return self.user.get_changes(task_handle=task_handle, **values) - - def _get_optionals(self): - return {'resources': self.resources_option} - - -class Move(Refactoring): - key = 'v' - def __init__(self, *args): - super(Move, self).__init__(*args) - self.mover = None - - def _create_refactoring(self): - self.mover = rope.refactor.move.create_move(self.project, - self.resource, - self.offset) - - def _calculate_changes(self, values, task_handle): - destination = values['destination'] - resources = values.get('resources', None) - if isinstance(self.mover, rope.refactor.move.MoveGlobal): - return self._move_global(destination, resources, task_handle) - if isinstance(self.mover, rope.refactor.move.MoveModule): - return self._move_module(destination, resources, task_handle) - if isinstance(self.mover, rope.refactor.move.MoveMethod): - return self._move_method(destination, resources, task_handle) - - def _move_global(self, dest, resources, handle): - destination = self.project.pycore.find_module(dest) - return self.mover.get_changes( - destination, resources=resources, task_handle=handle) - - def _move_method(self, dest, resources, handle): - return self.mover.get_changes( - dest, self.mover.get_method_name(), - resources=resources, 
task_handle=handle) - - def _move_module(self, dest, resources, handle): - destination = self.project.pycore.find_module(dest) - return self.mover.get_changes( - destination, resources=resources, task_handle=handle) - - def _get_confs(self): - if isinstance(self.mover, rope.refactor.move.MoveGlobal): - prompt = 'Destination module: ' - if isinstance(self.mover, rope.refactor.move.MoveModule): - prompt = 'Destination package: ' - if isinstance(self.mover, rope.refactor.move.MoveMethod): - prompt = 'Destination attribute: ' - return {'destination': dialog.Data(prompt)} - - def _get_optionals(self): - return {'resources': self.resources_option} - - -class MoveCurrentModule(Move): - key = '1 v' - offset = None - - -class ModuleToPackage(Refactoring): - key = '1 p' - saveall = False - def __init__(self, *args): - super(ModuleToPackage, self).__init__(*args) - self.packager = None - - def _create_refactoring(self): - self.packager = rope.refactor.ModuleToPackage( - self.project, self.resource) - - def _calculate_changes(self, values, task_handle): - return self.packager.get_changes() - - -class Inline(Refactoring): - key = 'i' - def __init__(self, *args): - super(Inline, self).__init__(*args) - self.inliner = None - - def _create_refactoring(self): - self.inliner = rope.refactor.inline.create_inline( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - return self.inliner.get_changes(task_handle=task_handle, **values) - - def _get_optionals(self): - opts = {'resources': self.resources_option} - if self.inliner.get_kind() == 'parameter': - opts['in_hierarchy'] = dialog.Boolean( - 'Apply on all matching methods in class hierarchy: ', False) - else: - opts['remove'] = dialog.Boolean('Remove the definition: ', True) - opts['only_current'] = dialog.Boolean('Inline this ' - 'occurrence only: ') - return opts - - -class _Extract(Refactoring): - saveall = False - optionals = {'similar': dialog.Boolean('Extract similar pieces: ', 
True), - 'global_': dialog.Boolean('Make global: ')} - kind = None - constructor = rope.refactor.extract.ExtractVariable - - def __init__(self, *args): - super(_Extract, self).__init__(*args) - self.extractor = None - - def _create_refactoring(self): - start, end = self.region - self.extractor = self.constructor(self.project, - self.resource, start, end) - - def _calculate_changes(self, values, task_handle): - similar = values.get('similar') - global_ = values.get('global_') - return self.extractor.get_changes(values['name'], similar=similar, - global_=global_) - - def _get_confs(self): - return {'name': dialog.Data('Extracted %s name: ' % self.kind)} - - -class ExtractVariable(_Extract): - key = 'l' - kind = 'variable' - constructor = rope.refactor.extract.ExtractVariable - - -class ExtractMethod(_Extract): - key = 'm' - kind = 'method' - constructor = rope.refactor.extract.ExtractMethod - - -class OrganizeImports(Refactoring): - key = 'o' - saveall = False - - def __init__(self, *args): - self.organizer = None - super(OrganizeImports, self).__init__(*args) - - def _create_refactoring(self): - self.organizer = rope.refactor.ImportOrganizer(self.project) - - def _calculate_changes(self, values, task_handle): - return self.organizer.organize_imports(self.resource) - - -class MethodObject(Refactoring): - saveall = False - confs = {'classname': dialog.Data('New class name: ', - default='_ExtractedClass')} - def __init__(self, *args): - super(MethodObject, self).__init__(*args) - self.objecter = None - - def _create_refactoring(self): - self.objecter = rope.refactor.method_object.MethodObject( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - classname = values.get('classname') - return self.objecter.get_changes(classname) - - -class IntroduceFactory(Refactoring): - saveall = True - key = 'f' - def __init__(self, *args): - super(IntroduceFactory, self).__init__(*args) - self.factory = None - - def 
_create_refactoring(self): - self.factory = rope.refactor.introduce_factory.IntroduceFactory( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - return self.factory.get_changes(task_handle=task_handle, **values) - - def _get_confs(self): - default = 'create_%s' % self.factory.old_name.lower() - return {'factory_name': dialog.Data('Factory name: ', default)} - - def _get_optionals(self): - return {'global_factory': dialog.Boolean('Make global: ', True), - 'resources': self.resources_option} - - -class ChangeSignature(Refactoring): - saveall = True - key = 's' - def __init__(self, *args): - super(ChangeSignature, self).__init__(*args) - self.changer = None - - def _create_refactoring(self): - self.changer = rope.refactor.change_signature.ChangeSignature( - self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - signature = values.get('signature') - args = re.sub(r'[\s\(\)]+', '', signature).split(',') - olds = [arg[0] for arg in self._get_args()] - - changers = [] - for arg in list(olds): - if arg in args: - continue - changers.append(rope.refactor.change_signature. - ArgumentRemover(olds.index(arg))) - olds.remove(arg) - - order = [] - for index, arg in enumerate(args): - if arg not in olds: - changers.append(rope.refactor.change_signature. - ArgumentAdder(index, arg)) - olds.insert(index, arg) - order.append(olds.index(arg)) - changers.append(rope.refactor.change_signature. 
- ArgumentReorderer(order, autodef='None')) - - del values['signature'] - return self.changer.get_changes(changers, task_handle=task_handle, - **values) - - def _get_args(self): - if hasattr(self.changer, 'get_args'): - return self.changer.get_args() - return self.changer.get_definition_info().args_with_defaults - - def _get_confs(self): - args = [] - for arg, _ in self._get_args(): - args.append(arg) - signature = '(' + ', '.join(args) + ')' - return {'signature': dialog.Data('Change the signature: ', - default=signature)} - - def _get_optionals(self): - opts = {'resources': self.resources_option} - if self.changer.is_method(): - opts['in_hierarchy'] = dialog.Boolean('Rename methods in ' - 'class hierarchy: ') - return opts - - -class _GenerateElement(Refactoring): - def __init__(self, *args): - super(_GenerateElement, self).__init__(*args) - self.generator = None - - def _create_refactoring(self): - kind = self.name.split('_')[-1] - self.generator = rope.contrib.generate.create_generate( - kind, self.project, self.resource, self.offset) - - def _calculate_changes(self, values, task_handle): - return self.generator.get_changes() - - def _done(self): - resource, lineno = self.generator.get_location() - self.interface._goto_location(resource, lineno) - - -class GenerateVariable(_GenerateElement): - key = 'n v' - - -class GenerateFunction(_GenerateElement): - key = 'n f' - - -class GenerateClass(_GenerateElement): - key = 'n c' - - -class GenerateModule(_GenerateElement): - key = 'n m' - - -class GeneratePackage(_GenerateElement): - key = 'n p' - - -def refactoring_name(refactoring): - classname = refactoring.__name__ - result = [] - for c in classname: - if result and c.isupper(): - result.append('_') - result.append(c.lower()) - name = ''.join(result) - return name - -def _resources(project, text): - if text is None or text.strip() == '': - return None - return file_filter.resources(project, text) - - -def runtask(env, command, name, interrupts=True): - return 
RunTask(env, command, name, interrupts)() - - -class RunTask(object): - - def __init__(self, env, task, name, interrupts=True): - self.env = env - self.task = task - self.name = name - self.interrupts = interrupts - - def __call__(self): - handle = taskhandle.TaskHandle(name=self.name) - progress = self.env.create_progress(self.name) - - def update_progress(): - jobset = handle.current_jobset() - if jobset: - percent = jobset.get_percent_done() - if percent is not None: - progress.update(percent) - - handle.add_observer(update_progress) - result = self.task(handle) - progress.done() - return result diff --git a/pylibs/ropevim.py b/pylibs/ropevim.py deleted file mode 100644 index 3890b500..00000000 --- a/pylibs/ropevim.py +++ /dev/null @@ -1,385 +0,0 @@ -"""ropevim, a vim mode for using rope refactoring library""" -import os -import tempfile -import re - -import ropemode.decorators -import ropemode.environment -import ropemode.interface - -import vim - - -class VimUtils(ropemode.environment.Environment): - - def ask(self, prompt, default=None, starting=None): - if starting is None: - starting = '' - if default is not None: - prompt = prompt + ('[%s] ' % default) - result = call('input("%s", "%s")' % (prompt, starting)) - if default is not None and result == '': - return default - return result - - def ask_values(self, prompt, values, default=None, - starting=None, show_values=None): - if show_values or (show_values is None and len(values) < 14): - self._print_values(values) - if default is not None: - prompt = prompt + ('[%s] ' % default) - starting = starting or '' - _completer.values = values - answer = call('input("%s", "%s", "customlist,RopeValueCompleter")' % - (prompt, starting)) - if answer is None: - if 'cancel' in values: - return 'cancel' - return - if default is not None and not answer: - return default - if answer.isdigit() and 0 <= int(answer) < len(values): - return values[int(answer)] - return answer - - def ask_directory(self, prompt, default=None, 
starting=None): - return call('input("%s", ".", "dir")' % prompt) - - def _update_proposals(self, values): - if not self.get('extended_complete'): - return u','.join(u"'%s'" % self._completion_text(proposal) - for proposal in values) - - return u','.join(self._extended_completion(proposal) - for proposal in values) - - def _command(self, command, encode=False): - if encode: - command = command.encode(self._get_encoding()) - vim.command(command) - - def ask_completion(self, prompt, values, starting=None): - if self.get('vim_completion') and 'i' in call('mode()'): - proposals = self._update_proposals(values) - col = int(call('col(".")')) - if starting: - col -= len(starting) - self._command(u'call complete(%s, [%s])' % (col, proposals), - encode=True) - return None - - return self.ask_values(prompt, values, starting=starting, - show_values=False) - - def message(self, message): - echo(message) - - @staticmethod - def _print_values(values): - numbered = [] - for index, value in enumerate(values): - numbered.append('%s. 
%s' % (index, str(value))) - echo('\n'.join(numbered) + '\n') - - def yes_or_no(self, prompt): - return self.ask_values(prompt, ['yes', 'no']) == 'yes' - - def y_or_n(self, prompt): - return self.yes_or_no(prompt) - - def get(self, name): - vimname = 'g:pymode_rope_%s' % name - result = vim.eval(vimname) - if isinstance(result, str) and result.isdigit(): - return int(result) - return result - - def get_offset(self): - result = self._position_to_offset(*self.cursor) - return result - - @staticmethod - def _get_encoding(): - return vim.eval('&encoding') - - def _encode_line(self, line): - return line.encode(self._get_encoding()) - - def _decode_line(self, line): - return line.decode(self._get_encoding()) - - def _position_to_offset(self, lineno, colno): - result = min(colno, len(vim.current.buffer[lineno -1]) + 1) - for line in vim.current.buffer[:lineno-1]: - line = self._decode_line(line) - result += len(line) + 1 - return result - - def get_text(self): - return self._decode_line('\n'.join(vim.current.buffer)) + u'\n' - - def get_region(self): - start = self._position_to_offset(*vim.current.buffer.mark('<')) - end = self._position_to_offset(*vim.current.buffer.mark('>')) - return start, end - - def _get_cursor(self): - lineno, col = vim.current.window.cursor - line = self._decode_line(vim.current.line[:col]) - col = len(line) - return (lineno, col) - - def _set_cursor(self, cursor): - lineno, col = cursor - line = self._decode_line(vim.current.line) - line = self._encode_line(line[:col]) - col = len(line) - vim.current.window.cursor = (lineno, col) - - cursor = property(_get_cursor, _set_cursor) - - @staticmethod - def get_cur_dir(): - return vim.eval('getcwd()') - - def filename(self): - return vim.current.buffer.name - - def is_modified(self): - return vim.eval('&modified') - - def goto_line(self, lineno): - self.cursor = (lineno, 0) - - def insert_line(self, line, lineno): - vim.current.buffer[lineno - 1:lineno - 1] = [line] - - def insert(self, text): - lineno, 
colno = self.cursor - line = vim.current.buffer[lineno - 1] - vim.current.buffer[lineno - 1] = line[:colno] + text + line[colno:] - self.cursor = (lineno, colno + len(text)) - - def delete(self, start, end): - lineno1, colno1 = self._offset_to_position(start - 1) - lineno2, colno2 = self._offset_to_position(end - 1) - lineno, colno = self.cursor - if lineno1 == lineno2: - line = vim.current.buffer[lineno1 - 1] - vim.current.buffer[lineno1 - 1] = line[:colno1] + line[colno2:] - if lineno == lineno1 and colno >= colno1: - diff = colno2 - colno1 - self.cursor = (lineno, max(0, colno - diff)) - - def _offset_to_position(self, offset): - text = self.get_text() - lineno = text.count('\n', 0, offset) + 1 - try: - colno = offset - text.rindex('\n', 0, offset) - 1 - except ValueError: - colno = offset - return lineno, colno - - def filenames(self): - result = [] - for b in vim.buffers: - if b.name: - result.append(b.name) - return result - - def save_files(self, filenames): - vim.command('wall') - - def reload_files(self, filenames, moves=None): - if moves is None: - moves = dict() - initial = self.filename() - for filename in filenames: - self.find_file(moves.get(filename, filename), force=True) - if initial: - self.find_file(initial) - - def find_file(self, filename, readonly=False, other=False, force=False): - if filename != self.filename() or force: - if other: - vim.command('new') - vim.command('e %s' % filename) - if readonly: - vim.command('set nomodifiable') - - def create_progress(self, name): - return VimProgress(name) - - def current_word(self): - return vim.eval('expand("")') - - def push_mark(self): - vim.command('mark `') - - def prefix_value(self, prefix): - return prefix - - def show_occurrences(self, locations): - self._quickfixdefs(locations) - vim.command('cwindow') - - def _quickfixdefs(self, locations): - filename = os.path.join(tempfile.gettempdir(), tempfile.mktemp()) - try: - self._writedefs(locations, filename) - vim.command('let old_errorfile = 
&errorfile') - vim.command('let old_errorformat = &errorformat') - vim.command('set errorformat=%f:%l:\ %m') - vim.command('cfile ' + filename) - vim.command('let &errorformat = old_errorformat') - vim.command('let &errorfile = old_errorfile') - finally: - os.remove(filename) - - @staticmethod - def _writedefs(locations, filename): - tofile = open(filename, 'w') - try: - for location in locations: - err = '%s:%d: - %s\n' % (location.filename, - location.lineno, location.note) - tofile.write(err) - finally: - tofile.close() - - def show_doc(self, docs, altview=False): - if docs: - cmd = 'call pymode#ShowStr("%s")' % str(docs.replace('"', '\\"')) - print cmd - vim.command(cmd) - - def preview_changes(self, diffs): - echo(diffs) - return self.y_or_n('Do the changes? ') - - def local_command(self, name, callback, key=None, prefix=False): - self._add_command(name, callback, key, prefix, - prekey=self.get('local_prefix')) - - def global_command(self, name, callback, key=None, prefix=False): - self._add_command(name, callback, key, prefix, - prekey=self.get('global_prefix')) - - def add_hook(self, name, callback, hook): - mapping = {'before_save': 'FileWritePre,BufWritePre', - 'after_save': 'FileWritePost,BufWritePost', - 'exit': 'VimLeave'} - self._add_function(name, callback) - vim.command('autocmd %s *.py call %s()' % - (mapping[hook], _vim_name(name))) - - def _add_command(self, name, callback, key, prefix, prekey): - self._add_function(name, callback, prefix) - vim.command('command! -range %s call %s()' % - (_vim_name(name), _vim_name(name))) - if key is not None: - key = prekey + key.replace(' ', '') - vim.command('map %s :call %s()' % (key, _vim_name(name))) - - @staticmethod - def _add_function(name, callback, prefix=False): - globals()[name] = callback - arg = 'None' if prefix else '' - vim.command('function! 
%s()\n' % _vim_name(name) + - 'python ropevim.%s(%s)\n' % (name, arg) + - 'endfunction\n') - - def _completion_data(self, proposal): - return proposal - - _docstring_re = re.compile('^[\s\t\n]*([^\n]*)') - - def _extended_completion(self, proposal): - # we are using extended complete and return dicts instead of strings. - # `ci` means "completion item". see `:help complete-items` - word, _, menu = map(lambda x: x.strip(), proposal.name.partition(':')) - ci = dict(word = word, menu = menu or '') - kind = ''.join(s if s not in 'aeyuo' else '' for s in proposal.type)[:3] - - if proposal.scope == 'parameter_keyword': - default = proposal.get_default() - ci["menu"] += '*' if default is None else '= %s' % default - - if menu is '': - obj_doc = proposal.get_doc() - ci["menu"] = self._docstring_re.match(obj_doc).group(1) if obj_doc else '' - - ci['kind'] = kind - ci['menu'] = menu.replace('"', '\\"') - return repr(ci).replace(": u'", ": '") - - -def _vim_name(name): - tokens = name.split('_') - newtokens = ['Rope'] + [token.title() for token in tokens] - return ''.join(newtokens) - - -class VimProgress(object): - - def __init__(self, name): - self.name = name - self.last = 0 - status('%s ... ' % self.name) - - def update(self, percent): - try: - vim.eval('getchar(0)') - except vim.error: - raise KeyboardInterrupt('Task %s was interrupted!' % self.name) - if percent > self.last + 4: - status('%s ... %s%%%%' % (self.name, percent)) - self.last = percent - - def done(self): - status('%s ... done' % self.name) - - -def echo(message): - if isinstance(message, unicode): - message = message.encode(vim.eval('&encoding')) - print message - -def status(message): - if isinstance(message, unicode): - message = message.encode(vim.eval('&encoding')) - vim.command('redraw | echon "%s"' % message) - -def call(command): - return vim.eval(command) - -class _ValueCompleter(object): - - def __init__(self): - self.values = [] - vim.command('python import vim') - vim.command('function! 
RopeValueCompleter(A, L, P)\n' - 'python args = [vim.eval("a:" + p) for p in "ALP"]\n' - 'python ropevim._completer(*args)\n' - 'return s:completions\n' - 'endfunction\n') - - def __call__(self, arg_lead, cmd_line, cursor_pos): - # don't know if self.values can be empty but better safe then sorry - if self.values: - if not isinstance(self.values[0], basestring): - result = [proposal.name for proposal in self.values \ - if proposal.name.startswith(arg_lead)] - else: - result = [proposal for proposal in self.values \ - if proposal.startswith(arg_lead)] - vim.command('let s:completions = %s' % result) - - -ropemode.decorators.logger.message = echo -ropemode.decorators.logger.only_short = True - -_completer = _ValueCompleter() - -_env = VimUtils() -_interface = ropemode.interface.RopeMode(env=_env) diff --git a/pymode/__init__.py b/pymode/__init__.py new file mode 100644 index 00000000..d5e63ba3 --- /dev/null +++ b/pymode/__init__.py @@ -0,0 +1,37 @@ +""" Pymode support functions. """ + +from __future__ import absolute_import + +import sys +import vim # noqa + + +def auto(): + """ Fix PEP8 erorrs in current buffer. """ + from .autopep8 import fix_file + + class Options(object): + aggressive = 2 + diff = False + experimental = True + ignore = vim.eval('g:pymode_lint_ignore') + in_place = True + indent_size = int(vim.eval('&tabstop')) + line_range = None + max_line_length = int(vim.eval('g:pymode_options_max_line_length')) + pep8_passes = 100 + recursive = False + select = vim.eval('g:pymode_lint_select') + verbose = 0 + + fix_file(vim.current.buffer.name, Options) + + +def get_documentation(): + """ Search documentation and append to current buffer. 
""" + from ._compat import StringIO + + sys.stdout, _ = StringIO(), sys.stdout + help(vim.eval('a:word')) + sys.stdout, out = _, sys.stdout.getvalue() + vim.current.buffer.append(str(out).splitlines(), 0) diff --git a/pymode/_compat.py b/pymode/_compat.py new file mode 100644 index 00000000..d859f152 --- /dev/null +++ b/pymode/_compat.py @@ -0,0 +1,98 @@ +""" Compatibility. + + Some py2/py3 compatibility support based on a stripped down + version of six so we don't have to depend on a specific version + of it. + + :copyright: (c) 2014 by Armin Ronacher. + :license: BSD +""" +import sys + +PY2 = sys.version_info[0] == 2 +_identity = lambda x: x + + +if not PY2: + text_type = str + string_types = (str,) + integer_types = (int, ) + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from io import StringIO + from queue import Queue # noqa + + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + + implements_to_string = _identity + +else: + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from cStringIO import StringIO + from Queue import Queue + + exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + + def implements_to_string(cls): + cls.__unicode__ = cls.__str__ + cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + return cls + + +def with_metaclass(meta, *bases): + # This requires a bit of explanation: the basic idea is to make a + # dummy metaclass for one level of class instantiation that replaces + # itself with the actual metaclass. Because of internal type checks + # we also need to make sure that we downgrade the custom metaclass + # for one level to something closer to type (that's why __call__ and + # __init__ comes back from type etc.). 
+ # + # This has the advantage over six.with_metaclass in that it does not + # introduce dummy classes into the final MRO. + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +# Certain versions of pypy have a bug where clearing the exception stack +# breaks the __exit__ function in a very peculiar way. This is currently +# true for pypy 2.2.1 for instance. The second level of exception blocks +# is necessary because pypy seems to forget to check if an exception +# happend until the next bytecode instruction? +BROKEN_PYPY_CTXMGR_EXIT = False +if hasattr(sys, 'pypy_version_info'): + class _Mgr(object): + def __enter__(self): + return self + def __exit__(self, *args): + sys.exc_clear() + try: + try: + with _Mgr(): + raise AssertionError() + except: + raise + except TypeError: + BROKEN_PYPY_CTXMGR_EXIT = True + except AssertionError: + pass + +# pylama:skip=1 diff --git a/pymode/async.py b/pymode/async.py new file mode 100644 index 00000000..dd314d76 --- /dev/null +++ b/pymode/async.py @@ -0,0 +1,6 @@ +""" Python-mode async support. 
""" + +from ._compat import Queue + + +RESULTS = Queue() diff --git a/pymode/autopep8.py b/pymode/autopep8.py new file mode 100644 index 00000000..62e5832f --- /dev/null +++ b/pymode/autopep8.py @@ -0,0 +1,3885 @@ +#!/usr/bin/env python + +# Copyright (C) 2010-2011 Hideo Hattori +# Copyright (C) 2011-2013 Hideo Hattori, Steven Myint +# Copyright (C) 2013-2016 Hideo Hattori, Steven Myint, Bill Wendling +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Automatically formats Python code to conform to the PEP 8 style guide. + +Fixes that only need be done once can be added by adding a function of the form +"fix_(source)" to this module. They should return the fixed source code. +These fixes are picked up by apply_global_fixes(). + +Fixes that depend on pycodestyle should be added as methods to FixPEP8. See the +class documentation for more information. 
+ +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import codecs +import collections +import copy +import difflib +import fnmatch +import inspect +import io +import keyword +import locale +import os +import re +import signal +import sys +import textwrap +import token +import tokenize + +import pycodestyle + + +try: + unicode +except NameError: + unicode = str + + +__version__ = '1.3.2' + + +CR = '\r' +LF = '\n' +CRLF = '\r\n' + + +PYTHON_SHEBANG_REGEX = re.compile(r'^#!.*\bpython[23]?\b\s*$') +LAMBDA_REGEX = re.compile(r'([\w.]+)\s=\slambda\s*([\(\)\w,\s.]*):') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+([^][)(}{]+)\s+(in|is)\s') +BARE_EXCEPT_REGEX = re.compile(r'except\s*:') +STARTSWITH_DEF_REGEX = re.compile(r'^(async\s+def|def)\s.*\):') + + +# For generating line shortening candidates. +SHORTEN_OPERATOR_GROUPS = frozenset([ + frozenset([',']), + frozenset(['%']), + frozenset([',', '(', '[', '{']), + frozenset(['%', '(', '[', '{']), + frozenset([',', '(', '[', '{', '%', '+', '-', '*', '/', '//']), + frozenset(['%', '+', '-', '*', '/', '//']), +]) + + +DEFAULT_IGNORE = 'E24,W503' +DEFAULT_INDENT_SIZE = 4 + + +# W602 is handled separately due to the need to avoid "with_traceback". 
+CODE_TO_2TO3 = { + 'E231': ['ws_comma'], + 'E721': ['idioms'], + 'W601': ['has_key'], + 'W603': ['ne'], + 'W604': ['repr'], + 'W690': ['apply', + 'except', + 'exitfunc', + 'numliterals', + 'operator', + 'paren', + 'reduce', + 'renames', + 'standarderror', + 'sys_exc', + 'throw', + 'tuple_params', + 'xreadlines']} + + +if sys.platform == 'win32': # pragma: no cover + DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8') +else: + DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or + os.path.expanduser('~/.config'), 'pep8') +PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8') + + +MAX_PYTHON_FILE_DETECTION_BYTES = 1024 + + +def open_with_encoding(filename, + encoding=None, mode='r', limit_byte_check=-1): + """Return opened file with a specific encoding.""" + if not encoding: + encoding = detect_encoding(filename, limit_byte_check=limit_byte_check) + + return io.open(filename, mode=mode, encoding=encoding, + newline='') # Preserve line endings + + +def detect_encoding(filename, limit_byte_check=-1): + """Return file encoding.""" + try: + with open(filename, 'rb') as input_file: + from lib2to3.pgen2 import tokenize as lib2to3_tokenize + encoding = lib2to3_tokenize.detect_encoding(input_file.readline)[0] + + with open_with_encoding(filename, encoding) as test_file: + test_file.read(limit_byte_check) + + return encoding + except (LookupError, SyntaxError, UnicodeDecodeError): + return 'latin-1' + + +def readlines_from_file(filename): + """Return contents of file.""" + with open_with_encoding(filename) as input_file: + return input_file.readlines() + + +def extended_blank_lines(logical_line, + blank_lines, + blank_before, + indent_level, + previous_logical): + """Check for missing blank lines after class declaration.""" + if previous_logical.startswith('def '): + if blank_lines and pycodestyle.DOCSTRING_REGEX.match(logical_line): + yield (0, 'E303 too many blank lines ({0})'.format(blank_lines)) + elif pycodestyle.DOCSTRING_REGEX.match(previous_logical): + # Missing 
blank line between class docstring and method declaration. + if ( + indent_level and + not blank_lines and + not blank_before and + logical_line.startswith(('def ')) and + '(self' in logical_line + ): + yield (0, 'E301 expected 1 blank line, found 0') + + +pycodestyle.register_check(extended_blank_lines) + + +def continued_indentation(logical_line, tokens, indent_level, indent_char, + noqa): + """Override pycodestyle's function to provide indentation information.""" + first_row = tokens[0][2][0] + nrows = 1 + tokens[-1][2][0] - first_row + if noqa or nrows == 1: + return + + # indent_next tells us whether the next block is indented. Assuming + # that it is indented by 4 spaces, then we should not allow 4-space + # indents on the final continuation line. In turn, some other + # indents are allowed to have an extra 4 spaces. + indent_next = logical_line.endswith(':') + + row = depth = 0 + valid_hangs = ( + (DEFAULT_INDENT_SIZE,) + if indent_char != '\t' else (DEFAULT_INDENT_SIZE, + 2 * DEFAULT_INDENT_SIZE) + ) + + # Remember how many brackets were opened on each line. + parens = [0] * nrows + + # Relative indents of physical lines. + rel_indent = [0] * nrows + + # For each depth, collect a list of opening rows. + open_rows = [[0]] + # For each depth, memorize the hanging indentation. + hangs = [None] + + # Visual indents. + indent_chances = {} + last_indent = tokens[0][2] + indent = [last_indent[1]] + + last_token_multiline = None + line = None + last_line = '' + last_line_begins_with_multiline = False + for token_type, text, start, end, line in tokens: + + newline = row < start[0] - first_row + if newline: + row = start[0] - first_row + newline = (not last_token_multiline and + token_type not in (tokenize.NL, tokenize.NEWLINE)) + last_line_begins_with_multiline = last_token_multiline + + if newline: + # This is the beginning of a continuation line. + last_indent = start + + # Record the initial indent. 
+ rel_indent[row] = pycodestyle.expand_indent(line) - indent_level + + # Identify closing bracket. + close_bracket = (token_type == tokenize.OP and text in ']})') + + # Is the indent relative to an opening bracket line? + for open_row in reversed(open_rows[depth]): + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: + break + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) + + visual_indent = (not close_bracket and hang > 0 and + indent_chances.get(start[1])) + + if close_bracket and indent[depth]: + # Closing bracket for visual indent. + if start[1] != indent[depth]: + yield (start, 'E124 {0}'.format(indent[depth])) + elif close_bracket and not hang: + pass + elif indent[depth] and start[1] < indent[depth]: + # Visual indent is broken. + yield (start, 'E128 {0}'.format(indent[depth])) + elif (hanging_indent or + (indent_next and + rel_indent[row] == 2 * DEFAULT_INDENT_SIZE)): + # Hanging indent is verified. + if close_bracket: + yield (start, 'E123 {0}'.format(indent_level + + rel_indent[open_row])) + hangs[depth] = hang + elif visual_indent is True: + # Visual indent is verified. + indent[depth] = start[1] + elif visual_indent in (text, unicode): + # Ignore token lined up with matching one from a previous line. + pass + else: + one_indented = (indent_level + rel_indent[open_row] + + DEFAULT_INDENT_SIZE) + # Indent is broken. + if hang <= 0: + error = ('E122', one_indented) + elif indent[depth]: + error = ('E127', indent[depth]) + elif not close_bracket and hangs[depth]: + error = ('E131', one_indented) + elif hang > DEFAULT_INDENT_SIZE: + error = ('E126', one_indented) + else: + hangs[depth] = hang + error = ('E121', one_indented) + + yield (start, '{0} {1}'.format(*error)) + + # Look for visual indenting. 
+ if ( + parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth] + ): + indent[depth] = start[1] + indent_chances[start[1]] = True + # Deal with implicit string concatenation. + elif (token_type in (tokenize.STRING, tokenize.COMMENT) or + text in ('u', 'ur', 'b', 'br')): + indent_chances[start[1]] = unicode + # Special case for the "if" statement because len("if (") is equal to + # 4. + elif not indent_chances and not row and not depth and text == 'if': + indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) + + # Keep track of bracket depth. + if token_type == tokenize.OP: + if text in '([{': + depth += 1 + indent.append(0) + hangs.append(None) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) + parens[row] += 1 + elif text in ')]}' and depth > 0: + # Parent indents should not be more than this one. + prev_indent = indent.pop() or last_indent[1] + hangs.pop() + for d in range(depth): + if indent[d] > prev_indent: + indent[d] = 0 + for ind in list(indent_chances): + if ind >= prev_indent: + del indent_chances[ind] + del open_rows[depth + 1:] + depth -= 1 + if depth: + indent_chances[indent[depth]] = True + for idx in range(row, -1, -1): + if parens[idx]: + parens[idx] -= 1 + break + assert len(indent) == depth + 1 + if ( + start[1] not in indent_chances and + # This is for purposes of speeding up E121 (GitHub #90). + not last_line.rstrip().endswith(',') + ): + # Allow to line up tokens. 
+ indent_chances[start[1]] = text + + last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] + + last_line = line + + if ( + indent_next and + not last_line_begins_with_multiline and + pycodestyle.expand_indent(line) == indent_level + DEFAULT_INDENT_SIZE + ): + pos = (start[0], indent[0] + 4) + desired_indent = indent_level + 2 * DEFAULT_INDENT_SIZE + if visual_indent: + yield (pos, 'E129 {0}'.format(desired_indent)) + else: + yield (pos, 'E125 {0}'.format(desired_indent)) + + +del pycodestyle._checks['logical_line'][pycodestyle.continued_indentation] +pycodestyle.register_check(continued_indentation) + + +class FixPEP8(object): + + """Fix invalid code. + + Fixer methods are prefixed "fix_". The _fix_source() method looks for these + automatically. + + The fixer method can take either one or two arguments (in addition to + self). The first argument is "result", which is the error information from + pycodestyle. The second argument, "logical", is required only for + logical-line fixes. + + The fixer method can return the list of modified lines or None. An empty + list would mean that no changes were made. None would mean that only the + line reported in the pycodestyle error was modified. Note that the modified + line numbers that are returned are indexed at 1. This typically would + correspond with the line number reported in the pycodestyle error + information. 
+ + [fixed method list] + - e111,e114,e115,e116 + - e121,e122,e123,e124,e125,e126,e127,e128,e129 + - e201,e202,e203 + - e211 + - e221,e222,e223,e224,e225 + - e231 + - e251 + - e261,e262 + - e271,e272,e273,e274 + - e301,e302,e303,e304,e306 + - e401 + - e502 + - e701,e702,e703,e704 + - e711,e712,e713,e714 + - e722 + - e731 + - w291 + - w503 + + """ + + def __init__(self, filename, + options, + contents=None, + long_line_ignore_cache=None): + self.filename = filename + if contents is None: + self.source = readlines_from_file(filename) + else: + sio = io.StringIO(contents) + self.source = sio.readlines() + self.options = options + self.indent_word = _get_indentword(''.join(self.source)) + + self.long_line_ignore_cache = ( + set() if long_line_ignore_cache is None + else long_line_ignore_cache) + + # Many fixers are the same even though pycodestyle categorizes them + # differently. + self.fix_e115 = self.fix_e112 + self.fix_e116 = self.fix_e113 + self.fix_e121 = self._fix_reindent + self.fix_e122 = self._fix_reindent + self.fix_e123 = self._fix_reindent + self.fix_e124 = self._fix_reindent + self.fix_e126 = self._fix_reindent + self.fix_e127 = self._fix_reindent + self.fix_e128 = self._fix_reindent + self.fix_e129 = self._fix_reindent + self.fix_e202 = self.fix_e201 + self.fix_e203 = self.fix_e201 + self.fix_e211 = self.fix_e201 + self.fix_e221 = self.fix_e271 + self.fix_e222 = self.fix_e271 + self.fix_e223 = self.fix_e271 + self.fix_e226 = self.fix_e225 + self.fix_e227 = self.fix_e225 + self.fix_e228 = self.fix_e225 + self.fix_e241 = self.fix_e271 + self.fix_e242 = self.fix_e224 + self.fix_e261 = self.fix_e262 + self.fix_e272 = self.fix_e271 + self.fix_e273 = self.fix_e271 + self.fix_e274 = self.fix_e271 + self.fix_e306 = self.fix_e301 + self.fix_e501 = ( + self.fix_long_line_logically if + options and (options.aggressive >= 2 or options.experimental) else + self.fix_long_line_physically) + self.fix_e703 = self.fix_e702 + self.fix_w293 = self.fix_w291 + + def 
_fix_source(self, results): + try: + (logical_start, logical_end) = _find_logical(self.source) + logical_support = True + except (SyntaxError, tokenize.TokenError): # pragma: no cover + logical_support = False + + completed_lines = set() + for result in sorted(results, key=_priority_key): + if result['line'] in completed_lines: + continue + + fixed_methodname = 'fix_' + result['id'].lower() + if hasattr(self, fixed_methodname): + fix = getattr(self, fixed_methodname) + + line_index = result['line'] - 1 + original_line = self.source[line_index] + + is_logical_fix = len(_get_parameters(fix)) > 2 + if is_logical_fix: + logical = None + if logical_support: + logical = _get_logical(self.source, + result, + logical_start, + logical_end) + if logical and set(range( + logical[0][0] + 1, + logical[1][0] + 1)).intersection( + completed_lines): + continue + + modified_lines = fix(result, logical) + else: + modified_lines = fix(result) + + if modified_lines is None: + # Force logical fixes to report what they modified. + assert not is_logical_fix + + if self.source[line_index] == original_line: + modified_lines = [] + + if modified_lines: + completed_lines.update(modified_lines) + elif modified_lines == []: # Empty list means no fix + if self.options.verbose >= 2: + print( + '---> Not fixing {error} on line {line}'.format( + error=result['id'], line=result['line']), + file=sys.stderr) + else: # We assume one-line fix when None. 
+ completed_lines.add(result['line']) + else: + if self.options.verbose >= 3: + print( + "---> '{0}' is not defined.".format(fixed_methodname), + file=sys.stderr) + + info = result['info'].strip() + print('---> {0}:{1}:{2}:{3}'.format(self.filename, + result['line'], + result['column'], + info), + file=sys.stderr) + + def fix(self): + """Return a version of the source code with PEP 8 violations fixed.""" + pep8_options = { + 'ignore': self.options.ignore, + 'select': self.options.select, + 'max_line_length': self.options.max_line_length, + } + results = _execute_pep8(pep8_options, self.source) + + if self.options.verbose: + progress = {} + for r in results: + if r['id'] not in progress: + progress[r['id']] = set() + progress[r['id']].add(r['line']) + print('---> {n} issue(s) to fix {progress}'.format( + n=len(results), progress=progress), file=sys.stderr) + + if self.options.line_range: + start, end = self.options.line_range + results = [r for r in results + if start <= r['line'] <= end] + + self._fix_source(filter_results(source=''.join(self.source), + results=results, + aggressive=self.options.aggressive)) + + if self.options.line_range: + # If number of lines has changed then change line_range. + count = sum(sline.count('\n') + for sline in self.source[start - 1:end]) + self.options.line_range[1] = start + count - 1 + + return ''.join(self.source) + + def _fix_reindent(self, result): + """Fix a badly indented line. + + This is done by adding or removing from its initial indent only. + + """ + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + self.source[line_index] = ' ' * num_indent_spaces + target.lstrip() + + def fix_e112(self, result): + """Fix under-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + if not target.lstrip().startswith('#'): + # Don't screw with invalid syntax. 
+ return [] + + self.source[line_index] = self.indent_word + target + + def fix_e113(self, result): + """Fix over-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + indent = _get_indentation(target) + stripped = target.lstrip() + + if not stripped.startswith('#'): + # Don't screw with invalid syntax. + return [] + + self.source[line_index] = indent[1:] + stripped + + def fix_e125(self, result): + """Fix indentation undistinguish from the next logical line.""" + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + spaces_to_add = num_indent_spaces - len(_get_indentation(target)) + indent = len(_get_indentation(target)) + modified_lines = [] + + while len(_get_indentation(self.source[line_index])) >= indent: + self.source[line_index] = (' ' * spaces_to_add + + self.source[line_index]) + modified_lines.append(1 + line_index) # Line indexed at 1. + line_index -= 1 + + return modified_lines + + def fix_e131(self, result): + """Fix indentation undistinguish from the next logical line.""" + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + spaces_to_add = num_indent_spaces - len(_get_indentation(target)) + + if spaces_to_add >= 0: + self.source[line_index] = (' ' * spaces_to_add + + self.source[line_index]) + else: + offset = abs(spaces_to_add) + self.source[line_index] = self.source[line_index][offset:] + + def fix_e201(self, result): + """Remove extraneous whitespace.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + fixed = fix_whitespace(target, + offset=offset, + replacement='') + + self.source[line_index] = fixed + + def fix_e224(self, result): + """Remove extraneous whitespace around operator.""" + target = self.source[result['line'] - 1] + offset = result['column'] - 1 + fixed = target[:offset] + 
target[offset:].replace('\t', ' ') + self.source[result['line'] - 1] = fixed + + def fix_e225(self, result): + """Fix missing whitespace around operator.""" + target = self.source[result['line'] - 1] + offset = result['column'] - 1 + fixed = target[:offset] + ' ' + target[offset:] + + # Only proceed if non-whitespace characters match. + # And make sure we don't break the indentation. + if ( + fixed.replace(' ', '') == target.replace(' ', '') and + _get_indentation(fixed) == _get_indentation(target) + ): + self.source[result['line'] - 1] = fixed + error_code = result.get('id', 0) + try: + ts = generate_tokens(fixed) + except tokenize.TokenError: + return + if not check_syntax(fixed.lstrip()): + return + errors = list( + pycodestyle.missing_whitespace_around_operator(fixed, ts)) + for e in reversed(errors): + if error_code != e[1].split()[0]: + continue + offset = e[0][1] + fixed = fixed[:offset] + ' ' + fixed[offset:] + self.source[result['line'] - 1] = fixed + else: + return [] + + def fix_e231(self, result): + """Add missing whitespace.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] + fixed = target[:offset].rstrip() + ' ' + target[offset:].lstrip() + self.source[line_index] = fixed + + def fix_e251(self, result): + """Remove whitespace around parameter '=' sign.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + # This is necessary since pycodestyle sometimes reports columns that + # goes past the end of the physical line. 
This happens in cases like, + # foo(bar\n=None) + c = min(result['column'] - 1, + len(target) - 1) + + if target[c].strip(): + fixed = target + else: + fixed = target[:c].rstrip() + target[c:].lstrip() + + # There could be an escaped newline + # + # def foo(a=\ + # 1) + if fixed.endswith(('=\\\n', '=\\\r\n', '=\\\r')): + self.source[line_index] = fixed.rstrip('\n\r \t\\') + self.source[line_index + 1] = self.source[line_index + 1].lstrip() + return [line_index + 1, line_index + 2] # Line indexed at 1 + + self.source[result['line'] - 1] = fixed + + def fix_e262(self, result): + """Fix spacing after comment hash.""" + target = self.source[result['line'] - 1] + offset = result['column'] + + code = target[:offset].rstrip(' \t#') + comment = target[offset:].lstrip(' \t#') + + fixed = code + (' # ' + comment if comment.strip() else '\n') + + self.source[result['line'] - 1] = fixed + + def fix_e271(self, result): + """Fix extraneous whitespace around keywords.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + fixed = fix_whitespace(target, + offset=offset, + replacement=' ') + + if fixed == target: + return [] + else: + self.source[line_index] = fixed + + def fix_e301(self, result): + """Add missing blank line.""" + cr = '\n' + self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] + + def fix_e302(self, result): + """Add missing 2 blank lines.""" + add_linenum = 2 - int(result['info'].split()[-1]) + cr = '\n' * add_linenum + self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] + + def fix_e303(self, result): + """Remove extra blank lines.""" + delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2 + delete_linenum = max(1, delete_linenum) + + # We need to count because pycodestyle reports an offset line number if + # there are comments. 
+ cnt = 0 + line = result['line'] - 2 + modified_lines = [] + while cnt < delete_linenum and line >= 0: + if not self.source[line].strip(): + self.source[line] = '' + modified_lines.append(1 + line) # Line indexed at 1 + cnt += 1 + line -= 1 + + return modified_lines + + def fix_e304(self, result): + """Remove blank line following function decorator.""" + line = result['line'] - 2 + if not self.source[line].strip(): + self.source[line] = '' + + def fix_e305(self, result): + """Add missing 2 blank lines after end of function or class.""" + cr = '\n' + # check comment line + offset = result['line'] - 2 + while True: + if offset < 0: + break + line = self.source[offset].lstrip() + if not line: + break + if line[0] != '#': + break + offset -= 1 + offset += 1 + self.source[offset] = cr + self.source[offset] + + def fix_e401(self, result): + """Put imports on separate lines.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + if not target.lstrip().startswith('import'): + return [] + + indentation = re.split(pattern=r'\bimport\b', + string=target, maxsplit=1)[0] + fixed = (target[:offset].rstrip('\t ,') + '\n' + + indentation + 'import ' + target[offset:].lstrip('\t ,')) + self.source[line_index] = fixed + + def fix_long_line_logically(self, result, logical): + """Try to make lines fit within --max-line-length characters.""" + if ( + not logical or + len(logical[2]) == 1 or + self.source[result['line'] - 1].lstrip().startswith('#') + ): + return self.fix_long_line_physically(result) + + start_line_index = logical[0][0] + end_line_index = logical[1][0] + logical_lines = logical[2] + + previous_line = get_item(self.source, start_line_index - 1, default='') + next_line = get_item(self.source, end_line_index + 1, default='') + + single_line = join_logical_line(''.join(logical_lines)) + + try: + fixed = self.fix_long_line( + target=single_line, + previous_line=previous_line, + next_line=next_line, + 
original=''.join(logical_lines)) + except (SyntaxError, tokenize.TokenError): + return self.fix_long_line_physically(result) + + if fixed: + for line_index in range(start_line_index, end_line_index + 1): + self.source[line_index] = '' + self.source[start_line_index] = fixed + return range(start_line_index + 1, end_line_index + 1) + + return [] + + def fix_long_line_physically(self, result): + """Try to make lines fit within --max-line-length characters.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + previous_line = get_item(self.source, line_index - 1, default='') + next_line = get_item(self.source, line_index + 1, default='') + + try: + fixed = self.fix_long_line( + target=target, + previous_line=previous_line, + next_line=next_line, + original=target) + except (SyntaxError, tokenize.TokenError): + return [] + + if fixed: + self.source[line_index] = fixed + return [line_index + 1] + + return [] + + def fix_long_line(self, target, previous_line, + next_line, original): + cache_entry = (target, previous_line, next_line) + if cache_entry in self.long_line_ignore_cache: + return [] + + if target.lstrip().startswith('#'): + if self.options.aggressive: + # Wrap commented lines. 
+ return shorten_comment( + line=target, + max_line_length=self.options.max_line_length, + last_comment=not next_line.lstrip().startswith('#')) + else: + return [] + + fixed = get_fixed_long_line( + target=target, + previous_line=previous_line, + original=original, + indent_word=self.indent_word, + max_line_length=self.options.max_line_length, + aggressive=self.options.aggressive, + experimental=self.options.experimental, + verbose=self.options.verbose) + + if fixed and not code_almost_equal(original, fixed): + return fixed + + self.long_line_ignore_cache.add(cache_entry) + return None + + def fix_e502(self, result): + """Remove extraneous escape of newline.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + self.source[line_index] = target.rstrip('\n\r \t\\') + '\n' + + def fix_e701(self, result): + """Put colon-separated compound statement on separate lines.""" + line_index = result['line'] - 1 + target = self.source[line_index] + c = result['column'] + + fixed_source = (target[:c] + '\n' + + _get_indentation(target) + self.indent_word + + target[c:].lstrip('\n\r \t\\')) + self.source[result['line'] - 1] = fixed_source + return [result['line'], result['line'] + 1] + + def fix_e702(self, result, logical): + """Put semicolon-separated compound statement on separate lines.""" + if not logical: + return [] # pragma: no cover + logical_lines = logical[2] + + # Avoid applying this when indented. + # https://docs.python.org/reference/compound_stmts.html + for line in logical_lines: + if ':' in line: + return [] + + line_index = result['line'] - 1 + target = self.source[line_index] + + if target.rstrip().endswith('\\'): + # Normalize '1; \\\n2' into '1; 2'. 
+ self.source[line_index] = target.rstrip('\n \r\t\\') + self.source[line_index + 1] = self.source[line_index + 1].lstrip() + return [line_index + 1, line_index + 2] + + if target.rstrip().endswith(';'): + self.source[line_index] = target.rstrip('\n \r\t;') + '\n' + return [line_index + 1] + + offset = result['column'] - 1 + first = target[:offset].rstrip(';').rstrip() + second = (_get_indentation(logical_lines[0]) + + target[offset:].lstrip(';').lstrip()) + + # Find inline comment. + inline_comment = None + if target[offset:].lstrip(';').lstrip()[:2] == '# ': + inline_comment = target[offset:].lstrip(';') + + if inline_comment: + self.source[line_index] = first + inline_comment + else: + self.source[line_index] = first + '\n' + second + return [line_index + 1] + + def fix_e704(self, result): + """Fix multiple statements on one line def""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + match = STARTSWITH_DEF_REGEX.match(target) + if match: + self.source[line_index] = '{0}\n{1}{2}'.format( + match.group(0), + _get_indentation(target) + self.indent_word, + target[match.end(0):].lstrip()) + + def fix_e711(self, result): + """Fix comparison with None.""" + (line_index, offset, target) = get_index_offset_contents(result, + self.source) + + right_offset = offset + 2 + if right_offset >= len(target): + return [] + + left = target[:offset].rstrip() + center = target[offset:right_offset] + right = target[right_offset:].lstrip() + + if not right.startswith('None'): + return [] + + if center.strip() == '==': + new_center = 'is' + elif center.strip() == '!=': + new_center = 'is not' + else: + return [] + + self.source[line_index] = ' '.join([left, new_center, right]) + + def fix_e712(self, result): + """Fix (trivial case of) comparison with boolean.""" + (line_index, offset, target) = get_index_offset_contents(result, + self.source) + + # Handle very easy "not" special cases. 
+ if re.match(r'^\s*if [\w.]+ == False:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) == False:', + r'if not \1:', target, count=1) + elif re.match(r'^\s*if [\w.]+ != True:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) != True:', + r'if not \1:', target, count=1) + else: + right_offset = offset + 2 + if right_offset >= len(target): + return [] + + left = target[:offset].rstrip() + center = target[offset:right_offset] + right = target[right_offset:].lstrip() + + # Handle simple cases only. + new_right = None + if center.strip() == '==': + if re.match(r'\bTrue\b', right): + new_right = re.sub(r'\bTrue\b *', '', right, count=1) + elif center.strip() == '!=': + if re.match(r'\bFalse\b', right): + new_right = re.sub(r'\bFalse\b *', '', right, count=1) + + if new_right is None: + return [] + + if new_right[0].isalnum(): + new_right = ' ' + new_right + + self.source[line_index] = left + new_right + + def fix_e713(self, result): + """Fix (trivial case of) non-membership check.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + + match = COMPARE_NEGATIVE_REGEX.search(target) + if match: + if match.group(3) == 'in': + pos_start = match.start(1) + self.source[line_index] = '{0}{1} {2} {3} {4}'.format( + target[:pos_start], match.group(2), match.group(1), + match.group(3), target[match.end():]) + + def fix_e714(self, result): + """Fix object identity should be 'is not' case.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + + match = COMPARE_NEGATIVE_REGEX.search(target) + if match: + if match.group(3) == 'is': + pos_start = match.start(1) + self.source[line_index] = '{0}{1} {2} {3} {4}'.format( + target[:pos_start], match.group(2), match.group(3), + match.group(1), target[match.end():]) + + def fix_e722(self, result): + """fix bare except""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + match = BARE_EXCEPT_REGEX.search(target) + if match: + 
self.source[line_index] = '{0}{1}{2}'.format( + target[:result['column'] - 1], "except BaseException:", + target[match.end():]) + + def fix_e731(self, result): + """Fix do not assign a lambda expression check.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + match = LAMBDA_REGEX.search(target) + if match: + end = match.end() + self.source[line_index] = '{0}def {1}({2}): return {3}'.format( + target[:match.start(0)], match.group(1), match.group(2), + target[end:].lstrip()) + + def fix_w291(self, result): + """Remove trailing whitespace.""" + fixed_line = self.source[result['line'] - 1].rstrip() + self.source[result['line'] - 1] = fixed_line + '\n' + + def fix_w391(self, _): + """Remove trailing blank lines.""" + blank_count = 0 + for line in reversed(self.source): + line = line.rstrip() + if line: + break + else: + blank_count += 1 + + original_length = len(self.source) + self.source = self.source[:original_length - blank_count] + return range(1, 1 + original_length) + + def fix_w503(self, result): + (line_index, _, target) = get_index_offset_contents(result, + self.source) + one_string_token = target.split()[0] + try: + ts = generate_tokens(one_string_token) + except tokenize.TokenError: + return + if not _is_binary_operator(ts[0][0], one_string_token): + return + # find comment + comment_index = None + for i in range(5): + # NOTE: try to parse code in 5 times + if (line_index - i) < 0: + break + from_index = line_index - i - 1 + to_index = line_index + 1 + try: + ts = generate_tokens("".join(self.source[from_index:to_index])) + except Exception: + continue + newline_count = 0 + newline_index = [] + for i, t in enumerate(ts): + if t[0] in (tokenize.NEWLINE, tokenize.NL): + newline_index.append(i) + newline_count += 1 + if newline_count > 2: + tts = ts[newline_index[-3]:] + else: + tts = ts + old = None + for t in tts: + if tokenize.COMMENT == t[0]: + if old is None: + comment_index = 0 + else: + comment_index = old[3][1] + break + 
old = t + break + i = target.index(one_string_token) + self.source[line_index] = '{0}{1}'.format( + target[:i], target[i + len(one_string_token):]) + nl = find_newline(self.source[line_index - 1:line_index]) + before_line = self.source[line_index - 1] + bl = before_line.index(nl) + if comment_index: + self.source[line_index - 1] = '{0} {1} {2}'.format( + before_line[:comment_index], one_string_token, + before_line[comment_index + 1:]) + else: + self.source[line_index - 1] = '{0} {1}{2}'.format( + before_line[:bl], one_string_token, before_line[bl:]) + + +def get_index_offset_contents(result, source): + """Return (line_index, column_offset, line_contents).""" + line_index = result['line'] - 1 + return (line_index, + result['column'] - 1, + source[line_index]) + + +def get_fixed_long_line(target, previous_line, original, + indent_word=' ', max_line_length=79, + aggressive=False, experimental=False, verbose=False): + """Break up long line and return result. + + Do this by generating multiple reformatted candidates and then + ranking the candidates to heuristically select the best option. + + """ + indent = _get_indentation(target) + source = target[len(indent):] + assert source.lstrip() == source + assert not target.lstrip().startswith('#') + + # Check for partial multiline. + tokens = list(generate_tokens(source)) + + candidates = shorten_line( + tokens, source, indent, + indent_word, + max_line_length, + aggressive=aggressive, + experimental=experimental, + previous_line=previous_line) + + # Also sort alphabetically as a tie breaker (for determinism). + candidates = sorted( + sorted(set(candidates).union([target, original])), + key=lambda x: line_shortening_rank( + x, + indent_word, + max_line_length, + experimental=experimental)) + + if verbose >= 4: + print(('-' * 79 + '\n').join([''] + candidates + ['']), + file=wrap_output(sys.stderr, 'utf-8')) + + if candidates: + best_candidate = candidates[0] + + # Don't allow things to get longer. 
+ if longest_line_length(best_candidate) > longest_line_length(original): + return None + + return best_candidate + + +def longest_line_length(code): + """Return length of longest line.""" + return max(len(line) for line in code.splitlines()) + + +def join_logical_line(logical_line): + """Return single line based on logical line input.""" + indentation = _get_indentation(logical_line) + + return indentation + untokenize_without_newlines( + generate_tokens(logical_line.lstrip())) + '\n' + + +def untokenize_without_newlines(tokens): + """Return source code based on tokens.""" + text = '' + last_row = 0 + last_column = -1 + + for t in tokens: + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + if start_row > last_row: + last_column = 0 + if ( + (start_column > last_column or token_string == '\n') and + not text.endswith(' ') + ): + text += ' ' + + if token_string != '\n': + text += token_string + + last_row = end_row + last_column = end_column + + return text.rstrip() + + +def _find_logical(source_lines): + # Make a variable which is the index of all the starts of lines. + logical_start = [] + logical_end = [] + last_newline = True + parens = 0 + for t in generate_tokens(''.join(source_lines)): + if t[0] in [tokenize.COMMENT, tokenize.DEDENT, + tokenize.INDENT, tokenize.NL, + tokenize.ENDMARKER]: + continue + if not parens and t[0] in [tokenize.NEWLINE, tokenize.SEMI]: + last_newline = True + logical_end.append((t[3][0] - 1, t[2][1])) + continue + if last_newline and not parens: + logical_start.append((t[2][0] - 1, t[2][1])) + last_newline = False + if t[0] == tokenize.OP: + if t[1] in '([{': + parens += 1 + elif t[1] in '}])': + parens -= 1 + return (logical_start, logical_end) + + +def _get_logical(source_lines, result, logical_start, logical_end): + """Return the logical line corresponding to the result. + + Assumes input is already E702-clean. 
+ + """ + row = result['line'] - 1 + col = result['column'] - 1 + ls = None + le = None + for i in range(0, len(logical_start), 1): + assert logical_end + x = logical_end[i] + if x[0] > row or (x[0] == row and x[1] > col): + le = x + ls = logical_start[i] + break + if ls is None: + return None + original = source_lines[ls[0]:le[0] + 1] + return ls, le, original + + +def get_item(items, index, default=None): + if 0 <= index < len(items): + return items[index] + + return default + + +def reindent(source, indent_size): + """Reindent all lines.""" + reindenter = Reindenter(source) + return reindenter.run(indent_size) + + +def code_almost_equal(a, b): + """Return True if code is similar. + + Ignore whitespace when comparing specific line. + + """ + split_a = split_and_strip_non_empty_lines(a) + split_b = split_and_strip_non_empty_lines(b) + + if len(split_a) != len(split_b): + return False + + for (index, _) in enumerate(split_a): + if ''.join(split_a[index].split()) != ''.join(split_b[index].split()): + return False + + return True + + +def split_and_strip_non_empty_lines(text): + """Return lines split by newline. + + Ignore empty lines. + + """ + return [line.strip() for line in text.splitlines() if line.strip()] + + +def fix_e265(source, aggressive=False): # pylint: disable=unused-argument + """Format block comments.""" + if '#' not in source: + # Optimization. + return source + + ignored_line_numbers = multiline_string_lines( + source, + include_docstrings=True) | set(commented_out_code_lines(source)) + + fixed_lines = [] + sio = io.StringIO(source) + for (line_number, line) in enumerate(sio.readlines(), start=1): + if ( + line.lstrip().startswith('#') and + line_number not in ignored_line_numbers and + not pycodestyle.noqa(line) + ): + indentation = _get_indentation(line) + line = line.lstrip() + + # Normalize beginning if not a shebang. 
+ if len(line) > 1: + pos = next((index for index, c in enumerate(line) + if c != '#')) + if ( + # Leave multiple spaces like '# ' alone. + (line[:pos].count('#') > 1 or line[1].isalnum()) and + # Leave stylistic outlined blocks alone. + not line.rstrip().endswith('#') + ): + line = '# ' + line.lstrip('# \t') + + fixed_lines.append(indentation + line) + else: + fixed_lines.append(line) + + return ''.join(fixed_lines) + + +def refactor(source, fixer_names, ignore=None, filename=''): + """Return refactored code using lib2to3. + + Skip if ignore string is produced in the refactored code. + + """ + from lib2to3 import pgen2 + try: + new_text = refactor_with_2to3(source, + fixer_names=fixer_names, + filename=filename) + except (pgen2.parse.ParseError, + SyntaxError, + UnicodeDecodeError, + UnicodeEncodeError): + return source + + if ignore: + if ignore in new_text and ignore not in source: + return source + + return new_text + + +def code_to_2to3(select, ignore): + fixes = set() + for code, fix in CODE_TO_2TO3.items(): + if code_match(code, select=select, ignore=ignore): + fixes |= set(fix) + return fixes + + +def fix_2to3(source, + aggressive=True, select=None, ignore=None, filename=''): + """Fix various deprecated code (via lib2to3).""" + if not aggressive: + return source + + select = select or [] + ignore = ignore or [] + + return refactor(source, + code_to_2to3(select=select, + ignore=ignore), + filename=filename) + + +def fix_w602(source, aggressive=True): + """Fix deprecated form of raising exception.""" + if not aggressive: + return source + + return refactor(source, ['raise'], + ignore='with_traceback') + + +def find_newline(source): + """Return type of newline used in source. + + Input is a list of lines. 
+ + """ + assert not isinstance(source, unicode) + + counter = collections.defaultdict(int) + for line in source: + if line.endswith(CRLF): + counter[CRLF] += 1 + elif line.endswith(CR): + counter[CR] += 1 + elif line.endswith(LF): + counter[LF] += 1 + + return (sorted(counter, key=counter.get, reverse=True) or [LF])[0] + + +def _get_indentword(source): + """Return indentation type.""" + indent_word = ' ' # Default in case source has no indentation + try: + for t in generate_tokens(source): + if t[0] == token.INDENT: + indent_word = t[1] + break + except (SyntaxError, tokenize.TokenError): + pass + return indent_word + + +def _get_indentation(line): + """Return leading whitespace.""" + if line.strip(): + non_whitespace_index = len(line) - len(line.lstrip()) + return line[:non_whitespace_index] + + return '' + + +def get_diff_text(old, new, filename): + """Return text of unified diff between old and new.""" + newline = '\n' + diff = difflib.unified_diff( + old, new, + 'original/' + filename, + 'fixed/' + filename, + lineterm=newline) + + text = '' + for line in diff: + text += line + + # Work around missing newline (http://bugs.python.org/issue2142). + if text and not line.endswith(newline): + text += newline + r'\ No newline at end of file' + newline + + return text + + +def _priority_key(pep8_result): + """Key for sorting PEP8 results. + + Global fixes should be done first. This is important for things like + indentation. + + """ + priority = [ + # Fix multiline colon-based before semicolon based. + 'e701', + # Break multiline statements early. + 'e702', + # Things that make lines longer. + 'e225', 'e231', + # Remove extraneous whitespace before breaking lines. + 'e201', + # Shorten whitespace in comment before resorting to wrapping. + 'e262' + ] + middle_index = 10000 + lowest_priority = [ + # We need to shorten lines last since the logical fixer can get in a + # loop, which causes us to exit early. 
+ 'e501', + 'w503' + ] + key = pep8_result['id'].lower() + try: + return priority.index(key) + except ValueError: + try: + return middle_index + lowest_priority.index(key) + 1 + except ValueError: + return middle_index + + +def shorten_line(tokens, source, indentation, indent_word, max_line_length, + aggressive=False, experimental=False, previous_line=''): + """Separate line at OPERATOR. + + Multiple candidates will be yielded. + + """ + for candidate in _shorten_line(tokens=tokens, + source=source, + indentation=indentation, + indent_word=indent_word, + aggressive=aggressive, + previous_line=previous_line): + yield candidate + + if aggressive: + for key_token_strings in SHORTEN_OPERATOR_GROUPS: + shortened = _shorten_line_at_tokens( + tokens=tokens, + source=source, + indentation=indentation, + indent_word=indent_word, + key_token_strings=key_token_strings, + aggressive=aggressive) + + if shortened is not None and shortened != source: + yield shortened + + if experimental: + for shortened in _shorten_line_at_tokens_new( + tokens=tokens, + source=source, + indentation=indentation, + max_line_length=max_line_length): + + yield shortened + + +def _shorten_line(tokens, source, indentation, indent_word, + aggressive=False, previous_line=''): + """Separate line at OPERATOR. + + The input is expected to be free of newlines except for inside multiline + strings and at the end. + + Multiple candidates will be yielded. + + """ + for (token_type, + token_string, + start_offset, + end_offset) in token_offsets(tokens): + + if ( + token_type == tokenize.COMMENT and + not is_probably_part_of_multiline(previous_line) and + not is_probably_part_of_multiline(source) and + not source[start_offset + 1:].strip().lower().startswith( + ('noqa', 'pragma:', 'pylint:')) + ): + # Move inline comments to previous line. 
+ first = source[:start_offset] + second = source[start_offset:] + yield (indentation + second.strip() + '\n' + + indentation + first.strip() + '\n') + elif token_type == token.OP and token_string != '=': + # Don't break on '=' after keyword as this violates PEP 8. + + assert token_type != token.INDENT + + first = source[:end_offset] + + second_indent = indentation + if first.rstrip().endswith('('): + second_indent += indent_word + elif '(' in first: + second_indent += ' ' * (1 + first.find('(')) + else: + second_indent += indent_word + + second = (second_indent + source[end_offset:].lstrip()) + if ( + not second.strip() or + second.lstrip().startswith('#') + ): + continue + + # Do not begin a line with a comma + if second.lstrip().startswith(','): + continue + # Do end a line with a dot + if first.rstrip().endswith('.'): + continue + if token_string in '+-*/': + fixed = first + ' \\' + '\n' + second + else: + fixed = first + '\n' + second + + # Only fix if syntax is okay. + if check_syntax(normalize_multiline(fixed) + if aggressive else fixed): + yield indentation + fixed + + +def _is_binary_operator(token_type, text): + return ((token_type == tokenize.OP or text in ['and', 'or']) and + text not in '()[]{},:.;@=%~') + + +# A convenient way to handle tokens. +Token = collections.namedtuple('Token', ['token_type', 'token_string', + 'spos', 'epos', 'line']) + + +class ReformattedLines(object): + + """The reflowed lines of atoms. + + Each part of the line is represented as an "atom." They can be moved + around when need be to get the optimal formatting. 
+ + """ + + ########################################################################### + # Private Classes + + class _Indent(object): + + """Represent an indentation in the atom stream.""" + + def __init__(self, indent_amt): + self._indent_amt = indent_amt + + def emit(self): + return ' ' * self._indent_amt + + @property + def size(self): + return self._indent_amt + + class _Space(object): + + """Represent a space in the atom stream.""" + + def emit(self): + return ' ' + + @property + def size(self): + return 1 + + class _LineBreak(object): + + """Represent a line break in the atom stream.""" + + def emit(self): + return '\n' + + @property + def size(self): + return 0 + + def __init__(self, max_line_length): + self._max_line_length = max_line_length + self._lines = [] + self._bracket_depth = 0 + self._prev_item = None + self._prev_prev_item = None + + def __repr__(self): + return self.emit() + + ########################################################################### + # Public Methods + + def add(self, obj, indent_amt, break_after_open_bracket): + if isinstance(obj, Atom): + self._add_item(obj, indent_amt) + return + + self._add_container(obj, indent_amt, break_after_open_bracket) + + def add_comment(self, item): + num_spaces = 2 + if len(self._lines) > 1: + if isinstance(self._lines[-1], self._Space): + num_spaces -= 1 + if len(self._lines) > 2: + if isinstance(self._lines[-2], self._Space): + num_spaces -= 1 + + while num_spaces > 0: + self._lines.append(self._Space()) + num_spaces -= 1 + self._lines.append(item) + + def add_indent(self, indent_amt): + self._lines.append(self._Indent(indent_amt)) + + def add_line_break(self, indent): + self._lines.append(self._LineBreak()) + self.add_indent(len(indent)) + + def add_line_break_at(self, index, indent_amt): + self._lines.insert(index, self._LineBreak()) + self._lines.insert(index + 1, self._Indent(indent_amt)) + + def add_space_if_needed(self, curr_text, equal=False): + if ( + not self._lines or isinstance( + 
self._lines[-1], (self._LineBreak, self._Indent, self._Space)) + ): + return + + prev_text = unicode(self._prev_item) + prev_prev_text = ( + unicode(self._prev_prev_item) if self._prev_prev_item else '') + + if ( + # The previous item was a keyword or identifier and the current + # item isn't an operator that doesn't require a space. + ((self._prev_item.is_keyword or self._prev_item.is_string or + self._prev_item.is_name or self._prev_item.is_number) and + (curr_text[0] not in '([{.,:}])' or + (curr_text[0] == '=' and equal))) or + + # Don't place spaces around a '.', unless it's in an 'import' + # statement. + ((prev_prev_text != 'from' and prev_text[-1] != '.' and + curr_text != 'import') and + + # Don't place a space before a colon. + curr_text[0] != ':' and + + # Don't split up ending brackets by spaces. + ((prev_text[-1] in '}])' and curr_text[0] not in '.,}])') or + + # Put a space after a colon or comma. + prev_text[-1] in ':,' or + + # Put space around '=' if asked to. + (equal and prev_text == '=') or + + # Put spaces around non-unary arithmetic operators. 
+ ((self._prev_prev_item and + (prev_text not in '+-' and + (self._prev_prev_item.is_name or + self._prev_prev_item.is_number or + self._prev_prev_item.is_string)) and + prev_text in ('+', '-', '%', '*', '/', '//', '**', 'in'))))) + ): + self._lines.append(self._Space()) + + def previous_item(self): + """Return the previous non-whitespace item.""" + return self._prev_item + + def fits_on_current_line(self, item_extent): + return self.current_size() + item_extent <= self._max_line_length + + def current_size(self): + """The size of the current line minus the indentation.""" + size = 0 + for item in reversed(self._lines): + size += item.size + if isinstance(item, self._LineBreak): + break + + return size + + def line_empty(self): + return (self._lines and + isinstance(self._lines[-1], + (self._LineBreak, self._Indent))) + + def emit(self): + string = '' + for item in self._lines: + if isinstance(item, self._LineBreak): + string = string.rstrip() + string += item.emit() + + return string.rstrip() + '\n' + + ########################################################################### + # Private Methods + + def _add_item(self, item, indent_amt): + """Add an item to the line. + + Reflow the line to get the best formatting after the item is + inserted. The bracket depth indicates if the item is being + inserted inside of a container or not. + + """ + if self._prev_item and self._prev_item.is_string and item.is_string: + # Place consecutive string literals on separate lines. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + item_text = unicode(item) + if self._lines and self._bracket_depth: + # Adding the item into a container. + self._prevent_default_initializer_splitting(item, indent_amt) + + if item_text in '.,)]}': + self._split_after_delimiter(item, indent_amt) + + elif self._lines and not self.line_empty(): + # Adding the item outside of a container. 
+ if self.fits_on_current_line(len(item_text)): + self._enforce_space(item) + + else: + # Line break for the new item. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + self._lines.append(item) + self._prev_item, self._prev_prev_item = item, self._prev_item + + if item_text in '([{': + self._bracket_depth += 1 + + elif item_text in '}])': + self._bracket_depth -= 1 + assert self._bracket_depth >= 0 + + def _add_container(self, container, indent_amt, break_after_open_bracket): + actual_indent = indent_amt + 1 + + if ( + unicode(self._prev_item) != '=' and + not self.line_empty() and + not self.fits_on_current_line( + container.size + self._bracket_depth + 2) + ): + + if unicode(container)[0] == '(' and self._prev_item.is_name: + # Don't split before the opening bracket of a call. + break_after_open_bracket = True + actual_indent = indent_amt + 4 + elif ( + break_after_open_bracket or + unicode(self._prev_item) not in '([{' + ): + # If the container doesn't fit on the current line and the + # current line isn't empty, place the container on the next + # line. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + break_after_open_bracket = False + else: + actual_indent = self.current_size() + 1 + break_after_open_bracket = False + + if isinstance(container, (ListComprehension, IfExpression)): + actual_indent = indent_amt + + # Increase the continued indentation only if recursing on a + # container. + container.reflow(self, ' ' * actual_indent, + break_after_open_bracket=break_after_open_bracket) + + def _prevent_default_initializer_splitting(self, item, indent_amt): + """Prevent splitting between a default initializer. + + When there is a default initializer, it's best to keep it all on + the same line. It's nicer and more readable, even if it goes + over the maximum allowable line length. 
This goes back along the + current line to determine if we have a default initializer, and, + if so, to remove extraneous whitespaces and add a line + break/indent before it if needed. + + """ + if unicode(item) == '=': + # This is the assignment in the initializer. Just remove spaces for + # now. + self._delete_whitespace() + return + + if (not self._prev_item or not self._prev_prev_item or + unicode(self._prev_item) != '='): + return + + self._delete_whitespace() + prev_prev_index = self._lines.index(self._prev_prev_item) + + if ( + isinstance(self._lines[prev_prev_index - 1], self._Indent) or + self.fits_on_current_line(item.size + 1) + ): + # The default initializer is already the only item on this line. + # Don't insert a newline here. + return + + # Replace the space with a newline/indent combo. + if isinstance(self._lines[prev_prev_index - 1], self._Space): + del self._lines[prev_prev_index - 1] + + self.add_line_break_at(self._lines.index(self._prev_prev_item), + indent_amt) + + def _split_after_delimiter(self, item, indent_amt): + """Split the line only after a delimiter.""" + self._delete_whitespace() + + if self.fits_on_current_line(item.size): + return + + last_space = None + for current_item in reversed(self._lines): + if ( + last_space and + (not isinstance(current_item, Atom) or + not current_item.is_colon) + ): + break + else: + last_space = None + if isinstance(current_item, self._Space): + last_space = current_item + if isinstance(current_item, (self._LineBreak, self._Indent)): + return + + if not last_space: + return + + self.add_line_break_at(self._lines.index(last_space), indent_amt) + + def _enforce_space(self, item): + """Enforce a space in certain situations. + + There are cases where we will want a space where normally we + wouldn't put one. This just enforces the addition of a space. 
+ + """ + if isinstance(self._lines[-1], + (self._Space, self._LineBreak, self._Indent)): + return + + if not self._prev_item: + return + + item_text = unicode(item) + prev_text = unicode(self._prev_item) + + # Prefer a space around a '.' in an import statement, and between the + # 'import' and '('. + if ( + (item_text == '.' and prev_text == 'from') or + (item_text == 'import' and prev_text == '.') or + (item_text == '(' and prev_text == 'import') + ): + self._lines.append(self._Space()) + + def _delete_whitespace(self): + """Delete all whitespace from the end of the line.""" + while isinstance(self._lines[-1], (self._Space, self._LineBreak, + self._Indent)): + del self._lines[-1] + + +class Atom(object): + + """The smallest unbreakable unit that can be reflowed.""" + + def __init__(self, atom): + self._atom = atom + + def __repr__(self): + return self._atom.token_string + + def __len__(self): + return self.size + + def reflow( + self, reflowed_lines, continued_indent, extent, + break_after_open_bracket=False, + is_list_comp_or_if_expr=False, + next_is_dot=False + ): + if self._atom.token_type == tokenize.COMMENT: + reflowed_lines.add_comment(self) + return + + total_size = extent if extent else self.size + + if self._atom.token_string not in ',:([{}])': + # Some atoms will need an extra 1-sized space token after them. + total_size += 1 + + prev_item = reflowed_lines.previous_item() + if ( + not is_list_comp_or_if_expr and + not reflowed_lines.fits_on_current_line(total_size) and + not (next_is_dot and + reflowed_lines.fits_on_current_line(self.size + 1)) and + not reflowed_lines.line_empty() and + not self.is_colon and + not (prev_item and prev_item.is_name and + unicode(self) == '(') + ): + # Start a new line if there is already something on the line and + # adding this atom would make it go over the max line length. 
+ reflowed_lines.add_line_break(continued_indent) + else: + reflowed_lines.add_space_if_needed(unicode(self)) + + reflowed_lines.add(self, len(continued_indent), + break_after_open_bracket) + + def emit(self): + return self.__repr__() + + @property + def is_keyword(self): + return keyword.iskeyword(self._atom.token_string) + + @property + def is_string(self): + return self._atom.token_type == tokenize.STRING + + @property + def is_name(self): + return self._atom.token_type == tokenize.NAME + + @property + def is_number(self): + return self._atom.token_type == tokenize.NUMBER + + @property + def is_comma(self): + return self._atom.token_string == ',' + + @property + def is_colon(self): + return self._atom.token_string == ':' + + @property + def size(self): + return len(self._atom.token_string) + + +class Container(object): + + """Base class for all container types.""" + + def __init__(self, items): + self._items = items + + def __repr__(self): + string = '' + last_was_keyword = False + + for item in self._items: + if item.is_comma: + string += ', ' + elif item.is_colon: + string += ': ' + else: + item_string = unicode(item) + if ( + string and + (last_was_keyword or + (not string.endswith(tuple('([{,.:}]) ')) and + not item_string.startswith(tuple('([{,.:}])')))) + ): + string += ' ' + string += item_string + + last_was_keyword = item.is_keyword + return string + + def __iter__(self): + for element in self._items: + yield element + + def __getitem__(self, idx): + return self._items[idx] + + def reflow(self, reflowed_lines, continued_indent, + break_after_open_bracket=False): + last_was_container = False + for (index, item) in enumerate(self._items): + next_item = get_item(self._items, index + 1) + + if isinstance(item, Atom): + is_list_comp_or_if_expr = ( + isinstance(self, (ListComprehension, IfExpression))) + item.reflow(reflowed_lines, continued_indent, + self._get_extent(index), + is_list_comp_or_if_expr=is_list_comp_or_if_expr, + next_is_dot=(next_item and + 
unicode(next_item) == '.')) + if last_was_container and item.is_comma: + reflowed_lines.add_line_break(continued_indent) + last_was_container = False + else: # isinstance(item, Container) + reflowed_lines.add(item, len(continued_indent), + break_after_open_bracket) + last_was_container = not isinstance(item, (ListComprehension, + IfExpression)) + + if ( + break_after_open_bracket and index == 0 and + # Prefer to keep empty containers together instead of + # separating them. + unicode(item) == self.open_bracket and + (not next_item or unicode(next_item) != self.close_bracket) and + (len(self._items) != 3 or not isinstance(next_item, Atom)) + ): + reflowed_lines.add_line_break(continued_indent) + break_after_open_bracket = False + else: + next_next_item = get_item(self._items, index + 2) + if ( + unicode(item) not in ['.', '%', 'in'] and + next_item and not isinstance(next_item, Container) and + unicode(next_item) != ':' and + next_next_item and (not isinstance(next_next_item, Atom) or + unicode(next_item) == 'not') and + not reflowed_lines.line_empty() and + not reflowed_lines.fits_on_current_line( + self._get_extent(index + 1) + 2) + ): + reflowed_lines.add_line_break(continued_indent) + + def _get_extent(self, index): + """The extent of the full element. + + E.g., the length of a function call or keyword. + + """ + extent = 0 + prev_item = get_item(self._items, index - 1) + seen_dot = prev_item and unicode(prev_item) == '.' 
+ while index < len(self._items): + item = get_item(self._items, index) + index += 1 + + if isinstance(item, (ListComprehension, IfExpression)): + break + + if isinstance(item, Container): + if prev_item and prev_item.is_name: + if seen_dot: + extent += 1 + else: + extent += item.size + + prev_item = item + continue + elif (unicode(item) not in ['.', '=', ':', 'not'] and + not item.is_name and not item.is_string): + break + + if unicode(item) == '.': + seen_dot = True + + extent += item.size + prev_item = item + + return extent + + @property + def is_string(self): + return False + + @property + def size(self): + return len(self.__repr__()) + + @property + def is_keyword(self): + return False + + @property + def is_name(self): + return False + + @property + def is_comma(self): + return False + + @property + def is_colon(self): + return False + + @property + def open_bracket(self): + return None + + @property + def close_bracket(self): + return None + + +class Tuple(Container): + + """A high-level representation of a tuple.""" + + @property + def open_bracket(self): + return '(' + + @property + def close_bracket(self): + return ')' + + +class List(Container): + + """A high-level representation of a list.""" + + @property + def open_bracket(self): + return '[' + + @property + def close_bracket(self): + return ']' + + +class DictOrSet(Container): + + """A high-level representation of a dictionary or set.""" + + @property + def open_bracket(self): + return '{' + + @property + def close_bracket(self): + return '}' + + +class ListComprehension(Container): + + """A high-level representation of a list comprehension.""" + + @property + def size(self): + length = 0 + for item in self._items: + if isinstance(item, IfExpression): + break + length += item.size + return length + + +class IfExpression(Container): + + """A high-level representation of an if-expression.""" + + +def _parse_container(tokens, index, for_or_if=None): + """Parse a high-level container, such as a list, 
tuple, etc.""" + + # Store the opening bracket. + items = [Atom(Token(*tokens[index]))] + index += 1 + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + if tok.token_string in ',)]}': + # First check if we're at the end of a list comprehension or + # if-expression. Don't add the ending token as part of the list + # comprehension or if-expression, because they aren't part of those + # constructs. + if for_or_if == 'for': + return (ListComprehension(items), index - 1) + + elif for_or_if == 'if': + return (IfExpression(items), index - 1) + + # We've reached the end of a container. + items.append(Atom(tok)) + + # If not, then we are at the end of a container. + if tok.token_string == ')': + # The end of a tuple. + return (Tuple(items), index) + + elif tok.token_string == ']': + # The end of a list. + return (List(items), index) + + elif tok.token_string == '}': + # The end of a dictionary or set. + return (DictOrSet(items), index) + + elif tok.token_string in '([{': + # A sub-container is being defined. + (container, index) = _parse_container(tokens, index) + items.append(container) + + elif tok.token_string == 'for': + (container, index) = _parse_container(tokens, index, 'for') + items.append(container) + + elif tok.token_string == 'if': + (container, index) = _parse_container(tokens, index, 'if') + items.append(container) + + else: + items.append(Atom(tok)) + + index += 1 + + return (None, None) + + +def _parse_tokens(tokens): + """Parse the tokens. + + This converts the tokens into a form where we can manipulate them + more easily. + + """ + + index = 0 + parsed_tokens = [] + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + assert tok.token_type != token.INDENT + if tok.token_type == tokenize.NEWLINE: + # There's only one newline and it's at the end. 
+ break + + if tok.token_string in '([{': + (container, index) = _parse_container(tokens, index) + if not container: + return None + parsed_tokens.append(container) + else: + parsed_tokens.append(Atom(tok)) + + index += 1 + + return parsed_tokens + + +def _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line): + """Reflow the lines so that it looks nice.""" + + if unicode(parsed_tokens[0]) == 'def': + # A function definition gets indented a bit more. + continued_indent = indentation + ' ' * 2 * DEFAULT_INDENT_SIZE + else: + continued_indent = indentation + ' ' * DEFAULT_INDENT_SIZE + + break_after_open_bracket = not start_on_prefix_line + + lines = ReformattedLines(max_line_length) + lines.add_indent(len(indentation.lstrip('\r\n'))) + + if not start_on_prefix_line: + # If splitting after the opening bracket will cause the first element + # to be aligned weirdly, don't try it. + first_token = get_item(parsed_tokens, 0) + second_token = get_item(parsed_tokens, 1) + + if ( + first_token and second_token and + unicode(second_token)[0] == '(' and + len(indentation) + len(first_token) + 1 == len(continued_indent) + ): + return None + + for item in parsed_tokens: + lines.add_space_if_needed(unicode(item), equal=True) + + save_continued_indent = continued_indent + if start_on_prefix_line and isinstance(item, Container): + start_on_prefix_line = False + continued_indent = ' ' * (lines.current_size() + 1) + + item.reflow(lines, continued_indent, break_after_open_bracket) + continued_indent = save_continued_indent + + return lines.emit() + + +def _shorten_line_at_tokens_new(tokens, source, indentation, + max_line_length): + """Shorten the line taking its length into account. + + The input is expected to be free of newlines except for inside + multiline strings and at the end. + + """ + # Yield the original source so to see if it's a better choice than the + # shortened candidate lines we generate here. 
+ yield indentation + source + + parsed_tokens = _parse_tokens(tokens) + + if parsed_tokens: + # Perform two reflows. The first one starts on the same line as the + # prefix. The second starts on the line after the prefix. + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=True) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=False) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + +def _shorten_line_at_tokens(tokens, source, indentation, indent_word, + key_token_strings, aggressive): + """Separate line by breaking at tokens in key_token_strings. + + The input is expected to be free of newlines except for inside + multiline strings and at the end. + + """ + offsets = [] + for (index, _t) in enumerate(token_offsets(tokens)): + (token_type, + token_string, + start_offset, + end_offset) = _t + + assert token_type != token.INDENT + + if token_string in key_token_strings: + # Do not break in containers with zero or one items. + unwanted_next_token = { + '(': ')', + '[': ']', + '{': '}'}.get(token_string) + if unwanted_next_token: + if ( + get_item(tokens, + index + 1, + default=[None, None])[1] == unwanted_next_token or + get_item(tokens, + index + 2, + default=[None, None])[1] == unwanted_next_token + ): + continue + + if ( + index > 2 and token_string == '(' and + tokens[index - 1][1] in ',(%[' + ): + # Don't split after a tuple start, or before a tuple start if + # the tuple is in a list. + continue + + if end_offset < len(source) - 1: + # Don't split right before newline. + offsets.append(end_offset) + else: + # Break at adjacent strings. These were probably meant to be on + # separate lines in the first place. 
+ previous_token = get_item(tokens, index - 1) + if ( + token_type == tokenize.STRING and + previous_token and previous_token[0] == tokenize.STRING + ): + offsets.append(start_offset) + + current_indent = None + fixed = None + for line in split_at_offsets(source, offsets): + if fixed: + fixed += '\n' + current_indent + line + + for symbol in '([{': + if line.endswith(symbol): + current_indent += indent_word + else: + # First line. + fixed = line + assert not current_indent + current_indent = indent_word + + assert fixed is not None + + if check_syntax(normalize_multiline(fixed) + if aggressive > 1 else fixed): + return indentation + fixed + + return None + + +def token_offsets(tokens): + """Yield tokens and offsets.""" + end_offset = 0 + previous_end_row = 0 + previous_end_column = 0 + for t in tokens: + token_type = t[0] + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + # Account for the whitespace between tokens. + end_offset += start_column + if previous_end_row == start_row: + end_offset -= previous_end_column + + # Record the start offset of the token. + start_offset = end_offset + + # Account for the length of the token itself. + end_offset += len(token_string) + + yield (token_type, + token_string, + start_offset, + end_offset) + + previous_end_row = end_row + previous_end_column = end_column + + +def normalize_multiline(line): + """Normalize multiline-related code that will cause syntax error. + + This is for purposes of checking syntax. 
+ + """ + if line.startswith('def ') and line.rstrip().endswith(':'): + return line + ' pass' + elif line.startswith('return '): + return 'def _(): ' + line + elif line.startswith('@'): + return line + 'def _(): pass' + elif line.startswith('class '): + return line + ' pass' + elif line.startswith(('if ', 'elif ', 'for ', 'while ')): + return line + ' pass' + + return line + + +def fix_whitespace(line, offset, replacement): + """Replace whitespace at offset and return fixed line.""" + # Replace escaped newlines too + left = line[:offset].rstrip('\n\r \t\\') + right = line[offset:].lstrip('\n\r \t\\') + if right.startswith('#'): + return line + + return left + replacement + right + + +def _execute_pep8(pep8_options, source): + """Execute pycodestyle via python method calls.""" + class QuietReport(pycodestyle.BaseReport): + + """Version of checker that does not print.""" + + def __init__(self, options): + super(QuietReport, self).__init__(options) + self.__full_error_results = [] + + def error(self, line_number, offset, text, check): + """Collect errors.""" + code = super(QuietReport, self).error(line_number, + offset, + text, + check) + if code: + self.__full_error_results.append( + {'id': code, + 'line': line_number, + 'column': offset + 1, + 'info': text}) + + def full_error_results(self): + """Return error results in detail. + + Results are in the form of a list of dictionaries. Each + dictionary contains 'id', 'line', 'column', and 'info'. + + """ + return self.__full_error_results + + checker = pycodestyle.Checker('', lines=source, reporter=QuietReport, + **pep8_options) + checker.check_all() + return checker.report.full_error_results() + + +def _remove_leading_and_normalize(line): + return line.lstrip().rstrip(CR + LF) + '\n' + + +class Reindenter(object): + + """Reindents badly-indented code to uniformly use four-space indentation. + + Released to the public domain, by Tim Peters, 03 October 2000. 
+ + """ + + def __init__(self, input_text): + sio = io.StringIO(input_text) + source_lines = sio.readlines() + + self.string_content_line_numbers = multiline_string_lines(input_text) + + # File lines, rstripped & tab-expanded. Dummy at start is so + # that we can use tokenize's 1-based line numbering easily. + # Note that a line is all-blank iff it is a newline. + self.lines = [] + for line_number, line in enumerate(source_lines, start=1): + # Do not modify if inside a multiline string. + if line_number in self.string_content_line_numbers: + self.lines.append(line) + else: + # Only expand leading tabs. + self.lines.append(_get_indentation(line).expandtabs() + + _remove_leading_and_normalize(line)) + + self.lines.insert(0, None) + self.index = 1 # index into self.lines of next line + self.input_text = input_text + + def run(self, indent_size=DEFAULT_INDENT_SIZE): + """Fix indentation and return modified line numbers. + + Line numbers are indexed at 1. + + """ + if indent_size < 1: + return self.input_text + + try: + stats = _reindent_stats(tokenize.generate_tokens(self.getline)) + except (SyntaxError, tokenize.TokenError): + return self.input_text + # Remove trailing empty lines. + lines = self.lines + # Sentinel. + stats.append((len(lines), 0)) + # Map count of leading spaces to # we want. + have2want = {} + # Program after transformation. + after = [] + # Copy over initial empty lines -- there's nothing to do until + # we see a line with *something* on it. + i = stats[0][0] + after.extend(lines[1:i]) + for i in range(len(stats) - 1): + thisstmt, thislevel = stats[i] + nextstmt = stats[i + 1][0] + have = _leading_space_count(lines[thisstmt]) + want = thislevel * indent_size + if want < 0: + # A comment line. + if have: + # An indented comment line. If we saw the same + # indentation before, reuse what it most recently + # mapped to. + want = have2want.get(have, -1) + if want < 0: + # Then it probably belongs to the next real stmt. 
+ for j in range(i + 1, len(stats) - 1): + jline, jlevel = stats[j] + if jlevel >= 0: + if have == _leading_space_count(lines[jline]): + want = jlevel * indent_size + break + if want < 0: # Maybe it's a hanging + # comment like this one, + # in which case we should shift it like its base + # line got shifted. + for j in range(i - 1, -1, -1): + jline, jlevel = stats[j] + if jlevel >= 0: + want = (have + _leading_space_count( + after[jline - 1]) - + _leading_space_count(lines[jline])) + break + if want < 0: + # Still no luck -- leave it alone. + want = have + else: + want = 0 + assert want >= 0 + have2want[have] = want + diff = want - have + if diff == 0 or have == 0: + after.extend(lines[thisstmt:nextstmt]) + else: + for line_number, line in enumerate(lines[thisstmt:nextstmt], + start=thisstmt): + if line_number in self.string_content_line_numbers: + after.append(line) + elif diff > 0: + if line == '\n': + after.append(line) + else: + after.append(' ' * diff + line) + else: + remove = min(_leading_space_count(line), -diff) + after.append(line[remove:]) + + return ''.join(after) + + def getline(self): + """Line-getter for tokenize.""" + if self.index >= len(self.lines): + line = '' + else: + line = self.lines[self.index] + self.index += 1 + return line + + +def _reindent_stats(tokens): + """Return list of (lineno, indentlevel) pairs. + + One for each stmt and comment line. indentlevel is -1 for comment + lines, as a signal that tokenize doesn't know what to do about them; + indeed, they're our headache! + + """ + find_stmt = 1 # Next token begins a fresh stmt? + level = 0 # Current indent level. + stats = [] + + for t in tokens: + token_type = t[0] + sline = t[2][0] + line = t[4] + + if token_type == tokenize.NEWLINE: + # A program statement, or ENDMARKER, will eventually follow, + # after some (possibly empty) run of tokens of the form + # (NL | COMMENT)* (INDENT | DEDENT+)? 
+ find_stmt = 1 + + elif token_type == tokenize.INDENT: + find_stmt = 1 + level += 1 + + elif token_type == tokenize.DEDENT: + find_stmt = 1 + level -= 1 + + elif token_type == tokenize.COMMENT: + if find_stmt: + stats.append((sline, -1)) + # But we're still looking for a new stmt, so leave + # find_stmt alone. + + elif token_type == tokenize.NL: + pass + + elif find_stmt: + # This is the first "real token" following a NEWLINE, so it + # must be the first token of the next program statement, or an + # ENDMARKER. + find_stmt = 0 + if line: # Not endmarker. + stats.append((sline, level)) + + return stats + + +def _leading_space_count(line): + """Return number of leading spaces in line.""" + i = 0 + while i < len(line) and line[i] == ' ': + i += 1 + return i + + +def refactor_with_2to3(source_text, fixer_names, filename=''): + """Use lib2to3 to refactor the source. + + Return the refactored source code. + + """ + from lib2to3.refactor import RefactoringTool + fixers = ['lib2to3.fixes.fix_' + name for name in fixer_names] + tool = RefactoringTool(fixer_names=fixers, explicit=fixers) + + from lib2to3.pgen2 import tokenize as lib2to3_tokenize + try: + # The name parameter is necessary particularly for the "import" fixer. + return unicode(tool.refactor_string(source_text, name=filename)) + except lib2to3_tokenize.TokenError: + return source_text + + +def check_syntax(code): + """Return True if syntax is okay.""" + try: + return compile(code, '', 'exec') + except (SyntaxError, TypeError, UnicodeDecodeError): + return False + + +def filter_results(source, results, aggressive): + """Filter out spurious reports from pycodestyle. + + If aggressive is True, we allow possibly unsafe fixes (E711, E712). 
+ + """ + non_docstring_string_line_numbers = multiline_string_lines( + source, include_docstrings=False) + all_string_line_numbers = multiline_string_lines( + source, include_docstrings=True) + + commented_out_code_line_numbers = commented_out_code_lines(source) + + has_e901 = any(result['id'].lower() == 'e901' for result in results) + + for r in results: + issue_id = r['id'].lower() + + if r['line'] in non_docstring_string_line_numbers: + if issue_id.startswith(('e1', 'e501', 'w191')): + continue + + if r['line'] in all_string_line_numbers: + if issue_id in ['e501']: + continue + + # We must offset by 1 for lines that contain the trailing contents of + # multiline strings. + if not aggressive and (r['line'] + 1) in all_string_line_numbers: + # Do not modify multiline strings in non-aggressive mode. Remove + # trailing whitespace could break doctests. + if issue_id.startswith(('w29', 'w39')): + continue + + if aggressive <= 0: + if issue_id.startswith(('e711', 'e72', 'w6')): + continue + + if aggressive <= 1: + if issue_id.startswith(('e712', 'e713', 'e714', 'w5')): + continue + + if aggressive <= 2: + if issue_id.startswith(('e704', 'w5')): + continue + + if r['line'] in commented_out_code_line_numbers: + if issue_id.startswith(('e26', 'e501')): + continue + + # Do not touch indentation if there is a token error caused by + # incomplete multi-line statement. Otherwise, we risk screwing up the + # indentation. + if has_e901: + if issue_id.startswith(('e1', 'e7')): + continue + + yield r + + +def multiline_string_lines(source, include_docstrings=False): + """Return line numbers that are within multiline strings. + + The line numbers are indexed at 1. + + Docstrings are ignored. 
+ + """ + line_numbers = set() + previous_token_type = '' + try: + for t in generate_tokens(source): + token_type = t[0] + start_row = t[2][0] + end_row = t[3][0] + + if token_type == tokenize.STRING and start_row != end_row: + if ( + include_docstrings or + previous_token_type != tokenize.INDENT + ): + # We increment by one since we want the contents of the + # string. + line_numbers |= set(range(1 + start_row, 1 + end_row)) + + previous_token_type = token_type + except (SyntaxError, tokenize.TokenError): + pass + + return line_numbers + + +def commented_out_code_lines(source): + """Return line numbers of comments that are likely code. + + Commented-out code is bad practice, but modifying it just adds even + more clutter. + + """ + line_numbers = [] + try: + for t in generate_tokens(source): + token_type = t[0] + token_string = t[1] + start_row = t[2][0] + line = t[4] + + # Ignore inline comments. + if not line.lstrip().startswith('#'): + continue + + if token_type == tokenize.COMMENT: + stripped_line = token_string.lstrip('#').strip() + if ( + ' ' in stripped_line and + '#' not in stripped_line and + check_syntax(stripped_line) + ): + line_numbers.append(start_row) + except (SyntaxError, tokenize.TokenError): + pass + + return line_numbers + + +def shorten_comment(line, max_line_length, last_comment=False): + """Return trimmed or split long comment line. + + If there are no comments immediately following it, do a text wrap. + Doing this wrapping on all comments in general would lead to jagged + comment text. + + """ + assert len(line) > max_line_length + line = line.rstrip() + + # PEP 8 recommends 72 characters for comment text. 
+ indentation = _get_indentation(line) + '# ' + max_line_length = min(max_line_length, + len(indentation) + 72) + + MIN_CHARACTER_REPEAT = 5 + if ( + len(line) - len(line.rstrip(line[-1])) >= MIN_CHARACTER_REPEAT and + not line[-1].isalnum() + ): + # Trim comments that end with things like --------- + return line[:max_line_length] + '\n' + elif last_comment and re.match(r'\s*#+\s*\w+', line): + split_lines = textwrap.wrap(line.lstrip(' \t#'), + initial_indent=indentation, + subsequent_indent=indentation, + width=max_line_length, + break_long_words=False, + break_on_hyphens=False) + return '\n'.join(split_lines) + '\n' + + return line + '\n' + + +def normalize_line_endings(lines, newline): + """Return fixed line endings. + + All lines will be modified to use the most common line ending. + + """ + return [line.rstrip('\n\r') + newline for line in lines] + + +def mutual_startswith(a, b): + return b.startswith(a) or a.startswith(b) + + +def code_match(code, select, ignore): + if ignore: + assert not isinstance(ignore, unicode) + for ignored_code in [c.strip() for c in ignore]: + if mutual_startswith(code.lower(), ignored_code.lower()): + return False + + if select: + assert not isinstance(select, unicode) + for selected_code in [c.strip() for c in select]: + if mutual_startswith(code.lower(), selected_code.lower()): + return True + return False + + return True + + +def fix_code(source, options=None, encoding=None, apply_config=False): + """Return fixed source code. + + "encoding" will be used to decode "source" if it is a byte string. 
+ + """ + options = _get_options(options, apply_config) + + if not isinstance(source, unicode): + source = source.decode(encoding or get_encoding()) + + sio = io.StringIO(source) + return fix_lines(sio.readlines(), options=options) + + +def _get_options(raw_options, apply_config): + """Return parsed options.""" + if not raw_options: + return parse_args([''], apply_config=apply_config) + + if isinstance(raw_options, dict): + options = parse_args([''], apply_config=apply_config) + for name, value in raw_options.items(): + if not hasattr(options, name): + raise ValueError("No such option '{}'".format(name)) + + # Check for very basic type errors. + expected_type = type(getattr(options, name)) + if not isinstance(expected_type, (str, unicode)): + if isinstance(value, (str, unicode)): + raise ValueError( + "Option '{}' should not be a string".format(name)) + setattr(options, name, value) + else: + options = raw_options + + return options + + +def fix_lines(source_lines, options, filename=''): + """Return fixed source code.""" + # Transform everything to line feed. Then change them back to original + # before returning fixed source code. + original_newline = find_newline(source_lines) + tmp_source = ''.join(normalize_line_endings(source_lines, '\n')) + + # Keep a history to break out of cycles. + previous_hashes = set() + + if options.line_range: + # Disable "apply_local_fixes()" for now due to issue #175. + fixed_source = tmp_source + else: + # Apply global fixes only once (for efficiency). 
+ fixed_source = apply_global_fixes(tmp_source, + options, + filename=filename) + + passes = 0 + long_line_ignore_cache = set() + while hash(fixed_source) not in previous_hashes: + if options.pep8_passes >= 0 and passes > options.pep8_passes: + break + passes += 1 + + previous_hashes.add(hash(fixed_source)) + + tmp_source = copy.copy(fixed_source) + + fix = FixPEP8( + filename, + options, + contents=tmp_source, + long_line_ignore_cache=long_line_ignore_cache) + + fixed_source = fix.fix() + + sio = io.StringIO(fixed_source) + return ''.join(normalize_line_endings(sio.readlines(), original_newline)) + + +def fix_file(filename, options=None, output=None, apply_config=False): + if not options: + options = parse_args([filename], apply_config=apply_config) + + original_source = readlines_from_file(filename) + + fixed_source = original_source + + if options.in_place or output: + encoding = detect_encoding(filename) + + if output: + output = LineEndingWrapper(wrap_output(output, encoding=encoding)) + + fixed_source = fix_lines(fixed_source, options, filename=filename) + + if options.diff: + new = io.StringIO(fixed_source) + new = new.readlines() + diff = get_diff_text(original_source, new, filename) + if output: + output.write(diff) + output.flush() + else: + return diff + elif options.in_place: + fp = open_with_encoding(filename, encoding=encoding, mode='w') + fp.write(fixed_source) + fp.close() + else: + if output: + output.write(fixed_source) + output.flush() + else: + return fixed_source + + +def global_fixes(): + """Yield multiple (code, function) tuples.""" + for function in list(globals().values()): + if inspect.isfunction(function): + arguments = _get_parameters(function) + if arguments[:1] != ['source']: + continue + + code = extract_code_from_function(function) + if code: + yield (code, function) + + +def _get_parameters(function): + # pylint: disable=deprecated-method + if sys.version_info >= (3, 3): + # We need to match "getargspec()", which includes "self" as 
the first + # value for methods. + # https://bugs.python.org/issue17481#msg209469 + if inspect.ismethod(function): + function = function.__func__ + + return list(inspect.signature(function).parameters) + else: + return inspect.getargspec(function)[0] + + +def apply_global_fixes(source, options, where='global', filename=''): + """Run global fixes on source code. + + These are fixes that only need be done once (unlike those in + FixPEP8, which are dependent on pycodestyle). + + """ + if any(code_match(code, select=options.select, ignore=options.ignore) + for code in ['E101', 'E111']): + source = reindent(source, + indent_size=options.indent_size) + + for (code, function) in global_fixes(): + if code_match(code, select=options.select, ignore=options.ignore): + if options.verbose: + print('---> Applying {0} fix for {1}'.format(where, + code.upper()), + file=sys.stderr) + source = function(source, + aggressive=options.aggressive) + + source = fix_2to3(source, + aggressive=options.aggressive, + select=options.select, + ignore=options.ignore, + filename=filename) + + return source + + +def extract_code_from_function(function): + """Return code handled by function.""" + if not function.__name__.startswith('fix_'): + return None + + code = re.sub('^fix_', '', function.__name__) + if not code: + return None + + try: + int(code[1:]) + except ValueError: + return None + + return code + + +def _get_package_version(): + packages = ["pycodestyle: {0}".format(pycodestyle.__version__)] + return ", ".join(packages) + + +def create_parser(): + """Return command-line parser.""" + # Do import locally to be friendly to those who use autopep8 as a library + # and are supporting Python 2.6. 
+ import argparse + + parser = argparse.ArgumentParser(description=docstring_summary(__doc__), + prog='autopep8') + parser.add_argument('--version', action='version', + version='%(prog)s {0} ({1})'.format( + __version__, _get_package_version())) + parser.add_argument('-v', '--verbose', action='count', + default=0, + help='print verbose messages; ' + 'multiple -v result in more verbose messages') + parser.add_argument('-d', '--diff', action='store_true', + help='print the diff for the fixed source') + parser.add_argument('-i', '--in-place', action='store_true', + help='make changes to files in place') + parser.add_argument('--global-config', metavar='filename', + default=DEFAULT_CONFIG, + help='path to a global pep8 config file; if this file ' + 'does not exist then this is ignored ' + '(default: {0})'.format(DEFAULT_CONFIG)) + parser.add_argument('--ignore-local-config', action='store_true', + help="don't look for and apply local config files; " + 'if not passed, defaults are updated with any ' + "config files in the project's root directory") + parser.add_argument('-r', '--recursive', action='store_true', + help='run recursively over directories; ' + 'must be used with --in-place or --diff') + parser.add_argument('-j', '--jobs', type=int, metavar='n', default=1, + help='number of parallel jobs; ' + 'match CPU count if value is less than 1') + parser.add_argument('-p', '--pep8-passes', metavar='n', + default=-1, type=int, + help='maximum number of additional pep8 passes ' + '(default: infinite)') + parser.add_argument('-a', '--aggressive', action='count', default=0, + help='enable non-whitespace changes; ' + 'multiple -a result in more aggressive changes') + parser.add_argument('--experimental', action='store_true', + help='enable experimental fixes') + parser.add_argument('--exclude', metavar='globs', + help='exclude file/directory names that match these ' + 'comma-separated globs') + parser.add_argument('--list-fixes', action='store_true', + help='list codes for 
fixes; ' + 'used by --ignore and --select') + parser.add_argument('--ignore', metavar='errors', default='', + help='do not fix these errors/warnings ' + '(default: {0})'.format(DEFAULT_IGNORE)) + parser.add_argument('--select', metavar='errors', default='', + help='fix only these errors/warnings (e.g. E4,W)') + parser.add_argument('--max-line-length', metavar='n', default=79, type=int, + help='set maximum allowed line length ' + '(default: %(default)s)') + parser.add_argument('--line-range', '--range', metavar='line', + default=None, type=int, nargs=2, + help='only fix errors found within this inclusive ' + 'range of line numbers (e.g. 1 99); ' + 'line numbers are indexed at 1') + parser.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE, + type=int, help=argparse.SUPPRESS) + parser.add_argument('files', nargs='*', + help="files to format or '-' for standard in") + + return parser + + +def parse_args(arguments, apply_config=False): + """Parse command-line options.""" + parser = create_parser() + args = parser.parse_args(arguments) + + if not args.files and not args.list_fixes: + parser.error('incorrect number of arguments') + + args.files = [decode_filename(name) for name in args.files] + + if apply_config: + parser = read_config(args, parser) + args = parser.parse_args(arguments) + args.files = [decode_filename(name) for name in args.files] + + if '-' in args.files: + if len(args.files) > 1: + parser.error('cannot mix stdin and regular files') + + if args.diff: + parser.error('--diff cannot be used with standard input') + + if args.in_place: + parser.error('--in-place cannot be used with standard input') + + if args.recursive: + parser.error('--recursive cannot be used with standard input') + + if len(args.files) > 1 and not (args.in_place or args.diff): + parser.error('autopep8 only takes one filename as argument ' + 'unless the "--in-place" or "--diff" args are ' + 'used') + + if args.recursive and not (args.in_place or args.diff): + 
parser.error('--recursive must be used with --in-place or --diff') + + if args.in_place and args.diff: + parser.error('--in-place and --diff are mutually exclusive') + + if args.max_line_length <= 0: + parser.error('--max-line-length must be greater than 0') + + if args.select: + args.select = _split_comma_separated(args.select) + + if args.ignore: + args.ignore = _split_comma_separated(args.ignore) + elif not args.select: + if args.aggressive: + # Enable everything by default if aggressive. + args.select = set(['E', 'W']) + else: + args.ignore = _split_comma_separated(DEFAULT_IGNORE) + + if args.exclude: + args.exclude = _split_comma_separated(args.exclude) + else: + args.exclude = set([]) + + if args.jobs < 1: + # Do not import multiprocessing globally in case it is not supported + # on the platform. + import multiprocessing + args.jobs = multiprocessing.cpu_count() + + if args.jobs > 1 and not args.in_place: + parser.error('parallel jobs requires --in-place') + + if args.line_range: + if args.line_range[0] <= 0: + parser.error('--range must be positive numbers') + if args.line_range[0] > args.line_range[1]: + parser.error('First value of --range should be less than or equal ' + 'to the second') + + return args + + +def read_config(args, parser): + """Read both user configuration and local configuration.""" + try: + from configparser import ConfigParser as SafeConfigParser + from configparser import Error + except ImportError: + from ConfigParser import SafeConfigParser + from ConfigParser import Error + + config = SafeConfigParser() + + try: + config.read(args.global_config) + + if not args.ignore_local_config: + parent = tail = args.files and os.path.abspath( + os.path.commonprefix(args.files)) + while tail: + if config.read([os.path.join(parent, fn) + for fn in PROJECT_CONFIG]): + break + (parent, tail) = os.path.split(parent) + + defaults = dict() + option_list = dict([(o.dest, o.type or type(o.default)) + for o in parser._actions]) + + for section in 
['pep8', 'pycodestyle']: + if not config.has_section(section): + continue + for (k, _) in config.items(section): + norm_opt = k.lstrip('-').replace('-', '_') + opt_type = option_list[norm_opt] + if opt_type is int: + value = config.getint(section, k) + elif opt_type is bool: + value = config.getboolean(section, k) + else: + value = config.get(section, k) + defaults[norm_opt] = value + + parser.set_defaults(**defaults) + except Error: + # Ignore for now. + pass + + return parser + + +def _split_comma_separated(string): + """Return a set of strings.""" + return set(text.strip() for text in string.split(',') if text.strip()) + + +def decode_filename(filename): + """Return Unicode filename.""" + if isinstance(filename, unicode): + return filename + + return filename.decode(sys.getfilesystemencoding()) + + +def supported_fixes(): + """Yield pep8 error codes that autopep8 fixes. + + Each item we yield is a tuple of the code followed by its + description. + + """ + yield ('E101', docstring_summary(reindent.__doc__)) + + instance = FixPEP8(filename=None, options=None, contents='') + for attribute in dir(instance): + code = re.match('fix_([ew][0-9][0-9][0-9])', attribute) + if code: + yield ( + code.group(1).upper(), + re.sub(r'\s+', ' ', + docstring_summary(getattr(instance, attribute).__doc__)) + ) + + for (code, function) in sorted(global_fixes()): + yield (code.upper() + (4 - len(code)) * ' ', + re.sub(r'\s+', ' ', docstring_summary(function.__doc__))) + + for code in sorted(CODE_TO_2TO3): + yield (code.upper() + (4 - len(code)) * ' ', + re.sub(r'\s+', ' ', docstring_summary(fix_2to3.__doc__))) + + +def docstring_summary(docstring): + """Return summary of docstring.""" + return docstring.split('\n')[0] if docstring else '' + + +def line_shortening_rank(candidate, indent_word, max_line_length, + experimental=False): + """Return rank of candidate. + + This is for sorting candidates. 
+ + """ + if not candidate.strip(): + return 0 + + rank = 0 + lines = candidate.rstrip().split('\n') + + offset = 0 + if ( + not lines[0].lstrip().startswith('#') and + lines[0].rstrip()[-1] not in '([{' + ): + for (opening, closing) in ('()', '[]', '{}'): + # Don't penalize empty containers that aren't split up. Things like + # this "foo(\n )" aren't particularly good. + opening_loc = lines[0].find(opening) + closing_loc = lines[0].find(closing) + if opening_loc >= 0: + if closing_loc < 0 or closing_loc != opening_loc + 1: + offset = max(offset, 1 + opening_loc) + + current_longest = max(offset + len(x.strip()) for x in lines) + + rank += 4 * max(0, current_longest - max_line_length) + + rank += len(lines) + + # Too much variation in line length is ugly. + rank += 2 * standard_deviation(len(line) for line in lines) + + bad_staring_symbol = { + '(': ')', + '[': ']', + '{': '}'}.get(lines[0][-1]) + + if len(lines) > 1: + if ( + bad_staring_symbol and + lines[1].lstrip().startswith(bad_staring_symbol) + ): + rank += 20 + + for lineno, current_line in enumerate(lines): + current_line = current_line.strip() + + if current_line.startswith('#'): + continue + + for bad_start in ['.', '%', '+', '-', '/']: + if current_line.startswith(bad_start): + rank += 100 + + # Do not tolerate operators on their own line. + if current_line == bad_start: + rank += 1000 + + if ( + current_line.endswith(('.', '%', '+', '-', '/')) and + "': " in current_line + ): + rank += 1000 + + if current_line.endswith(('(', '[', '{', '.')): + # Avoid lonely opening. They result in longer lines. + if len(current_line) <= len(indent_word): + rank += 100 + + # Avoid the ugliness of ", (\n". + if ( + current_line.endswith('(') and + current_line[:-1].rstrip().endswith(',') + ): + rank += 100 + + # Avoid the ugliness of "something[\n" and something[index][\n. 
+ if ( + current_line.endswith('[') and + len(current_line) > 1 and + (current_line[-2].isalnum() or current_line[-2] in ']') + ): + rank += 300 + + # Also avoid the ugliness of "foo.\nbar" + if current_line.endswith('.'): + rank += 100 + + if has_arithmetic_operator(current_line): + rank += 100 + + # Avoid breaking at unary operators. + if re.match(r'.*[(\[{]\s*[\-\+~]$', current_line.rstrip('\\ ')): + rank += 1000 + + if re.match(r'.*lambda\s*\*$', current_line.rstrip('\\ ')): + rank += 1000 + + if current_line.endswith(('%', '(', '[', '{')): + rank -= 20 + + # Try to break list comprehensions at the "for". + if current_line.startswith('for '): + rank -= 50 + + if current_line.endswith('\\'): + # If a line ends in \-newline, it may be part of a + # multiline string. In that case, we would like to know + # how long that line is without the \-newline. If it's + # longer than the maximum, or has comments, then we assume + # that the \-newline is an okay candidate and only + # penalize it a bit. + total_len = len(current_line) + lineno += 1 + while lineno < len(lines): + total_len += len(lines[lineno]) + + if lines[lineno].lstrip().startswith('#'): + total_len = max_line_length + break + + if not lines[lineno].endswith('\\'): + break + + lineno += 1 + + if total_len < max_line_length: + rank += 10 + else: + rank += 100 if experimental else 1 + + # Prefer breaking at commas rather than colon. + if ',' in current_line and current_line.endswith(':'): + rank += 10 + + # Avoid splitting dictionaries between key and value. 
+ if current_line.endswith(':'): + rank += 100 + + rank += 10 * count_unbalanced_brackets(current_line) + + return max(0, rank) + + +def standard_deviation(numbers): + """Return standard devation.""" + numbers = list(numbers) + if not numbers: + return 0 + mean = sum(numbers) / len(numbers) + return (sum((n - mean) ** 2 for n in numbers) / + len(numbers)) ** .5 + + +def has_arithmetic_operator(line): + """Return True if line contains any arithmetic operators.""" + for operator in pycodestyle.ARITHMETIC_OP: + if operator in line: + return True + + return False + + +def count_unbalanced_brackets(line): + """Return number of unmatched open/close brackets.""" + count = 0 + for opening, closing in ['()', '[]', '{}']: + count += abs(line.count(opening) - line.count(closing)) + + return count + + +def split_at_offsets(line, offsets): + """Split line at offsets. + + Return list of strings. + + """ + result = [] + + previous_offset = 0 + current_offset = 0 + for current_offset in sorted(offsets): + if current_offset < len(line) and previous_offset != current_offset: + result.append(line[previous_offset:current_offset].strip()) + previous_offset = current_offset + + result.append(line[current_offset:]) + + return result + + +class LineEndingWrapper(object): + + r"""Replace line endings to work with sys.stdout. + + It seems that sys.stdout expects only '\n' as the line ending, no matter + the platform. Otherwise, we get repeated line endings. 
+ + """ + + def __init__(self, output): + self.__output = output + + def write(self, s): + self.__output.write(s.replace('\r\n', '\n').replace('\r', '\n')) + + def flush(self): + self.__output.flush() + + +def match_file(filename, exclude): + """Return True if file is okay for modifying/recursing.""" + base_name = os.path.basename(filename) + + if base_name.startswith('.'): + return False + + for pattern in exclude: + if fnmatch.fnmatch(base_name, pattern): + return False + if fnmatch.fnmatch(filename, pattern): + return False + + if not os.path.isdir(filename) and not is_python_file(filename): + return False + + return True + + +def find_files(filenames, recursive, exclude): + """Yield filenames.""" + while filenames: + name = filenames.pop(0) + if recursive and os.path.isdir(name): + for root, directories, children in os.walk(name): + filenames += [os.path.join(root, f) for f in children + if match_file(os.path.join(root, f), + exclude)] + directories[:] = [d for d in directories + if match_file(os.path.join(root, d), + exclude)] + else: + yield name + + +def _fix_file(parameters): + """Helper function for optionally running fix_file() in parallel.""" + if parameters[1].verbose: + print('[file:{0}]'.format(parameters[0]), file=sys.stderr) + try: + fix_file(*parameters) + except IOError as error: + print(unicode(error), file=sys.stderr) + + +def fix_multiple_files(filenames, options, output=None): + """Fix list of files. + + Optionally fix files recursively. 
+ + """ + filenames = find_files(filenames, options.recursive, options.exclude) + if options.jobs > 1: + import multiprocessing + pool = multiprocessing.Pool(options.jobs) + pool.map(_fix_file, + [(name, options) for name in filenames]) + else: + for name in filenames: + _fix_file((name, options, output)) + + +def is_python_file(filename): + """Return True if filename is Python file.""" + if filename.endswith('.py'): + return True + + try: + with open_with_encoding( + filename, + limit_byte_check=MAX_PYTHON_FILE_DETECTION_BYTES) as f: + text = f.read(MAX_PYTHON_FILE_DETECTION_BYTES) + if not text: + return False + first_line = text.splitlines()[0] + except (IOError, IndexError): + return False + + if not PYTHON_SHEBANG_REGEX.match(first_line): + return False + + return True + + +def is_probably_part_of_multiline(line): + """Return True if line is likely part of a multiline string. + + When multiline strings are involved, pep8 reports the error as being + at the start of the multiline string, which doesn't work for us. + + """ + return ( + '"""' in line or + "'''" in line or + line.rstrip().endswith('\\') + ) + + +def wrap_output(output, encoding): + """Return output with specified encoding.""" + return codecs.getwriter(encoding)(output.buffer + if hasattr(output, 'buffer') + else output) + + +def get_encoding(): + """Return preferred encoding.""" + return locale.getpreferredencoding() or sys.getdefaultencoding() + + +def main(argv=None, apply_config=True): + """Command-line entry.""" + if argv is None: + argv = sys.argv + + try: + # Exit on broken pipe. + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + except AttributeError: # pragma: no cover + # SIGPIPE is not available on Windows. 
+ pass + + try: + args = parse_args(argv[1:], apply_config=apply_config) + + if args.list_fixes: + for code, description in sorted(supported_fixes()): + print('{code} - {description}'.format( + code=code, description=description)) + return 0 + + if args.files == ['-']: + assert not args.in_place + + encoding = sys.stdin.encoding or get_encoding() + + # LineEndingWrapper is unnecessary here due to the symmetry between + # standard in and standard out. + wrap_output(sys.stdout, encoding=encoding).write( + fix_code(sys.stdin.read(), args, encoding=encoding)) + else: + if args.in_place or args.diff: + args.files = list(set(args.files)) + else: + assert len(args.files) == 1 + assert not args.recursive + + fix_multiple_files(args.files, args, sys.stdout) + except KeyboardInterrupt: + return 1 # pragma: no cover + + +class CachedTokenizer(object): + + """A one-element cache around tokenize.generate_tokens(). + + Original code written by Ned Batchelder, in coverage.py. + + """ + + def __init__(self): + self.last_text = None + self.last_tokens = None + + def generate_tokens(self, text): + """A stand-in for tokenize.generate_tokens().""" + if text != self.last_text: + string_io = io.StringIO(text) + self.last_tokens = list( + tokenize.generate_tokens(string_io.readline) + ) + self.last_text = text + return self.last_tokens + + +_cached_tokenizer = CachedTokenizer() +generate_tokens = _cached_tokenizer.generate_tokens + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/pymode/environment.py b/pymode/environment.py new file mode 100644 index 00000000..c146ea6e --- /dev/null +++ b/pymode/environment.py @@ -0,0 +1,249 @@ +"""Define interfaces.""" + +from __future__ import print_function + +import json +import os.path +import time +import vim # noqa + +from ._compat import PY2 + + +class VimPymodeEnviroment(object): + + """Vim User interface.""" + + prefix = '[Pymode]' + + def __init__(self): + """Init VIM environment.""" + self.current = vim.current + self.options = 
dict(encoding=vim.eval('&enc')) + self.options['debug'] = self.var('g:pymode_debug', True) + + @property + def curdir(self): + """Return current working directory.""" + return self.var('getcwd()') + + @property + def curbuf(self): + """Return current buffer.""" + return self.current.buffer + + @property + def cursor(self): + """Return current window position. + + :return tuple: (row, col) + + """ + return self.current.window.cursor + + @property + def source(self): + """Return source of current buffer.""" + return "\n".join(self.lines) + + @property + def lines(self): + """Iterate by lines in current file. + + :return list: + + """ + if not PY2: + return self.curbuf + + return [l.decode(self.options.get('encoding')) for l in self.curbuf] + + @staticmethod + def var(name, to_bool=False, silence=False): + """Get vim variable. + + :return vimobj: + + """ + try: + value = vim.eval(name) + except vim.error: + if silence: + return None + raise + + if to_bool: + try: + value = bool(int(value)) + except ValueError: + value = value + return value + + @staticmethod + def message(msg, history=False): + """Show message to user. + + :return: :None + + """ + if history: + return vim.command('echom "%s"' % str(msg)) + + return vim.command('call pymode#wide_message("%s")' % str(msg)) + + def user_input(self, msg, default=''): + """Return user input or default. + + :return str: + + """ + msg = '%s %s ' % (self.prefix, msg) + + if default != '': + msg += '[%s] ' % default + + try: + vim.command('echohl Debug') + input_str = vim.eval('input("%s> ")' % msg) + vim.command('echohl none') + except KeyboardInterrupt: + input_str = '' + + return input_str or default + + def user_confirm(self, msg, yes=False): + """Get user confirmation. + + :return bool: + + """ + default = 'yes' if yes else 'no' + action = self.user_input(msg, default) + return action and 'yes'.startswith(action) + + def user_input_choices(self, msg, *options): + """Get one of many options. 
+ + :return str: A choosen option + + """ + choices = ['%s %s' % (self.prefix, msg)] + choices += [ + "%s. %s" % (num, opt) for num, opt in enumerate(options, 1)] + try: + input_str = int( + vim.eval('inputlist(%s)' % self.prepare_value(choices))) + except (KeyboardInterrupt, ValueError): + input_str = 0 + + if not input_str: + self.message('Cancelled!') + return False + + try: + return options[input_str - 1] + except (IndexError, ValueError): + self.error('Invalid option: %s' % input_str) + return self.user_input_choices(msg, *options) + + @staticmethod + def error(msg): + """Show error to user.""" + vim.command('call pymode#error("%s")' % str(msg)) + + def debug(self, msg, *args): + """Print debug information.""" + if self.options.get('debug'): + print("%s %s [%s]" % ( + int(time.time()), msg, ', '.join([str(a) for a in args]))) + + def stop(self, value=None): + """Break Vim function.""" + cmd = 'return' + if value is not None: + cmd += ' ' + self.prepare_value(value) + vim.command(cmd) + + def catch_exceptions(self, func): + """Decorator. Make execution more silence. + + :return func: + + """ + def _wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except (Exception, vim.error) as e: # noqa + if self.options.get('debug'): + raise + self.error(e) + return None + return _wrapper + + def run(self, name, *args): + """Run vim function.""" + vim.command('call %s(%s)' % (name, ", ".join([ + self.prepare_value(a) for a in args + ]))) + + def let(self, name, value): + """Set variable.""" + cmd = 'let %s = %s' % (name, self.prepare_value(value)) + self.debug(cmd) + vim.command(cmd) + + def prepare_value(self, value, dumps=True): + """Decode bstr to vim encoding. + + :return unicode string: + + """ + if dumps: + value = json.dumps(value) + + if PY2: + value = value.decode('utf-8').encode(self.options.get('encoding')) + + return value + + def get_offset_params(self, cursor=None, base=""): + """Calculate current offset. 
+ + :return tuple: (source, offset) + + """ + row, col = cursor or env.cursor + source = "" + offset = 0 + for i, line in enumerate(self.lines, 1): + if i == row: + source += line[:col] + base + offset = len(source) + source += line[col:] + else: + source += line + source += '\n' + env.debug('Get offset', base or None, row, col, offset) + return source, offset + + @staticmethod + def goto_line(line): + """Go to line.""" + vim.command('normal %sggzz' % line) + + def goto_file(self, path, cmd='e', force=False): + """Open file by path.""" + if force or os.path.abspath(path) != self.curbuf.name: + self.debug('read', path) + if ' ' in path and os.name == 'posix': + path = path.replace(' ', '\\ ') + vim.command("%s %s" % (cmd, path)) + + @staticmethod + def goto_buffer(bufnr): + """Open buffer.""" + if str(bufnr) != '-1': + vim.command('buffer %s' % bufnr) + + +env = VimPymodeEnviroment() diff --git a/pymode/libs/astroid/__init__.py b/pymode/libs/astroid/__init__.py new file mode 100644 index 00000000..175dcb5e --- /dev/null +++ b/pymode/libs/astroid/__init__.py @@ -0,0 +1,136 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""Python Abstract Syntax Tree New Generation + +The aim of this module is to provide a common base representation of +python source code for projects such as pychecker, pyreverse, +pylint... Well, actually the development of this library is essentially +governed by pylint's needs. + +It extends class defined in the python's _ast module with some +additional methods and attributes. Instance attributes are added by a +builder object, which can either generate extended ast (let's call +them astroid ;) by visiting an existent ast tree or by inspecting living +object. Methods are added by monkey patching ast classes. + +Main modules are: + +* nodes and scoped_nodes for more information about methods and + attributes added to different node classes + +* the manager contains a high level object to get astroid trees from + source files and living objects. It maintains a cache of previously + constructed tree for quick access + +* builder contains the class responsible to build astroid trees +""" +__doctype__ = "restructuredtext en" + +import sys +import re +from operator import attrgetter + +# WARNING: internal imports order matters ! 
+ +# make all exception classes accessible from astroid package +from astroid.exceptions import * + +# make all node classes accessible from astroid package +from astroid.nodes import * + +# trigger extra monkey-patching +from astroid import inference + +# more stuff available +from astroid import raw_building +from astroid.bases import Instance, BoundMethod, UnboundMethod +from astroid.node_classes import are_exclusive, unpack_infer +from astroid.scoped_nodes import builtin_lookup +from astroid.builder import parse +from astroid.util import YES + +# make a manager instance (borg) as well as Project and Package classes +# accessible from astroid package +from astroid.manager import AstroidManager +MANAGER = AstroidManager() +del AstroidManager + +# transform utilities (filters and decorator) + +class AsStringRegexpPredicate(object): + """Class to be used as predicate that may be given to `register_transform` + + First argument is a regular expression that will be searched against the `as_string` + representation of the node onto which it's applied. + + If specified, the second argument is an `attrgetter` expression that will be + applied on the node first to get the actual node on which `as_string` should + be called. + + WARNING: This can be fairly slow, as it has to convert every AST node back + to Python code; you should consider examining the AST directly instead. + """ + def __init__(self, regexp, expression=None): + self.regexp = re.compile(regexp) + self.expression = expression + + def __call__(self, node): + if self.expression is not None: + node = attrgetter(self.expression)(node) + return self.regexp.search(node.as_string()) + +def inference_tip(infer_function): + """Given an instance specific inference function, return a function to be + given to MANAGER.register_transform to set this inference function. + + Typical usage + + .. 
sourcecode:: python + + MANAGER.register_transform(Call, inference_tip(infer_named_tuple), + predicate) + """ + def transform(node, infer_function=infer_function): + node._explicit_inference = infer_function + return node + return transform + + +def register_module_extender(manager, module_name, get_extension_mod): + def transform(node): + extension_module = get_extension_mod() + for name, objs in extension_module._locals.items(): + node._locals[name] = objs + for obj in objs: + if obj.parent is extension_module: + obj.parent = node + + manager.register_transform(Module, transform, lambda n: n.name == module_name) + + +# load brain plugins +from os import listdir +from os.path import join, dirname +BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') +if BRAIN_MODULES_DIR not in sys.path: + # add it to the end of the list so user path take precedence + sys.path.append(BRAIN_MODULES_DIR) +# load modules in this directory +for module in listdir(BRAIN_MODULES_DIR): + if module.endswith('.py'): + __import__(module[:-3]) diff --git a/pymode/libs/astroid/__pkginfo__.py b/pymode/libs/astroid/__pkginfo__.py new file mode 100644 index 00000000..7a5acfa5 --- /dev/null +++ b/pymode/libs/astroid/__pkginfo__.py @@ -0,0 +1,42 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""astroid packaging information""" +distname = 'astroid' + +modname = 'astroid' + +numversion = (1, 4, 9) +version = '.'.join([str(num) for num in numversion]) + +install_requires = ['six', 'lazy_object_proxy', 'wrapt'] + +license = 'LGPL' + +author = 'Python Code Quality Authority' +author_email = 'code-quality@python.org' +mailinglist = "mailto://%s" % author_email +web = 'https://github.com/PyCQA/astroid' + +description = "A abstract syntax tree for Python with inference support." + +classifiers = ["Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Software Development :: Quality Assurance", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 3", + ] diff --git a/pymode/libs/astroid/arguments.py b/pymode/libs/astroid/arguments.py new file mode 100644 index 00000000..f05d48a3 --- /dev/null +++ b/pymode/libs/astroid/arguments.py @@ -0,0 +1,233 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import nodes +from astroid import util + +import six + + +class CallSite(object): + """Class for understanding arguments passed into a call site + + It needs a call context, which contains the arguments and the + keyword arguments that were passed into a given call site. + In order to infer what an argument represents, call + :meth:`infer_argument` with the corresponding function node + and the argument name. + """ + + def __init__(self, callcontext): + args = callcontext.args + keywords = callcontext.keywords + self.duplicated_keywords = set() + self._unpacked_args = self._unpack_args(args) + self._unpacked_kwargs = self._unpack_keywords(keywords) + + self.positional_arguments = [ + arg for arg in self._unpacked_args + if arg is not util.YES + ] + self.keyword_arguments = { + key: value for key, value in self._unpacked_kwargs.items() + if value is not util.YES + } + + @classmethod + def from_call(cls, call_node): + """Get a CallSite object from the given Call node.""" + callcontext = contextmod.CallContext(call_node.args, + call_node.keywords) + return cls(callcontext) + + def has_invalid_arguments(self): + """Check if in the current CallSite were passed *invalid* arguments + + This can mean multiple things. For instance, if an unpacking + of an invalid object was passed, then this method will return True. + Other cases can be when the arguments can't be inferred by astroid, + for example, by passing objects which aren't known statically. + """ + return len(self.positional_arguments) != len(self._unpacked_args) + + def has_invalid_keywords(self): + """Check if in the current CallSite were passed *invalid* keyword arguments + + For instance, unpacking a dictionary with integer keys is invalid + (**{1:2}), because the keys must be strings, which will make this + method to return True. 
Other cases where this might return True if + objects which can't be inferred were passed. + """ + return len(self.keyword_arguments) != len(self._unpacked_kwargs) + + def _unpack_keywords(self, keywords): + values = {} + context = contextmod.InferenceContext() + for name, value in keywords: + if name is None: + # Then it's an unpacking operation (**) + try: + inferred = next(value.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.YES + continue + + if not isinstance(inferred, nodes.Dict): + # Not something we can work with. + values[name] = util.YES + continue + + for dict_key, dict_value in inferred.items: + try: + dict_key = next(dict_key.infer(context=context)) + except exceptions.InferenceError: + values[name] = util.YES + continue + if not isinstance(dict_key, nodes.Const): + values[name] = util.YES + continue + if not isinstance(dict_key.value, six.string_types): + values[name] = util.YES + continue + if dict_key.value in values: + # The name is already in the dictionary + values[dict_key.value] = util.YES + self.duplicated_keywords.add(dict_key.value) + continue + values[dict_key.value] = dict_value + else: + values[name] = value + return values + + @staticmethod + def _unpack_args(args): + values = [] + context = contextmod.InferenceContext() + for arg in args: + if isinstance(arg, nodes.Starred): + try: + inferred = next(arg.value.infer(context=context)) + except exceptions.InferenceError: + values.append(util.YES) + continue + + if inferred is util.YES: + values.append(util.YES) + continue + if not hasattr(inferred, 'elts'): + values.append(util.YES) + continue + values.extend(inferred.elts) + else: + values.append(arg) + return values + + def infer_argument(self, funcnode, name, context): + """infer a function argument value according to the call context""" + if name in self.duplicated_keywords: + raise exceptions.InferenceError(name) + + # Look into the keywords first, maybe it's already there. 
+ try: + return self.keyword_arguments[name].infer(context) + except KeyError: + pass + + # Too many arguments given and no variable arguments. + if len(self.positional_arguments) > len(funcnode.args.args): + if not funcnode.args.vararg: + raise exceptions.InferenceError(name) + + positional = self.positional_arguments[:len(funcnode.args.args)] + vararg = self.positional_arguments[len(funcnode.args.args):] + argindex = funcnode.args.find_argname(name)[0] + kwonlyargs = set(arg.name for arg in funcnode.args.kwonlyargs) + kwargs = { + key: value for key, value in self.keyword_arguments.items() + if key not in kwonlyargs + } + # If there are too few positionals compared to + # what the function expects to receive, check to see + # if the missing positional arguments were passed + # as keyword arguments and if so, place them into the + # positional args list. + if len(positional) < len(funcnode.args.args): + for func_arg in funcnode.args.args: + if func_arg.name in kwargs: + arg = kwargs.pop(func_arg.name) + positional.append(arg) + + if argindex is not None: + # 2. first argument of instance/class method + if argindex == 0 and funcnode.type in ('method', 'classmethod'): + if context.boundnode is not None: + boundnode = context.boundnode + else: + # XXX can do better ? + boundnode = funcnode.parent.frame() + if funcnode.type == 'method': + if not isinstance(boundnode, bases.Instance): + boundnode = bases.Instance(boundnode) + return iter((boundnode,)) + if funcnode.type == 'classmethod': + return iter((boundnode,)) + # if we have a method, extract one position + # from the index, so we'll take in account + # the extra parameter represented by `self` or `cls` + if funcnode.type in ('method', 'classmethod'): + argindex -= 1 + # 2. search arg index + try: + return self.positional_arguments[argindex].infer(context) + except IndexError: + pass + + if funcnode.args.kwarg == name: + # It wants all the keywords that were passed into + # the call site. 
+ if self.has_invalid_keywords(): + raise exceptions.InferenceError + kwarg = nodes.Dict() + kwarg.lineno = funcnode.args.lineno + kwarg.col_offset = funcnode.args.col_offset + kwarg.parent = funcnode.args + items = [(nodes.const_factory(key), value) + for key, value in kwargs.items()] + kwarg.items = items + return iter((kwarg, )) + elif funcnode.args.vararg == name: + # It wants all the args that were passed into + # the call site. + if self.has_invalid_arguments(): + raise exceptions.InferenceError + args = nodes.Tuple() + args.lineno = funcnode.args.lineno + args.col_offset = funcnode.args.col_offset + args.parent = funcnode.args + args.elts = vararg + return iter((args, )) + + # Check if it's a default parameter. + try: + return funcnode.args.default_value(name).infer(context) + except exceptions.NoDefault: + pass + raise exceptions.InferenceError(name) diff --git a/pymode/libs/astroid/as_string.py b/pymode/libs/astroid/as_string.py new file mode 100644 index 00000000..2b07200c --- /dev/null +++ b/pymode/libs/astroid/as_string.py @@ -0,0 +1,548 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""This module renders Astroid nodes as string: + +* :func:`to_code` function return equivalent (hopefuly valid) python string + +* :func:`dump` function return an internal representation of nodes found + in the tree, useful for debugging or understanding the tree structure +""" +import sys + +import six + +INDENT = ' ' # 4 spaces ; keep indentation variable + + +def dump(node, ids=False): + """print a nice astroid tree representation. + + :param ids: if true, we also print the ids (usefull for debugging) + """ + result = [] + _repr_tree(node, result, ids=ids) + return "\n".join(result) + +def _repr_tree(node, result, indent='', _done=None, ids=False): + """built a tree representation of a node as a list of lines""" + if _done is None: + _done = set() + if not hasattr(node, '_astroid_fields'): # not a astroid node + return + if node in _done: + result.append(indent + 'loop in tree: %s' % node) + return + _done.add(node) + node_str = str(node) + if ids: + node_str += ' . \t%x' % id(node) + result.append(indent + node_str) + indent += INDENT + for field in node._astroid_fields: + value = getattr(node, field) + if isinstance(value, (list, tuple)): + result.append(indent + field + " = [") + for child in value: + if isinstance(child, (list, tuple)): + # special case for Dict # FIXME + _repr_tree(child[0], result, indent, _done, ids) + _repr_tree(child[1], result, indent, _done, ids) + result.append(indent + ',') + else: + _repr_tree(child, result, indent, _done, ids) + result.append(indent + "]") + else: + result.append(indent + field + " = ") + _repr_tree(value, result, indent, _done, ids) + + +class AsStringVisitor(object): + """Visitor to render an Astroid node as a valid python code string""" + + def __call__(self, node): + """Makes this visitor behave as a simple function""" + return node.accept(self) + + def _stmt_list(self, stmts): + """return a list of nodes to string""" + stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr]) + return 
INDENT + stmts.replace('\n', '\n'+INDENT) + + + ## visit_ methods ########################################### + + def visit_arguments(self, node): + """return an astroid.Function node as string""" + return node.format_args() + + def visit_assignattr(self, node): + """return an astroid.AssAttr node as string""" + return self.visit_attribute(node) + + def visit_assert(self, node): + """return an astroid.Assert node as string""" + if node.fail: + return 'assert %s, %s' % (node.test.accept(self), + node.fail.accept(self)) + return 'assert %s' % node.test.accept(self) + + def visit_assignname(self, node): + """return an astroid.AssName node as string""" + return node.name + + def visit_assign(self, node): + """return an astroid.Assign node as string""" + lhs = ' = '.join([n.accept(self) for n in node.targets]) + return '%s = %s' % (lhs, node.value.accept(self)) + + def visit_augassign(self, node): + """return an astroid.AugAssign node as string""" + return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self)) + + def visit_repr(self, node): + """return an astroid.Repr node as string""" + return '`%s`' % node.value.accept(self) + + def visit_binop(self, node): + """return an astroid.BinOp node as string""" + return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self)) + + def visit_boolop(self, node): + """return an astroid.BoolOp node as string""" + return (' %s ' % node.op).join(['(%s)' % n.accept(self) + for n in node.values]) + + def visit_break(self, node): + """return an astroid.Break node as string""" + return 'break' + + def visit_call(self, node): + """return an astroid.Call node as string""" + expr_str = node.func.accept(self) + args = [arg.accept(self) for arg in node.args] + if node.keywords: + keywords = [kwarg.accept(self) for kwarg in node.keywords] + else: + keywords = [] + + args.extend(keywords) + return '%s(%s)' % (expr_str, ', '.join(args)) + + def visit_classdef(self, node): + """return an astroid.ClassDef 
node as string""" + decorate = node.decorators and node.decorators.accept(self) or '' + bases = ', '.join([n.accept(self) for n in node.bases]) + if sys.version_info[0] == 2: + bases = bases and '(%s)' % bases or '' + else: + metaclass = node.metaclass() + if metaclass and not node.has_metaclass_hack(): + if bases: + bases = '(%s, metaclass=%s)' % (bases, metaclass.name) + else: + bases = '(metaclass=%s)' % metaclass.name + else: + bases = bases and '(%s)' % bases or '' + docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' + return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, + self._stmt_list(node.body)) + + def visit_compare(self, node): + """return an astroid.Compare node as string""" + rhs_str = ' '.join(['%s %s' % (op, expr.accept(self)) + for op, expr in node.ops]) + return '%s %s' % (node.left.accept(self), rhs_str) + + def visit_comprehension(self, node): + """return an astroid.Comprehension node as string""" + ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs]) + return 'for %s in %s%s' % (node.target.accept(self), + node.iter.accept(self), ifs) + + def visit_const(self, node): + """return an astroid.Const node as string""" + return repr(node.value) + + def visit_continue(self, node): + """return an astroid.Continue node as string""" + return 'continue' + + def visit_delete(self, node): # XXX check if correct + """return an astroid.Delete node as string""" + return 'del %s' % ', '.join([child.accept(self) + for child in node.targets]) + + def visit_delattr(self, node): + """return an astroid.DelAttr node as string""" + return self.visit_attribute(node) + + def visit_delname(self, node): + """return an astroid.DelName node as string""" + return node.name + + def visit_decorators(self, node): + """return an astroid.Decorators node as string""" + return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes]) + + def visit_dict(self, node): + """return an astroid.Dict node as string""" + return '{%s}' % ', 
'.join(self._visit_dict(node)) + + def _visit_dict(self, node): + for key, value in node.items: + key = key.accept(self) + value = value.accept(self) + if key == '**': + # It can only be a DictUnpack node. + yield key + value + else: + yield '%s: %s' % (key, value) + + def visit_dictunpack(self, node): + return '**' + + def visit_dictcomp(self, node): + """return an astroid.DictComp node as string""" + return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self), + ' '.join([n.accept(self) for n in node.generators])) + + def visit_expr(self, node): + """return an astroid.Discard node as string""" + return node.value.accept(self) + + def visit_emptynode(self, node): + """dummy method for visiting an Empty node""" + return '' + + def visit_excepthandler(self, node): + if node.type: + if node.name: + excs = 'except %s, %s' % (node.type.accept(self), + node.name.accept(self)) + else: + excs = 'except %s' % node.type.accept(self) + else: + excs = 'except' + return '%s:\n%s' % (excs, self._stmt_list(node.body)) + + def visit_ellipsis(self, node): + """return an astroid.Ellipsis node as string""" + return '...' 
+ + def visit_empty(self, node): + """return an Empty node as string""" + return '' + + def visit_exec(self, node): + """return an astroid.Exec node as string""" + if node.locals: + return 'exec %s in %s, %s' % (node.expr.accept(self), + node.locals.accept(self), + node.globals.accept(self)) + if node.globals: + return 'exec %s in %s' % (node.expr.accept(self), + node.globals.accept(self)) + return 'exec %s' % node.expr.accept(self) + + def visit_extslice(self, node): + """return an astroid.ExtSlice node as string""" + return ','.join([dim.accept(self) for dim in node.dims]) + + def visit_for(self, node): + """return an astroid.For node as string""" + fors = 'for %s in %s:\n%s' % (node.target.accept(self), + node.iter.accept(self), + self._stmt_list(node.body)) + if node.orelse: + fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) + return fors + + def visit_importfrom(self, node): + """return an astroid.ImportFrom node as string""" + return 'from %s import %s' % ('.' * (node.level or 0) + node.modname, + _import_string(node.names)) + + def visit_functiondef(self, node): + """return an astroid.Function node as string""" + decorate = node.decorators and node.decorators.accept(self) or '' + docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' + return_annotation = '' + if six.PY3 and node.returns: + return_annotation = '->' + node.returns.as_string() + trailer = return_annotation + ":" + else: + trailer = ":" + def_format = "\n%sdef %s(%s)%s%s\n%s" + return def_format % (decorate, node.name, + node.args.accept(self), + trailer, docs, + self._stmt_list(node.body)) + + def visit_generatorexp(self, node): + """return an astroid.GeneratorExp node as string""" + return '(%s %s)' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) + + def visit_attribute(self, node): + """return an astroid.Getattr node as string""" + return '%s.%s' % (node.expr.accept(self), node.attrname) + + def visit_global(self, node): + """return an 
astroid.Global node as string""" + return 'global %s' % ', '.join(node.names) + + def visit_if(self, node): + """return an astroid.If node as string""" + ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))] + if node.orelse:# XXX use elif ??? + ifs.append('else:\n%s' % self._stmt_list(node.orelse)) + return '\n'.join(ifs) + + def visit_ifexp(self, node): + """return an astroid.IfExp node as string""" + return '%s if %s else %s' % (node.body.accept(self), + node.test.accept(self), + node.orelse.accept(self)) + + def visit_import(self, node): + """return an astroid.Import node as string""" + return 'import %s' % _import_string(node.names) + + def visit_keyword(self, node): + """return an astroid.Keyword node as string""" + if node.arg is None: + return '**%s' % node.value.accept(self) + return '%s=%s' % (node.arg, node.value.accept(self)) + + def visit_lambda(self, node): + """return an astroid.Lambda node as string""" + return 'lambda %s: %s' % (node.args.accept(self), + node.body.accept(self)) + + def visit_list(self, node): + """return an astroid.List node as string""" + return '[%s]' % ', '.join([child.accept(self) for child in node.elts]) + + def visit_listcomp(self, node): + """return an astroid.ListComp node as string""" + return '[%s %s]' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) + + def visit_module(self, node): + """return an astroid.Module node as string""" + docs = node.doc and '"""%s"""\n\n' % node.doc or '' + return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n' + + def visit_name(self, node): + """return an astroid.Name node as string""" + return node.name + + def visit_pass(self, node): + """return an astroid.Pass node as string""" + return 'pass' + + def visit_print(self, node): + """return an astroid.Print node as string""" + nodes = ', '.join([n.accept(self) for n in node.values]) + if not node.nl: + nodes = '%s,' % nodes + if node.dest: + return 'print >> %s, %s' % 
(node.dest.accept(self), nodes) + return 'print %s' % nodes + + def visit_raise(self, node): + """return an astroid.Raise node as string""" + if node.exc: + if node.inst: + if node.tback: + return 'raise %s, %s, %s' % (node.exc.accept(self), + node.inst.accept(self), + node.tback.accept(self)) + return 'raise %s, %s' % (node.exc.accept(self), + node.inst.accept(self)) + return 'raise %s' % node.exc.accept(self) + return 'raise' + + def visit_return(self, node): + """return an astroid.Return node as string""" + if node.value: + return 'return %s' % node.value.accept(self) + else: + return 'return' + + def visit_index(self, node): + """return a astroid.Index node as string""" + return node.value.accept(self) + + def visit_set(self, node): + """return an astroid.Set node as string""" + return '{%s}' % ', '.join([child.accept(self) for child in node.elts]) + + def visit_setcomp(self, node): + """return an astroid.SetComp node as string""" + return '{%s %s}' % (node.elt.accept(self), + ' '.join([n.accept(self) for n in node.generators])) + + def visit_slice(self, node): + """return a astroid.Slice node as string""" + lower = node.lower and node.lower.accept(self) or '' + upper = node.upper and node.upper.accept(self) or '' + step = node.step and node.step.accept(self) or '' + if step: + return '%s:%s:%s' % (lower, upper, step) + return '%s:%s' % (lower, upper) + + def visit_subscript(self, node): + """return an astroid.Subscript node as string""" + return '%s[%s]' % (node.value.accept(self), node.slice.accept(self)) + + def visit_tryexcept(self, node): + """return an astroid.TryExcept node as string""" + trys = ['try:\n%s' % self._stmt_list(node.body)] + for handler in node.handlers: + trys.append(handler.accept(self)) + if node.orelse: + trys.append('else:\n%s' % self._stmt_list(node.orelse)) + return '\n'.join(trys) + + def visit_tryfinally(self, node): + """return an astroid.TryFinally node as string""" + return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body), 
+ self._stmt_list(node.finalbody)) + + def visit_tuple(self, node): + """return an astroid.Tuple node as string""" + if len(node.elts) == 1: + return '(%s, )' % node.elts[0].accept(self) + return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) + + def visit_unaryop(self, node): + """return an astroid.UnaryOp node as string""" + if node.op == 'not': + operator = 'not ' + else: + operator = node.op + return '%s%s' % (operator, node.operand.accept(self)) + + def visit_while(self, node): + """return an astroid.While node as string""" + whiles = 'while %s:\n%s' % (node.test.accept(self), + self._stmt_list(node.body)) + if node.orelse: + whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse)) + return whiles + + def visit_with(self, node): # 'with' without 'as' is possible + """return an astroid.With node as string""" + items = ', '.join(('(%s)' % expr.accept(self)) + + (vars and ' as (%s)' % (vars.accept(self)) or '') + for expr, vars in node.items) + return 'with %s:\n%s' % (items, self._stmt_list(node.body)) + + def visit_yield(self, node): + """yield an ast.Yield node as string""" + yi_val = node.value and (" " + node.value.accept(self)) or "" + expr = 'yield' + yi_val + if node.parent.is_statement: + return expr + else: + return "(%s)" % (expr,) + + def visit_starred(self, node): + """return Starred node as string""" + return "*" + node.value.accept(self) + + + # These aren't for real AST nodes, but for inference objects. 
+ + def visit_frozenset(self, node): + return node.parent.accept(self) + + def visit_super(self, node): + return node.parent.accept(self) + + def visit_yes(self, node): + return "Uninferable" + + +class AsStringVisitor3k(AsStringVisitor): + """AsStringVisitor3k overwrites some AsStringVisitor methods""" + + def visit_excepthandler(self, node): + if node.type: + if node.name: + excs = 'except %s as %s' % (node.type.accept(self), + node.name.accept(self)) + else: + excs = 'except %s' % node.type.accept(self) + else: + excs = 'except' + return '%s:\n%s' % (excs, self._stmt_list(node.body)) + + def visit_nonlocal(self, node): + """return an astroid.Nonlocal node as string""" + return 'nonlocal %s' % ', '.join(node.names) + + def visit_raise(self, node): + """return an astroid.Raise node as string""" + if node.exc: + if node.cause: + return 'raise %s from %s' % (node.exc.accept(self), + node.cause.accept(self)) + return 'raise %s' % node.exc.accept(self) + return 'raise' + + def visit_yieldfrom(self, node): + """ Return an astroid.YieldFrom node as string. 
""" + yi_val = node.value and (" " + node.value.accept(self)) or "" + expr = 'yield from' + yi_val + if node.parent.is_statement: + return expr + else: + return "(%s)" % (expr,) + + def visit_asyncfunctiondef(self, node): + function = super(AsStringVisitor3k, self).visit_functiondef(node) + return 'async ' + function.strip() + + def visit_await(self, node): + return 'await %s' % node.value.accept(self) + + def visit_asyncwith(self, node): + return 'async %s' % self.visit_with(node) + + def visit_asyncfor(self, node): + return 'async %s' % self.visit_for(node) + + +def _import_string(names): + """return a list of (name, asname) formatted as a string""" + _names = [] + for name, asname in names: + if asname is not None: + _names.append('%s as %s' % (name, asname)) + else: + _names.append(name) + return ', '.join(_names) + + +if sys.version_info >= (3, 0): + AsStringVisitor = AsStringVisitor3k + +# this visitor is stateless, thus it can be reused +to_code = AsStringVisitor() diff --git a/pymode/libs/astroid/astpeephole.py b/pymode/libs/astroid/astpeephole.py new file mode 100644 index 00000000..af03462a --- /dev/null +++ b/pymode/libs/astroid/astpeephole.py @@ -0,0 +1,86 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""Small AST optimizations.""" + +import _ast + +from astroid import nodes + + +__all__ = ('ASTPeepholeOptimizer', ) + + +try: + _TYPES = (_ast.Str, _ast.Bytes) +except AttributeError: + _TYPES = (_ast.Str, ) + + +class ASTPeepholeOptimizer(object): + """Class for applying small optimizations to generate new AST.""" + + def optimize_binop(self, node): + """Optimize BinOps with string Const nodes on the lhs. + + This fixes an infinite recursion crash, where multiple + strings are joined using the addition operator. With a + sufficient number of such strings, astroid will fail + with a maximum recursion limit exceeded. The + function will return a Const node with all the strings + already joined. + Return ``None`` if no AST node can be obtained + through optimization. + """ + ast_nodes = [] + current = node + while isinstance(current, _ast.BinOp): + # lhs must be a BinOp with the addition operand. + if not isinstance(current.left, _ast.BinOp): + return + if (not isinstance(current.left.op, _ast.Add) + or not isinstance(current.op, _ast.Add)): + return + + # rhs must a str / bytes. + if not isinstance(current.right, _TYPES): + return + + ast_nodes.append(current.right.s) + current = current.left + + if (isinstance(current, _ast.BinOp) + and isinstance(current.left, _TYPES) + and isinstance(current.right, _TYPES)): + # Stop early if we are at the last BinOp in + # the operation + ast_nodes.append(current.right.s) + ast_nodes.append(current.left.s) + break + + if not ast_nodes: + return + + # If we have inconsistent types, bail out. 
+ known = type(ast_nodes[0]) + if any(type(element) is not known + for element in ast_nodes[1:]): + return + + value = known().join(reversed(ast_nodes)) + newnode = nodes.Const(value) + return newnode diff --git a/pylibs/logilab/astng/bases.py b/pymode/libs/astroid/bases.py similarity index 60% rename from pylibs/logilab/astng/bases.py rename to pymode/libs/astroid/bases.py index 92f12aad..8dfa8126 100644 --- a/pylibs/logilab/astng/bases.py +++ b/pymode/libs/astroid/bases.py @@ -1,43 +1,67 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com # -# This file is part of logilab-astng. +# This file is part of astroid. # -# logilab-astng is free software: you can redistribute it and/or modify it +# astroid is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 2.1 of the License, or (at your # option) any later version. # -# logilab-astng is distributed in the hope that it will be useful, but +# astroid is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License # for more details. # # You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . +# with astroid. If not, see . """This module contains base classes and functions for the nodes and some inference utils. 
""" +import functools +import sys +import warnings + +import wrapt + +from astroid import context as contextmod +from astroid import decorators as decoratorsmod +from astroid import exceptions +from astroid import util + + +if sys.version_info >= (3, 0): + BUILTINS = 'builtins' +else: + BUILTINS = '__builtin__' +PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'} +# List of possible property names. We use this list in order +# to see if a method is a property or not. This should be +# pretty reliable and fast, the alternative being to check each +# decorator to see if its a real property-like descriptor, which +# can be too complicated. +# Also, these aren't qualified, because each project can +# define them, we shouldn't expect to know every possible +# property-like decorator! +# TODO(cpopa): just implement descriptors already. +POSSIBLE_PROPERTIES = {"cached_property", "cachedproperty", + "lazyproperty", "lazy_property", "reify", + "lazyattribute", "lazy_attribute", + "LazyProperty", "lazy"} + + +def _is_property(meth): + if PROPERTIES.intersection(meth.decoratornames()): + return True + stripped = {name.split(".")[-1] for name in meth.decoratornames() + if name is not util.YES} + return any(name in stripped for name in POSSIBLE_PROPERTIES) -__docformat__ = "restructuredtext en" - -from contextlib import contextmanager - -from logilab.common.compat import builtins - -from logilab.astng import BUILTINS_MODULE -from logilab.astng.exceptions import InferenceError, ASTNGError, \ - NotFoundError, UnresolvableName -from logilab.astng.as_string import as_string - -BUILTINS_NAME = builtins.__name__ class Proxy(object): """a simple proxy object""" - _proxied = None + + _proxied = None # proxied object may be set by class or by instance def __init__(self, proxied=None): if proxied is not None: @@ -54,92 +78,34 @@ def infer(self, context=None): yield self -# Inference ################################################################## - -class 
InferenceContext(object): - __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode') - - def __init__(self, path=None): - if path is None: - self.path = set() - else: - self.path = path - self.lookupname = None - self.callcontext = None - self.boundnode = None - - def push(self, node): - name = self.lookupname - if (node, name) in self.path: - raise StopIteration() - self.path.add( (node, name) ) - - def clone(self): - # XXX copy lookupname/callcontext ? - clone = InferenceContext(self.path) - clone.callcontext = self.callcontext - clone.boundnode = self.boundnode - return clone - - @contextmanager - def restore_path(self): - path = set(self.path) - yield - self.path = path - -def copy_context(context): - if context is not None: - return context.clone() - else: - return InferenceContext() - - def _infer_stmts(stmts, context, frame=None): - """return an iterator on statements inferred by each statement in - """ + """Return an iterator on statements inferred by each statement in *stmts*.""" stmt = None - infered = False + inferred = False if context is not None: name = context.lookupname context = context.clone() else: name = None - context = InferenceContext() + context = contextmod.InferenceContext() + for stmt in stmts: - if stmt is YES: + if stmt is util.YES: yield stmt - infered = True + inferred = True continue context.lookupname = stmt._infer_name(frame, name) try: - for infered in stmt.infer(context): - yield infered - infered = True - except UnresolvableName: + for inferred in stmt.infer(context=context): + yield inferred + inferred = True + except exceptions.UnresolvableName: continue - except InferenceError: - yield YES - infered = True - if not infered: - raise InferenceError(str(stmt)) - - -# special inference objects (e.g. 
may be returned as nodes by .infer()) ####### - -class _Yes(object): - """a yes object""" - def __repr__(self): - return 'YES' - def __getattribute__(self, name): - if name.startswith('__') and name.endswith('__'): - # to avoid inspection pb - return super(_Yes, self).__getattribute__(name) - return self - def __call__(self, *args, **kwargs): - return self - - -YES = _Yes() + except exceptions.InferenceError: + yield util.YES + inferred = True + if not inferred: + raise exceptions.InferenceError(str(stmt)) class Instance(Proxy): @@ -147,7 +113,7 @@ class Instance(Proxy): def getattr(self, name, context=None, lookupclass=True): try: values = self._proxied.instance_attr(name, context) - except NotFoundError: + except exceptions.NotFoundError: if name == '__class__': return [self._proxied] if lookupclass: @@ -156,53 +122,73 @@ def getattr(self, name, context=None, lookupclass=True): if name in ('__name__', '__bases__', '__mro__', '__subclasses__'): return self._proxied.local_attr(name) return self._proxied.getattr(name, context) - raise NotFoundError(name) + raise exceptions.NotFoundError(name) # since we've no context information, return matching class members as # well if lookupclass: try: return values + self._proxied.getattr(name, context) - except NotFoundError: + except exceptions.NotFoundError: pass return values def igetattr(self, name, context=None): """inferred getattr""" + if not context: + context = contextmod.InferenceContext() try: + # avoid recursively inferring the same attr on the same class + context.push((self._proxied, name)) # XXX frame should be self._proxied, or not ? 
get_attr = self.getattr(name, context, lookupclass=False) - return _infer_stmts(self._wrap_attr(get_attr, context), context, - frame=self) - except NotFoundError: + return _infer_stmts( + self._wrap_attr(get_attr, context), + context, + frame=self, + ) + except exceptions.NotFoundError: try: # fallback to class'igetattr since it has some logic to handle # descriptors return self._wrap_attr(self._proxied.igetattr(name, context), context) - except NotFoundError: - raise InferenceError(name) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) def _wrap_attr(self, attrs, context=None): """wrap bound methods of attrs in a InstanceMethod proxies""" for attr in attrs: if isinstance(attr, UnboundMethod): - if BUILTINS_NAME + '.property' in attr.decoratornames(): - for infered in attr.infer_call_result(self, context): - yield infered + if _is_property(attr): + for inferred in attr.infer_call_result(self, context): + yield inferred else: yield BoundMethod(attr, self) + elif hasattr(attr, 'name') and attr.name == '': + # This is a lambda function defined at class level, + # since its scope is the underlying _proxied class. + # Unfortunately, we can't do an isinstance check here, + # because of the circular dependency between astroid.bases + # and astroid.scoped_nodes. 
+ if attr.statement().scope() == self._proxied: + if attr.args.args and attr.args.args[0].name == 'self': + yield BoundMethod(attr, self) + continue + yield attr else: yield attr def infer_call_result(self, caller, context=None): """infer what a class instance is returning when called""" - infered = False + inferred = False for node in self._proxied.igetattr('__call__', context): + if node is util.YES or not node.callable(): + continue for res in node.infer_call_result(caller, context): - infered = True + inferred = True yield res - if not infered: - raise InferenceError() + if not inferred: + raise exceptions.InferenceError() def __repr__(self): return '' % (self._proxied.root().name, @@ -216,7 +202,7 @@ def callable(self): try: self._proxied.getattr('__call__') return True - except NotFoundError: + except exceptions.NotFoundError: return False def pytype(self): @@ -226,6 +212,12 @@ def display_type(self): return 'Instance of' + # TODO(cpopa): this is set in inference.py + # The circular dependency hell goes deeper and deeper. + # pylint: disable=unused-argument + def getitem(self, index, context=None): + pass + class UnboundMethod(Proxy): """a special node representing a method not bound to an instance""" def __repr__(self): @@ -240,44 +232,51 @@ def is_bound(self): def getattr(self, name, context=None): if name == 'im_func': return [self._proxied] - return super(UnboundMethod, self).getattr(name, context) + return self._proxied.getattr(name, context) def igetattr(self, name, context=None): if name == 'im_func': return iter((self._proxied,)) - return super(UnboundMethod, self).igetattr(name, context) + return self._proxied.igetattr(name, context) def infer_call_result(self, caller, context): # If we're unbound method __new__ of builtin object, the result is an # instance of the class given as first argument. 
if (self._proxied.name == '__new__' and - self._proxied.parent.frame().qname() == '%s.object' % BUILTINS_MODULE): - return (x is YES and x or Instance(x) for x in caller.args[0].infer()) + self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): + infer = caller.args[0].infer() if caller.args else [] + return ((x is util.YES and x or Instance(x)) for x in infer) return self._proxied.infer_call_result(caller, context) class BoundMethod(UnboundMethod): """a special node representing a method bound to an instance""" - def __init__(self, proxy, bound): + def __init__(self, proxy, bound): UnboundMethod.__init__(self, proxy) self.bound = bound def is_bound(self): return True - def infer_call_result(self, caller, context): + def infer_call_result(self, caller, context=None): + + if context is None: + context = contextmod.InferenceContext() context = context.clone() context.boundnode = self.bound - return self._proxied.infer_call_result(caller, context) + return super(BoundMethod, self).infer_call_result(caller, context) class Generator(Instance): - """a special node representing a generator""" + """a special node representing a generator. + + Proxied class is set once for all in raw_building. 
+ """ def callable(self): - return True + return False def pytype(self): - return '%s.generator' % BUILTINS_MODULE + return '%s.generator' % BUILTINS def display_type(self): return 'Generator' @@ -293,10 +292,11 @@ def __str__(self): def path_wrapper(func): """return the given infer function wrapped to handle the path""" + @functools.wraps(func) def wrapped(node, context=None, _func=func, **kwargs): """wrapper function handling context""" if context is None: - context = InferenceContext() + context = contextmod.InferenceContext() context.push(node) yielded = set() for res in _func(node, context, **kwargs): @@ -305,42 +305,40 @@ def wrapped(node, context=None, _func=func, **kwargs): ares = res._proxied else: ares = res - if not ares in yielded: + if ares not in yielded: yield res yielded.add(ares) return wrapped -def yes_if_nothing_infered(func): - def wrapper(*args, **kwargs): - infered = False - for node in func(*args, **kwargs): - infered = True - yield node - if not infered: - yield YES - return wrapper - -def raise_if_nothing_infered(func): - def wrapper(*args, **kwargs): - infered = False - for node in func(*args, **kwargs): - infered = True - yield node - if not infered: - raise InferenceError() - return wrapper +@wrapt.decorator +def yes_if_nothing_inferred(func, instance, args, kwargs): + inferred = False + for node in func(*args, **kwargs): + inferred = True + yield node + if not inferred: + yield util.YES + +@wrapt.decorator +def raise_if_nothing_inferred(func, instance, args, kwargs): + inferred = False + for node in func(*args, **kwargs): + inferred = True + yield node + if not inferred: + raise exceptions.InferenceError() # Node ###################################################################### class NodeNG(object): - """Base Class for all ASTNG node classes. + """Base Class for all Astroid node classes. It represents a node of the new abstract syntax tree. 
""" is_statement = False - optional_assign = False # True for For (and for Comprehension if py <3.0) - is_function = False # True for Function nodes + optional_assign = False # True for For (and for Comprehension if py <3.0) + is_function = False # True for FunctionDef nodes # attributes below are set by the builder module or by raw factories lineno = None fromlineno = None @@ -349,7 +347,33 @@ class NodeNG(object): # parent node in the tree parent = None # attributes containing child node(s) redefined in most concrete classes: - _astng_fields = () + _astroid_fields = () + # instance specific inference function infer(node, context) + _explicit_inference = None + + def infer(self, context=None, **kwargs): + """main interface to the interface system, return a generator on infered + values. + + If the instance has some explicit inference function set, it will be + called instead of the default interface. + """ + if self._explicit_inference is not None: + # explicit_inference is not bound, give it self explicitly + try: + return self._explicit_inference(self, context, **kwargs) + except exceptions.UseInferenceDefault: + pass + + if not context: + return self._infer(context, **kwargs) + + key = (self, context.lookupname, + context.callcontext, context.boundnode) + if key in context.inferred: + return iter(context.inferred[key]) + + return context.cache_generator(key, self._infer(context, **kwargs)) def _repr_name(self): """return self.name or self.attrname or '' for nice representation""" @@ -359,20 +383,19 @@ def __str__(self): return '%s(%s)' % (self.__class__.__name__, self._repr_name()) def __repr__(self): - return '<%s(%s) l.%s [%s] at Ox%x>' % (self.__class__.__name__, - self._repr_name(), - self.fromlineno, - self.root().name, - id(self)) + return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__, + self._repr_name(), + self.fromlineno, + self.root().name, + id(self)) def accept(self, visitor): - klass = self.__class__.__name__ func = getattr(visitor, 
"visit_" + self.__class__.__name__.lower()) return func(self) def get_children(self): - for field in self._astng_fields: + for field in self._astroid_fields: attr = getattr(self, field) if attr is None: continue @@ -384,7 +407,7 @@ def get_children(self): def last_child(self): """an optimized version of list(get_children())[-1]""" - for field in self._astng_fields[::-1]: + for field in self._astroid_fields[::-1]: attr = getattr(self, field) if not attr: # None or empty listy / tuple continue @@ -410,13 +433,16 @@ def statement(self): return self.parent.statement() def frame(self): - """return the first parent frame node (i.e. Module, Function or Class) + """return the first parent frame node (i.e. Module, FunctionDef or + ClassDef) + """ return self.parent.frame() def scope(self): - """return the first node defining a new scope (i.e. Module, Function, - Class, Lambda but also GenExpr) + """return the first node defining a new scope (i.e. Module, + FunctionDef, ClassDef, Lambda but also GenExpr) + """ return self.parent.scope() @@ -428,28 +454,29 @@ def root(self): def child_sequence(self, child): """search for the right sequence where the child lies in""" - for field in self._astng_fields: + for field in self._astroid_fields: node_or_sequence = getattr(self, field) if node_or_sequence is child: return [node_or_sequence] # /!\ compiler.ast Nodes have an __iter__ walking over child nodes - if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: + if (isinstance(node_or_sequence, (tuple, list)) + and child in node_or_sequence): return node_or_sequence - else: - msg = 'Could not found %s in %s\'s children' - raise ASTNGError(msg % (repr(child), repr(self))) + + msg = 'Could not find %s in %s\'s children' + raise exceptions.AstroidError(msg % (repr(child), repr(self))) def locate_child(self, child): """return a 2-uple (child attribute name, sequence or node)""" - for field in self._astng_fields: + for field in self._astroid_fields: node_or_sequence 
= getattr(self, field) # /!\ compiler.ast Nodes have an __iter__ walking over child nodes if child is node_or_sequence: return field, child if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: return field, node_or_sequence - msg = 'Could not found %s in %s\'s children' - raise ASTNGError(msg % (repr(child), repr(self))) + msg = 'Could not find %s in %s\'s children' + raise exceptions.AstroidError(msg % (repr(child), repr(self))) # FIXME : should we merge child_sequence and locate_child ? locate_child # is only used in are_exclusive, child_sequence one time in pylint. @@ -479,16 +506,28 @@ def nearest(self, nodes): # FIXME: raise an exception if nearest is None ? return nearest[0] - def set_line_info(self, lastchild): + # these are lazy because they're relatively expensive to compute for every + # single node, and they rarely get looked at + + @decoratorsmod.cachedproperty + def fromlineno(self): if self.lineno is None: - self.fromlineno = self._fixed_source_line() + return self._fixed_source_line() + else: + return self.lineno + + @decoratorsmod.cachedproperty + def tolineno(self): + if not self._astroid_fields: + # can't have children + lastchild = None else: - self.fromlineno = self.lineno + lastchild = self.last_child() if lastchild is None: - self.tolineno = self.fromlineno + return self.fromlineno else: - self.tolineno = lastchild.tolineno - return + return lastchild.tolineno + # TODO / FIXME: assert self.fromlineno is not None, self assert self.tolineno is not None, self @@ -503,7 +542,7 @@ def _fixed_source_line(self): _node = self try: while line is None: - _node = _node.get_children().next() + _node = next(_node.get_children()) line = _node.lineno except StopIteration: _node = self.parent @@ -535,20 +574,27 @@ def nodes_of_class(self, klass, skip_klass=None): yield matching def _infer_name(self, frame, name): - # overridden for From, Import, Global, TryExcept and Arguments + # overridden for ImportFrom, Import, Global, TryExcept 
and Arguments return None - def infer(self, context=None): + def _infer(self, context=None): """we don't know how to resolve a statement by default""" # this method is overridden by most concrete classes - raise InferenceError(self.__class__.__name__) + raise exceptions.InferenceError(self.__class__.__name__) - def infered(self): - '''return list of infered values for a more simple inference usage''' + def inferred(self): + '''return list of inferred values for a more simple inference usage''' return list(self.infer()) + def infered(self): + warnings.warn('%s.infered() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.inferred() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.inferred() + def instanciate_class(self): - """instanciate a node if it is a Class node, else return self""" + """instanciate a node if it is a ClassDef node, else return self""" return self def has_base(self, node): @@ -561,15 +607,12 @@ def eq(self, value): return False def as_string(self): - return as_string(self) + from astroid.as_string import to_code + return to_code(self) def repr_tree(self, ids=False): - """print a nice astng tree representation. - - :param ids: if true, we also print the ids (usefull for debugging)""" - result = [] - _repr_tree(self, result, ids=ids) - return "\n".join(result) + from astroid.as_string import dump + return dump(self) class Statement(NodeNG): @@ -591,39 +634,3 @@ def previous_sibling(self): index = stmts.index(self) if index >= 1: return stmts[index -1] - -INDENT = " " - -def _repr_tree(node, result, indent='', _done=None, ids=False): - """built a tree representation of a node as a list of lines""" - if _done is None: - _done = set() - if not hasattr(node, '_astng_fields'): # not a astng node - return - if node in _done: - result.append( indent + 'loop in tree: %s' % node ) - return - _done.add(node) - node_str = str(node) - if ids: - node_str += ' . 
\t%x' % id(node) - result.append( indent + node_str ) - indent += INDENT - for field in node._astng_fields: - value = getattr(node, field) - if isinstance(value, (list, tuple) ): - result.append( indent + field + " = [" ) - for child in value: - if isinstance(child, (list, tuple) ): - # special case for Dict # FIXME - _repr_tree(child[0], result, indent, _done, ids) - _repr_tree(child[1], result, indent, _done, ids) - result.append(indent + ',') - else: - _repr_tree(child, result, indent, _done, ids) - result.append( indent + "]" ) - else: - result.append( indent + field + " = " ) - _repr_tree(value, result, indent, _done, ids) - - diff --git a/pymode/libs/astroid/brain/brain_builtin_inference.py b/pymode/libs/astroid/brain/brain_builtin_inference.py new file mode 100644 index 00000000..ed78111f --- /dev/null +++ b/pymode/libs/astroid/brain/brain_builtin_inference.py @@ -0,0 +1,336 @@ +"""Astroid hooks for various builtins.""" + +import sys +from functools import partial +from textwrap import dedent + +import six +from astroid import (MANAGER, UseInferenceDefault, + inference_tip, YES, InferenceError, UnresolvableName) +from astroid import arguments +from astroid import nodes +from astroid import objects +from astroid.builder import AstroidBuilder +from astroid import util + +def _extend_str(class_node, rvalue): + """function to extend builtin str/unicode class""" + # TODO(cpopa): this approach will make astroid to believe + # that some arguments can be passed by keyword, but + # unfortunately, strings and bytes don't accept keyword arguments. 
+ code = dedent(''' + class whatever(object): + def join(self, iterable): + return {rvalue} + def replace(self, old, new, count=None): + return {rvalue} + def format(self, *args, **kwargs): + return {rvalue} + def encode(self, encoding='ascii', errors=None): + return '' + def decode(self, encoding='ascii', errors=None): + return u'' + def capitalize(self): + return {rvalue} + def title(self): + return {rvalue} + def lower(self): + return {rvalue} + def upper(self): + return {rvalue} + def swapcase(self): + return {rvalue} + def index(self, sub, start=None, end=None): + return 0 + def find(self, sub, start=None, end=None): + return 0 + def count(self, sub, start=None, end=None): + return 0 + def strip(self, chars=None): + return {rvalue} + def lstrip(self, chars=None): + return {rvalue} + def rstrip(self, chars=None): + return {rvalue} + def rjust(self, width, fillchar=None): + return {rvalue} + def center(self, width, fillchar=None): + return {rvalue} + def ljust(self, width, fillchar=None): + return {rvalue} + ''') + code = code.format(rvalue=rvalue) + fake = AstroidBuilder(MANAGER).string_build(code)['whatever'] + for method in fake.mymethods(): + class_node._locals[method.name] = [method] + method.parent = class_node + +def extend_builtins(class_transforms): + from astroid.bases import BUILTINS + builtin_ast = MANAGER.astroid_cache[BUILTINS] + for class_name, transform in class_transforms.items(): + transform(builtin_ast[class_name]) + +if sys.version_info > (3, 0): + extend_builtins({'bytes': partial(_extend_str, rvalue="b''"), + 'str': partial(_extend_str, rvalue="''")}) +else: + extend_builtins({'str': partial(_extend_str, rvalue="''"), + 'unicode': partial(_extend_str, rvalue="u''")}) + + +def register_builtin_transform(transform, builtin_name): + """Register a new transform function for the given *builtin_name*. + + The transform function must accept two parameters, a node and + an optional context. 
+ """ + def _transform_wrapper(node, context=None): + result = transform(node, context=context) + if result: + if not result.parent: + # Let the transformation function determine + # the parent for its result. Otherwise, + # we set it to be the node we transformed from. + result.parent = node + + result.lineno = node.lineno + result.col_offset = node.col_offset + return iter([result]) + + MANAGER.register_transform(nodes.Call, + inference_tip(_transform_wrapper), + lambda n: (isinstance(n.func, nodes.Name) and + n.func.name == builtin_name)) + + +def _generic_inference(node, context, node_type, transform): + args = node.args + if not args: + return node_type() + if len(node.args) > 1: + raise UseInferenceDefault() + + arg, = args + transformed = transform(arg) + if not transformed: + try: + inferred = next(arg.infer(context=context)) + except (InferenceError, StopIteration): + raise UseInferenceDefault() + if inferred is util.YES: + raise UseInferenceDefault() + transformed = transform(inferred) + if not transformed or transformed is util.YES: + raise UseInferenceDefault() + return transformed + + +def _generic_transform(arg, klass, iterables, build_elts): + if isinstance(arg, klass): + return arg + elif isinstance(arg, iterables): + if not all(isinstance(elt, nodes.Const) + for elt in arg.elts): + # TODO(cpopa): Don't support heterogenous elements. + # Not yet, though. 
+ raise UseInferenceDefault() + elts = [elt.value for elt in arg.elts] + elif isinstance(arg, nodes.Dict): + if not all(isinstance(elt[0], nodes.Const) + for elt in arg.items): + raise UseInferenceDefault() + elts = [item[0].value for item in arg.items] + elif (isinstance(arg, nodes.Const) and + isinstance(arg.value, (six.string_types, six.binary_type))): + elts = arg.value + else: + return + return klass(elts=build_elts(elts)) + + +def _infer_builtin(node, context, + klass=None, iterables=None, + build_elts=None): + transform_func = partial( + _generic_transform, + klass=klass, + iterables=iterables, + build_elts=build_elts) + + return _generic_inference(node, context, klass, transform_func) + +# pylint: disable=invalid-name +infer_tuple = partial( + _infer_builtin, + klass=nodes.Tuple, + iterables=(nodes.List, nodes.Set), + build_elts=tuple) + +infer_list = partial( + _infer_builtin, + klass=nodes.List, + iterables=(nodes.Tuple, nodes.Set), + build_elts=list) + +infer_set = partial( + _infer_builtin, + klass=nodes.Set, + iterables=(nodes.List, nodes.Tuple), + build_elts=set) + +infer_frozenset = partial( + _infer_builtin, + klass=objects.FrozenSet, + iterables=(nodes.List, nodes.Tuple, nodes.Set), + build_elts=frozenset) + + +def _get_elts(arg, context): + is_iterable = lambda n: isinstance(n, + (nodes.List, nodes.Tuple, nodes.Set)) + try: + inferred = next(arg.infer(context)) + except (InferenceError, UnresolvableName): + raise UseInferenceDefault() + if isinstance(inferred, nodes.Dict): + items = inferred.items + elif is_iterable(inferred): + items = [] + for elt in inferred.elts: + # If an item is not a pair of two items, + # then fallback to the default inference. + # Also, take in consideration only hashable items, + # tuples and consts. We are choosing Names as well. 
+ if not is_iterable(elt): + raise UseInferenceDefault() + if len(elt.elts) != 2: + raise UseInferenceDefault() + if not isinstance(elt.elts[0], + (nodes.Tuple, nodes.Const, nodes.Name)): + raise UseInferenceDefault() + items.append(tuple(elt.elts)) + else: + raise UseInferenceDefault() + return items + +def infer_dict(node, context=None): + """Try to infer a dict call to a Dict node. + + The function treats the following cases: + + * dict() + * dict(mapping) + * dict(iterable) + * dict(iterable, **kwargs) + * dict(mapping, **kwargs) + * dict(**kwargs) + + If a case can't be inferred, we'll fallback to default inference. + """ + call = arguments.CallSite.from_call(node) + if call.has_invalid_arguments() or call.has_invalid_keywords(): + raise UseInferenceDefault + + args = call.positional_arguments + kwargs = list(call.keyword_arguments.items()) + + if not args and not kwargs: + # dict() + return nodes.Dict() + elif kwargs and not args: + # dict(a=1, b=2, c=4) + items = [(nodes.Const(key), value) for key, value in kwargs] + elif len(args) == 1 and kwargs: + # dict(some_iterable, b=2, c=4) + elts = _get_elts(args[0], context) + keys = [(nodes.Const(key), value) for key, value in kwargs] + items = elts + keys + elif len(args) == 1: + items = _get_elts(args[0], context) + else: + raise UseInferenceDefault() + + empty = nodes.Dict() + empty.items = items + return empty + + +def _node_class(node): + klass = node.frame() + while klass is not None and not isinstance(klass, nodes.ClassDef): + if klass.parent is None: + klass = None + else: + klass = klass.parent.frame() + return klass + + +def infer_super(node, context=None): + """Understand super calls. + + There are some restrictions for what can be understood: + + * unbounded super (one argument form) is not understood. + + * if the super call is not inside a function (classmethod or method), + then the default inference will be used. + + * if the super arguments can't be infered, the default inference + will be used. 
+ """ + if len(node.args) == 1: + # Ignore unbounded super. + raise UseInferenceDefault + + scope = node.scope() + if not isinstance(scope, nodes.FunctionDef): + # Ignore non-method uses of super. + raise UseInferenceDefault + if scope.type not in ('classmethod', 'method'): + # Not interested in staticmethods. + raise UseInferenceDefault + + cls = _node_class(scope) + if not len(node.args): + mro_pointer = cls + # In we are in a classmethod, the interpreter will fill + # automatically the class as the second argument, not an instance. + if scope.type == 'classmethod': + mro_type = cls + else: + mro_type = cls.instantiate_class() + else: + # TODO(cpopa): support flow control (multiple inference values). + try: + mro_pointer = next(node.args[0].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + try: + mro_type = next(node.args[1].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + + if mro_pointer is YES or mro_type is YES: + # No way we could understand this. 
+ raise UseInferenceDefault + + super_obj = objects.Super(mro_pointer=mro_pointer, + mro_type=mro_type, + self_class=cls, + scope=scope) + super_obj.parent = node + return iter([super_obj]) + + +# Builtins inference +MANAGER.register_transform(nodes.Call, + inference_tip(infer_super), + lambda n: (isinstance(n.func, nodes.Name) and + n.func.name == 'super')) + +register_builtin_transform(infer_tuple, 'tuple') +register_builtin_transform(infer_set, 'set') +register_builtin_transform(infer_list, 'list') +register_builtin_transform(infer_dict, 'dict') +register_builtin_transform(infer_frozenset, 'frozenset') diff --git a/pymode/libs/astroid/brain/brain_dateutil.py b/pymode/libs/astroid/brain/brain_dateutil.py new file mode 100644 index 00000000..d077327b --- /dev/null +++ b/pymode/libs/astroid/brain/brain_dateutil.py @@ -0,0 +1,15 @@ +"""Astroid hooks for dateutil""" + +import textwrap + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + +def dateutil_transform(): + return AstroidBuilder(MANAGER).string_build(textwrap.dedent(''' + import datetime + def parse(timestr, parserinfo=None, **kwargs): + return datetime.datetime() + ''')) + +register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform) diff --git a/pymode/libs/astroid/brain/brain_gi.py b/pymode/libs/astroid/brain/brain_gi.py new file mode 100644 index 00000000..d9fc1b45 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_gi.py @@ -0,0 +1,195 @@ +"""Astroid hooks for the Python 2 GObject introspection bindings. 
+ +Helps with understanding everything imported from 'gi.repository' +""" + +import inspect +import itertools +import sys +import re +import warnings + +from astroid import MANAGER, AstroidBuildingException, nodes +from astroid.builder import AstroidBuilder + + +_inspected_modules = {} + +_identifier_re = r'^[A-Za-z_]\w*$' + +def _gi_build_stub(parent): + """ + Inspect the passed module recursively and build stubs for functions, + classes, etc. + """ + classes = {} + functions = {} + constants = {} + methods = {} + for name in dir(parent): + if name.startswith("__"): + continue + + # Check if this is a valid name in python + if not re.match(_identifier_re, name): + continue + + try: + obj = getattr(parent, name) + except: + continue + + if inspect.isclass(obj): + classes[name] = obj + elif (inspect.isfunction(obj) or + inspect.isbuiltin(obj)): + functions[name] = obj + elif (inspect.ismethod(obj) or + inspect.ismethoddescriptor(obj)): + methods[name] = obj + elif (str(obj).startswith(", ) + # Only accept function calls with two constant arguments + if len(node.args) != 2: + return False + + if not all(isinstance(arg, nodes.Const) for arg in node.args): + return False + + func = node.func + if isinstance(func, nodes.Attribute): + if func.attrname != 'require_version': + return False + if isinstance(func.expr, nodes.Name) and func.expr.name == 'gi': + return True + + return False + + if isinstance(func, nodes.Name): + return func.name == 'require_version' + + return False + +def _register_require_version(node): + # Load the gi.require_version locally + try: + import gi + gi.require_version(node.args[0].value, node.args[1].value) + except Exception: + pass + + return node + +MANAGER.register_failed_import_hook(_import_gi_module) +MANAGER.register_transform(nodes.Call, _register_require_version, _looks_like_require_version) diff --git a/pymode/libs/astroid/brain/brain_mechanize.py b/pymode/libs/astroid/brain/brain_mechanize.py new file mode 100644 index 
00000000..20a253a4 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_mechanize.py @@ -0,0 +1,18 @@ +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + +def mechanize_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +class Browser(object): + def open(self, url, data=None, timeout=None): + return None + def open_novisit(self, url, data=None, timeout=None): + return None + def open_local_file(self, filename): + return None + +''') + + +register_module_extender(MANAGER, 'mechanize', mechanize_transform) diff --git a/pymode/libs/astroid/brain/brain_nose.py b/pymode/libs/astroid/brain/brain_nose.py new file mode 100644 index 00000000..4b077843 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_nose.py @@ -0,0 +1,82 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +"""Hooks for nose library.""" + +import re +import textwrap + +import astroid +import astroid.builder + +_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER) + + +def _pep8(name, caps=re.compile('([A-Z])')): + return caps.sub(lambda m: '_' + m.groups()[0].lower(), name) + + +def _nose_tools_functions(): + """Get an iterator of names and bound methods.""" + module = _BUILDER.string_build(textwrap.dedent(''' + import unittest + + class Test(unittest.TestCase): + pass + a = Test() + ''')) + try: + case = next(module['a'].infer()) + except astroid.InferenceError: + return + for method in case.methods(): + if method.name.startswith('assert') and '_' not in method.name: + pep8_name = _pep8(method.name) + yield pep8_name, astroid.BoundMethod(method, case) + if method.name == 'assertEqual': + # nose also exports assert_equals. + yield 'assert_equals', astroid.BoundMethod(method, case) + + +def _nose_tools_transform(node): + for method_name, method in _nose_tools_functions(): + node._locals[method_name] = [method] + + +def _nose_tools_trivial_transform(): + """Custom transform for the nose.tools module.""" + stub = _BUILDER.string_build('''__all__ = []''') + all_entries = ['ok_', 'eq_'] + + for pep8_name, method in _nose_tools_functions(): + all_entries.append(pep8_name) + stub[pep8_name] = method + + # Update the __all__ variable, since nose.tools + # does this manually with .append. 
+ all_assign = stub['__all__'].parent + all_object = astroid.List(all_entries) + all_object.parent = all_assign + all_assign.value = all_object + return stub + + +astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial', + _nose_tools_trivial_transform) +astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform, + lambda n: n.name == 'nose.tools') diff --git a/pymode/libs/astroid/brain/brain_numpy.py b/pymode/libs/astroid/brain/brain_numpy.py new file mode 100644 index 00000000..75f4f18f --- /dev/null +++ b/pymode/libs/astroid/brain/brain_numpy.py @@ -0,0 +1,62 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +"""Astroid hooks for numpy.""" + +import astroid + + +# TODO(cpopa): drop when understanding augmented assignments + +def numpy_core_transform(): + return astroid.parse(''' + from numpy.core import numeric + from numpy.core import fromnumeric + from numpy.core import defchararray + from numpy.core import records + from numpy.core import function_base + from numpy.core import machar + from numpy.core import getlimits + from numpy.core import shape_base + __all__ = (['char', 'rec', 'memmap', 'chararray'] + numeric.__all__ + + fromnumeric.__all__ + + records.__all__ + + function_base.__all__ + + machar.__all__ + + getlimits.__all__ + + shape_base.__all__) + ''') + + +def numpy_transform(): + return astroid.parse(''' + from numpy import core + from numpy import matrixlib as _mat + from numpy import lib + __all__ = ['add_newdocs', + 'ModuleDeprecationWarning', + 'VisibleDeprecationWarning', 'linalg', 'fft', 'random', + 'ctypeslib', 'ma', + '__version__', 'pkgload', 'PackageLoader', + 'show_config'] + core.__all__ + _mat.__all__ + lib.__all__ + + ''') + + +astroid.register_module_extender(astroid.MANAGER, 'numpy.core', numpy_core_transform) +astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_transform) diff --git a/pymode/libs/astroid/brain/brain_pytest.py b/pymode/libs/astroid/brain/brain_pytest.py new file mode 100644 index 00000000..1859b985 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_pytest.py @@ -0,0 +1,76 @@ +"""Astroid hooks for pytest.""" +from __future__ import absolute_import +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + + +def pytest_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +try: + import _pytest.mark + import _pytest.recwarn + import _pytest.runner + import _pytest.python + import _pytest.skipping + import _pytest.assertion +except ImportError: + pass +else: + deprecated_call = _pytest.recwarn.deprecated_call + warns = _pytest.recwarn.warns + + exit = 
_pytest.runner.exit + fail = _pytest.runner.fail + skip = _pytest.runner.skip + importorskip = _pytest.runner.importorskip + + xfail = _pytest.skipping.xfail + mark = _pytest.mark.MarkGenerator() + raises = _pytest.python.raises + + # New in pytest 3.0 + try: + approx = _pytest.python.approx + register_assert_rewrite = _pytest.assertion.register_assert_rewrite + except AttributeError: + pass + + +# Moved in pytest 3.0 + +try: + import _pytest.freeze_support + freeze_includes = _pytest.freeze_support.freeze_includes +except ImportError: + try: + import _pytest.genscript + freeze_includes = _pytest.genscript.freeze_includes + except ImportError: + pass + +try: + import _pytest.debugging + set_trace = _pytest.debugging.pytestPDB().set_trace +except ImportError: + try: + import _pytest.pdb + set_trace = _pytest.pdb.pytestPDB().set_trace + except ImportError: + pass + +try: + import _pytest.fixtures + fixture = _pytest.fixtures.fixture + yield_fixture = _pytest.fixtures.yield_fixture +except ImportError: + try: + import _pytest.python + fixture = _pytest.python.fixture + yield_fixture = _pytest.python.yield_fixture + except ImportError: + pass +''') + +register_module_extender(MANAGER, 'pytest', pytest_transform) +register_module_extender(MANAGER, 'py.test', pytest_transform) diff --git a/pymode/libs/astroid/brain/brain_qt.py b/pymode/libs/astroid/brain/brain_qt.py new file mode 100644 index 00000000..1a03b2be --- /dev/null +++ b/pymode/libs/astroid/brain/brain_qt.py @@ -0,0 +1,44 @@ +"""Astroid hooks for the PyQT library.""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid import nodes +from astroid import parse + + +def _looks_like_signal(node, signal_name='pyqtSignal'): + if '__class__' in node._instance_attrs: + cls = node._instance_attrs['__class__'][0] + return cls.name == signal_name + return False + + +def transform_pyqt_signal(node): + module = parse(''' + class pyqtSignal(object): + def 
connect(self, slot, type=None, no_receiver_check=False): + pass + def disconnect(self, slot): + pass + def emit(self, *args): + pass + ''') + signal_cls = module['pyqtSignal'] + node._instance_attrs['emit'] = signal_cls['emit'] + node._instance_attrs['disconnect'] = signal_cls['disconnect'] + node._instance_attrs['connect'] = signal_cls['connect'] + + +def pyqt4_qtcore_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +def SIGNAL(signal_name): pass + +class QObject(object): + def emit(self, signal): pass +''') + + +register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform) +MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, + _looks_like_signal) \ No newline at end of file diff --git a/pymode/libs/astroid/brain/brain_six.py b/pymode/libs/astroid/brain/brain_six.py new file mode 100644 index 00000000..9596a6c8 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_six.py @@ -0,0 +1,288 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +"""Astroid hooks for six.moves.""" + +import sys +from textwrap import dedent + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid.exceptions import AstroidBuildingException, InferenceError +from astroid import nodes + + +SIX_ADD_METACLASS = 'six.add_metaclass' + + +def _indent(text, prefix, predicate=None): + """Adds 'prefix' to the beginning of selected lines in 'text'. + + If 'predicate' is provided, 'prefix' will only be added to the lines + where 'predicate(line)' is True. If 'predicate' is not provided, + it will default to adding 'prefix' to all non-empty lines that do not + consist solely of whitespace characters. + """ + if predicate is None: + predicate = lambda line: line.strip() + + def prefixed_lines(): + for line in text.splitlines(True): + yield prefix + line if predicate(line) else line + return ''.join(prefixed_lines()) + + +if sys.version_info[0] == 2: + _IMPORTS_2 = """ + import BaseHTTPServer + import CGIHTTPServer + import SimpleHTTPServer + + from StringIO import StringIO + from cStringIO import StringIO as cStringIO + from UserDict import UserDict + from UserList import UserList + from UserString import UserString + + import __builtin__ as builtins + import thread as _thread + import dummy_thread as _dummy_thread + import ConfigParser as configparser + import copy_reg as copyreg + from itertools import (imap as map, + ifilter as filter, + ifilterfalse as filterfalse, + izip_longest as zip_longest, + izip as zip) + import htmlentitydefs as html_entities + import HTMLParser as html_parser + import httplib as http_client + import cookielib as http_cookiejar + import Cookie as http_cookies + import Queue as queue + import repr as reprlib + from pipes import quote as shlex_quote + import SocketServer as socketserver + import SimpleXMLRPCServer as xmlrpc_server + import xmlrpclib as xmlrpc_client + import _winreg as winreg + import robotparser as urllib_robotparser + import Tkinter as 
tkinter + import tkFileDialog as tkinter_tkfiledialog + + input = raw_input + intern = intern + range = xrange + xrange = xrange + reduce = reduce + reload_module = reload + + class UrllibParse(object): + import urlparse as _urlparse + import urllib as _urllib + ParseResult = _urlparse.ParseResult + SplitResult = _urlparse.SplitResult + parse_qs = _urlparse.parse_qs + parse_qsl = _urlparse.parse_qsl + urldefrag = _urlparse.urldefrag + urljoin = _urlparse.urljoin + urlparse = _urlparse.urlparse + urlsplit = _urlparse.urlsplit + urlunparse = _urlparse.urlunparse + urlunsplit = _urlparse.urlunsplit + quote = _urllib.quote + quote_plus = _urllib.quote_plus + unquote = _urllib.unquote + unquote_plus = _urllib.unquote_plus + urlencode = _urllib.urlencode + splitquery = _urllib.splitquery + splittag = _urllib.splittag + splituser = _urllib.splituser + uses_fragment = _urlparse.uses_fragment + uses_netloc = _urlparse.uses_netloc + uses_params = _urlparse.uses_params + uses_query = _urlparse.uses_query + uses_relative = _urlparse.uses_relative + + class UrllibError(object): + import urllib2 as _urllib2 + import urllib as _urllib + URLError = _urllib2.URLError + HTTPError = _urllib2.HTTPError + ContentTooShortError = _urllib.ContentTooShortError + + class DummyModule(object): + pass + + class UrllibRequest(object): + import urlparse as _urlparse + import urllib2 as _urllib2 + import urllib as _urllib + urlopen = _urllib2.urlopen + install_opener = _urllib2.install_opener + build_opener = _urllib2.build_opener + pathname2url = _urllib.pathname2url + url2pathname = _urllib.url2pathname + getproxies = _urllib.getproxies + Request = _urllib2.Request + OpenerDirector = _urllib2.OpenerDirector + HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler + HTTPRedirectHandler = _urllib2.HTTPRedirectHandler + HTTPCookieProcessor = _urllib2.HTTPCookieProcessor + ProxyHandler = _urllib2.ProxyHandler + BaseHandler = _urllib2.BaseHandler + HTTPPasswordMgr = _urllib2.HTTPPasswordMgr + 
HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm + AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler + HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler + ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler + AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler + HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler + ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler + HTTPHandler = _urllib2.HTTPHandler + HTTPSHandler = _urllib2.HTTPSHandler + FileHandler = _urllib2.FileHandler + FTPHandler = _urllib2.FTPHandler + CacheFTPHandler = _urllib2.CacheFTPHandler + UnknownHandler = _urllib2.UnknownHandler + HTTPErrorProcessor = _urllib2.HTTPErrorProcessor + urlretrieve = _urllib.urlretrieve + urlcleanup = _urllib.urlcleanup + proxy_bypass = _urllib.proxy_bypass + + urllib_parse = UrllibParse() + urllib_error = UrllibError() + urllib = DummyModule() + urllib.request = UrllibRequest() + urllib.parse = UrllibParse() + urllib.error = UrllibError() + """ +else: + _IMPORTS_3 = """ + import _io + cStringIO = _io.StringIO + filter = filter + from itertools import filterfalse + input = input + from sys import intern + map = map + range = range + from imp import reload as reload_module + from functools import reduce + from shlex import quote as shlex_quote + from io import StringIO + from collections import UserDict, UserList, UserString + xrange = range + zip = zip + from itertools import zip_longest + import builtins + import configparser + import copyreg + import _dummy_thread + import http.cookiejar as http_cookiejar + import http.cookies as http_cookies + import html.entities as html_entities + import html.parser as html_parser + import http.client as http_client + import http.server + BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server + import pickle as cPickle + import queue + import reprlib + import socketserver + import _thread + import winreg + import xmlrpc.server as xmlrpc_server + import 
xmlrpc.client as xmlrpc_client + import urllib.robotparser as urllib_robotparser + import email.mime.multipart as email_mime_multipart + import email.mime.nonmultipart as email_mime_nonmultipart + import email.mime.text as email_mime_text + import email.mime.base as email_mime_base + import urllib.parse as urllib_parse + import urllib.error as urllib_error + import tkinter + import tkinter.dialog as tkinter_dialog + import tkinter.filedialog as tkinter_filedialog + import tkinter.scrolledtext as tkinter_scrolledtext + import tkinter.simpledialog as tkinder_simpledialog + import tkinter.tix as tkinter_tix + import tkinter.ttk as tkinter_ttk + import tkinter.constants as tkinter_constants + import tkinter.dnd as tkinter_dnd + import tkinter.colorchooser as tkinter_colorchooser + import tkinter.commondialog as tkinter_commondialog + import tkinter.filedialog as tkinter_tkfiledialog + import tkinter.font as tkinter_font + import tkinter.messagebox as tkinter_messagebox + import urllib.request + import urllib.robotparser as urllib_robotparser + import urllib.parse as urllib_parse + import urllib.error as urllib_error + """ +if sys.version_info[0] == 2: + _IMPORTS = dedent(_IMPORTS_2) +else: + _IMPORTS = dedent(_IMPORTS_3) + + +def six_moves_transform(): + code = dedent(''' + class Moves(object): + {} + moves = Moves() + ''').format(_indent(_IMPORTS, " ")) + module = AstroidBuilder(MANAGER).string_build(code) + module.name = 'six.moves' + return module + + +def _six_fail_hook(modname): + if modname != 'six.moves': + raise AstroidBuildingException + module = AstroidBuilder(MANAGER).string_build(_IMPORTS) + module.name = 'six.moves' + return module + +def transform_six_add_metaclass(node): + """Check if the given class node is decorated with *six.add_metaclass* + + If so, inject its argument as the metaclass of the underlying class. 
+ """ + if not node.decorators: + return + + for decorator in node.decorators.nodes: + if not isinstance(decorator, nodes.Call): + continue + + try: + func = next(decorator.func.infer()) + except InferenceError: + continue + if func.qname() == SIX_ADD_METACLASS and decorator.args: + metaclass = decorator.args[0] + node._metaclass = metaclass + return node + + +register_module_extender(MANAGER, 'six', six_moves_transform) +register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six', + six_moves_transform) +MANAGER.register_failed_import_hook(_six_fail_hook) +MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass) diff --git a/pymode/libs/astroid/brain/brain_ssl.py b/pymode/libs/astroid/brain/brain_ssl.py new file mode 100644 index 00000000..1cf8d1b8 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_ssl.py @@ -0,0 +1,65 @@ +"""Astroid hooks for the ssl library.""" + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder +from astroid import nodes +from astroid import parse + + +def ssl_transform(): + return parse(''' + from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION + from _ssl import _SSLContext, MemoryBIO + from _ssl import ( + SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError, + SSLSyscallError, SSLEOFError, + ) + from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED + from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj + from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes + try: + from _ssl import RAND_egd + except ImportError: + # LibreSSL does not provide RAND_egd + pass + from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE, + OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3, + OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2, + OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE) + + from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE, + ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE, + 
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE, + ALERT_DESCRIPTION_BAD_RECORD_MAC, + ALERT_DESCRIPTION_CERTIFICATE_EXPIRED, + ALERT_DESCRIPTION_CERTIFICATE_REVOKED, + ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN, + ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE, + ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR, + ALERT_DESCRIPTION_DECOMPRESSION_FAILURE, + ALERT_DESCRIPTION_DECRYPT_ERROR, + ALERT_DESCRIPTION_HANDSHAKE_FAILURE, + ALERT_DESCRIPTION_ILLEGAL_PARAMETER, + ALERT_DESCRIPTION_INSUFFICIENT_SECURITY, + ALERT_DESCRIPTION_INTERNAL_ERROR, + ALERT_DESCRIPTION_NO_RENEGOTIATION, + ALERT_DESCRIPTION_PROTOCOL_VERSION, + ALERT_DESCRIPTION_RECORD_OVERFLOW, + ALERT_DESCRIPTION_UNEXPECTED_MESSAGE, + ALERT_DESCRIPTION_UNKNOWN_CA, + ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY, + ALERT_DESCRIPTION_UNRECOGNIZED_NAME, + ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE, + ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION, + ALERT_DESCRIPTION_USER_CANCELLED) + from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL, + SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ, + SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN) + from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT + from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN + from _ssl import _OPENSSL_API_VERSION + from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2 + ''') + + +register_module_extender(MANAGER, 'ssl', ssl_transform) diff --git a/pymode/libs/astroid/brain/brain_stdlib.py b/pymode/libs/astroid/brain/brain_stdlib.py new file mode 100644 index 00000000..ad395a27 --- /dev/null +++ b/pymode/libs/astroid/brain/brain_stdlib.py @@ -0,0 +1,473 @@ + +"""Astroid hooks for the Python 2 standard library. 
+ +Currently help understanding of : + +* hashlib.md5 and hashlib.sha1 +""" + +import functools +import sys +from textwrap import dedent + +from astroid import ( + MANAGER, UseInferenceDefault, inference_tip, BoundMethod, + InferenceError, register_module_extender) +from astroid import exceptions +from astroid import nodes +from astroid.builder import AstroidBuilder +from astroid import util +from astroid import test_utils + +PY3K = sys.version_info > (3, 0) +PY33 = sys.version_info >= (3, 3) +PY34 = sys.version_info >= (3, 4) + +# general function + +def infer_func_form(node, base_type, context=None, enum=False): + """Specific inference function for namedtuple or Python 3 enum. """ + def infer_first(node): + if node is util.YES: + raise UseInferenceDefault + try: + value = next(node.infer(context=context)) + if value is util.YES: + raise UseInferenceDefault() + else: + return value + except StopIteration: + raise InferenceError() + + # node is a Call node, class name as first argument and generated class + # attributes as second argument + if len(node.args) != 2: + # something weird here, go back to class implementation + raise UseInferenceDefault() + # namedtuple or enums list of attributes can be a list of strings or a + # whitespace-separate string + try: + name = infer_first(node.args[0]).value + names = infer_first(node.args[1]) + try: + attributes = names.value.replace(',', ' ').split() + except AttributeError: + if not enum: + attributes = [infer_first(const).value for const in names.elts] + else: + # Enums supports either iterator of (name, value) pairs + # or mappings. + # TODO: support only list, tuples and mappings. + if hasattr(names, 'items') and isinstance(names.items, list): + attributes = [infer_first(const[0]).value + for const in names.items + if isinstance(const[0], nodes.Const)] + elif hasattr(names, 'elts'): + # Enums can support either ["a", "b", "c"] + # or [("a", 1), ("b", 2), ...], but they can't + # be mixed. 
+ if all(isinstance(const, nodes.Tuple) + for const in names.elts): + attributes = [infer_first(const.elts[0]).value + for const in names.elts + if isinstance(const, nodes.Tuple)] + else: + attributes = [infer_first(const).value + for const in names.elts] + else: + raise AttributeError + if not attributes: + raise AttributeError + except (AttributeError, exceptions.InferenceError): + raise UseInferenceDefault() + + # If we can't iner the name of the class, don't crash, up to this point + # we know it is a namedtuple anyway. + name = name or 'Uninferable' + # we want to return a Class node instance with proper attributes set + class_node = nodes.ClassDef(name, 'docstring') + class_node.parent = node.parent + # set base class=tuple + class_node.bases.append(base_type) + # XXX add __init__(*attributes) method + for attr in attributes: + fake_node = nodes.EmptyNode() + fake_node.parent = class_node + fake_node.attrname = attr + class_node._instance_attrs[attr] = [fake_node] + return class_node, name, attributes + + +# module specific transformation functions ##################################### + +def hashlib_transform(): + template = ''' + +class %(name)s(object): + def __init__(self, value=''): pass + def digest(self): + return %(digest)s + def copy(self): + return self + def update(self, value): pass + def hexdigest(self): + return '' + @property + def name(self): + return %(name)r + @property + def block_size(self): + return 1 + @property + def digest_size(self): + return 1 +''' + algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + classes = "".join( + template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} + for hashfunc in algorithms) + return AstroidBuilder(MANAGER).string_build(classes) + + +def collections_transform(): + return AstroidBuilder(MANAGER).string_build(''' + +class defaultdict(dict): + default_factory = None + def __missing__(self, key): pass + +class deque(object): + maxlen = 0 + def __init__(self, iterable=None, 
maxlen=None): + self.iterable = iterable + def append(self, x): pass + def appendleft(self, x): pass + def clear(self): pass + def count(self, x): return 0 + def extend(self, iterable): pass + def extendleft(self, iterable): pass + def pop(self): pass + def popleft(self): pass + def remove(self, value): pass + def reverse(self): pass + def rotate(self, n): pass + def __iter__(self): return self + def __reversed__(self): return self.iterable[::-1] + def __getitem__(self, index): pass + def __setitem__(self, index, value): pass + def __delitem__(self, index): pass +''') + + +def pkg_resources_transform(): + return AstroidBuilder(MANAGER).string_build(''' +def require(*requirements): + return pkg_resources.working_set.require(*requirements) + +def run_script(requires, script_name): + return pkg_resources.working_set.run_script(requires, script_name) + +def iter_entry_points(group, name=None): + return pkg_resources.working_set.iter_entry_points(group, name) + +def resource_exists(package_or_requirement, resource_name): + return get_provider(package_or_requirement).has_resource(resource_name) + +def resource_isdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_isdir( + resource_name) + +def resource_filename(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name) + +def resource_stream(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name) + +def resource_string(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_string( + self, resource_name) + +def resource_listdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_listdir( + resource_name) + +def extraction_error(): + pass + +def get_cache_path(archive_name, names=()): + extract_path = self.extraction_path or get_default_cache() + 
target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + return target_path + +def postprocess(tempname, filename): + pass + +def set_extraction_path(path): + pass + +def cleanup_resources(force=False): + pass + +''') + + +def subprocess_transform(): + if PY3K: + communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) + communicate_signature = 'def communicate(self, input=None, timeout=None)' + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0, restore_signals=True, + start_new_session=False, pass_fds=()): + pass + """ + else: + communicate = ('string', 'string') + communicate_signature = 'def communicate(self, input=None)' + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + pass + """ + if PY33: + wait_signature = 'def wait(self, timeout=None)' + else: + wait_signature = 'def wait(self)' + if PY3K: + ctx_manager = ''' + def __enter__(self): return self + def __exit__(self, *args): pass + ''' + else: + ctx_manager = '' + code = dedent(''' + + class Popen(object): + returncode = pid = 0 + stdin = stdout = stderr = file() + + %(init)s + + %(communicate_signature)s: + return %(communicate)r + %(wait_signature)s: + return self.returncode + def poll(self): + return self.returncode + def send_signal(self, signal): + pass + def terminate(self): + pass + def kill(self): + pass + %(ctx_manager)s + ''' % {'init': init, + 'communicate': communicate, + 'communicate_signature': communicate_signature, + 'wait_signature': wait_signature, + 'ctx_manager': ctx_manager}) + return AstroidBuilder(MANAGER).string_build(code) + + +# namedtuple support 
########################################################### + +def _looks_like(node, name): + func = node.func + if isinstance(func, nodes.Attribute): + return func.attrname == name + if isinstance(func, nodes.Name): + return func.name == name + return False + +_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple') +_looks_like_enum = functools.partial(_looks_like, name='Enum') + + +def infer_named_tuple(node, context=None): + """Specific inference function for namedtuple Call node""" + class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, + context=context) + fake = AstroidBuilder(MANAGER).string_build(''' +class %(name)s(tuple): + _fields = %(fields)r + def _asdict(self): + return self.__dict__ + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + return new(cls, iterable) + def _replace(self, **kwds): + return self + ''' % {'name': name, 'fields': attributes}) + class_node._locals['_asdict'] = fake.body[0]._locals['_asdict'] + class_node._locals['_make'] = fake.body[0]._locals['_make'] + class_node._locals['_replace'] = fake.body[0]._locals['_replace'] + class_node._locals['_fields'] = fake.body[0]._locals['_fields'] + # we use UseInferenceDefault, we can't be a generator so return an iterator + return iter([class_node]) + + +def infer_enum(node, context=None): + """ Specific inference function for enum Call node. """ + enum_meta = test_utils.extract_node(''' + class EnumMeta(object): + 'docstring' + def __call__(self, node): + class EnumAttribute(object): + name = '' + value = 0 + return EnumAttribute() + ''') + class_node = infer_func_form(node, enum_meta, + context=context, enum=True)[0] + return iter([class_node.instantiate_class()]) + + +def infer_enum_class(node): + """ Specific inference for enums. """ + names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) + for basename in node.basenames: + # TODO: doesn't handle subclasses yet. This implementation + # is a hack to support enums. 
+ if basename not in names: + continue + if node.root().name == 'enum': + # Skip if the class is directly from enum module. + break + for local, values in node._locals.items(): + if any(not isinstance(value, nodes.AssignName) + for value in values): + continue + + stmt = values[0].statement() + if isinstance(stmt.targets[0], nodes.Tuple): + targets = stmt.targets[0].itered() + else: + targets = stmt.targets + + new_targets = [] + for target in targets: + # Replace all the assignments with our mocked class. + classdef = dedent(''' + class %(name)s(%(types)s): + @property + def value(self): + # Not the best return. + return None + @property + def name(self): + return %(name)r + ''' % {'name': target.name, 'types': ', '.join(node.basenames)}) + fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] + fake.parent = target.parent + for method in node.mymethods(): + fake._locals[method.name] = [method] + new_targets.append(fake.instantiate_class()) + node._locals[local] = new_targets + break + return node + +def multiprocessing_transform(): + module = AstroidBuilder(MANAGER).string_build(dedent(''' + from multiprocessing.managers import SyncManager + def Manager(): + return SyncManager() + ''')) + if not PY34: + return module + + # On Python 3.4, multiprocessing uses a getattr lookup inside contexts, + # in order to get the attributes they need. Since it's extremely + # dynamic, we use this approach to fake it. 
+ node = AstroidBuilder(MANAGER).string_build(dedent(''' + from multiprocessing.context import DefaultContext, BaseContext + default = DefaultContext() + base = BaseContext() + ''')) + try: + context = next(node['default'].infer()) + base = next(node['base'].infer()) + except InferenceError: + return module + + for node in (context, base): + for key, value in node._locals.items(): + if key.startswith("_"): + continue + + value = value[0] + if isinstance(value, nodes.FunctionDef): + # We need to rebound this, since otherwise + # it will have an extra argument (self). + value = BoundMethod(value, node) + module[key] = value + return module + +def multiprocessing_managers_transform(): + return AstroidBuilder(MANAGER).string_build(dedent(''' + import array + import threading + import multiprocessing.pool as pool + + import six + + class Namespace(object): + pass + + class Value(object): + def __init__(self, typecode, value, lock=True): + self._typecode = typecode + self._value = value + def get(self): + return self._value + def set(self, value): + self._value = value + def __repr__(self): + return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value) + value = property(get, set) + + def Array(typecode, sequence, lock=True): + return array.array(typecode, sequence) + + class SyncManager(object): + Queue = JoinableQueue = six.moves.queue.Queue + Event = threading.Event + RLock = threading.RLock + BoundedSemaphore = threading.BoundedSemaphore + Condition = threading.Condition + Barrier = threading.Barrier + Pool = pool.Pool + list = list + dict = dict + Value = Value + Array = Array + Namespace = Namespace + __enter__ = lambda self: self + __exit__ = lambda *args: args + + def start(self, initializer=None, initargs=None): + pass + def shutdown(self): + pass + ''')) + + +MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple), + _looks_like_namedtuple) +MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), + _looks_like_enum) 
+MANAGER.register_transform(nodes.ClassDef, infer_enum_class) +register_module_extender(MANAGER, 'hashlib', hashlib_transform) +register_module_extender(MANAGER, 'collections', collections_transform) +register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) +register_module_extender(MANAGER, 'subprocess', subprocess_transform) +register_module_extender(MANAGER, 'multiprocessing.managers', + multiprocessing_managers_transform) +register_module_extender(MANAGER, 'multiprocessing', multiprocessing_transform) diff --git a/pymode/libs/astroid/builder.py b/pymode/libs/astroid/builder.py new file mode 100644 index 00000000..63c156a1 --- /dev/null +++ b/pymode/libs/astroid/builder.py @@ -0,0 +1,263 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""The AstroidBuilder makes astroid from living object and / or from _ast + +The builder is not thread safe and can't be used to parse different sources +at the same time. 
+""" +from __future__ import with_statement + +import _ast +import os +import sys +import textwrap + +from astroid import bases +from astroid import exceptions +from astroid import manager +from astroid import modutils +from astroid import raw_building +from astroid import rebuilder +from astroid import util + + +def _parse(string): + return compile(string, "", 'exec', _ast.PyCF_ONLY_AST) + + +if sys.version_info >= (3, 0): + # pylint: disable=no-name-in-module; We don't understand flows yet. + from tokenize import detect_encoding + + def open_source_file(filename): + with open(filename, 'rb') as byte_stream: + encoding = detect_encoding(byte_stream.readline)[0] + stream = open(filename, 'r', newline=None, encoding=encoding) + try: + data = stream.read() + except UnicodeError: # wrong encoding + # detect_encoding returns utf-8 if no encoding specified + msg = 'Wrong (%s) or no encoding specified' % encoding + raise exceptions.AstroidBuildingException(msg) + return stream, encoding, data + +else: + import re + + _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)") + + def _guess_encoding(string): + """get encoding from a python file as string or return None if not found""" + # check for UTF-8 byte-order mark + if string.startswith('\xef\xbb\xbf'): + return 'UTF-8' + for line in string.split('\n', 2)[:2]: + # check for encoding declaration + match = _ENCODING_RGX.match(line) + if match is not None: + return match.group(1) + + def open_source_file(filename): + """get data for parsing a file""" + stream = open(filename, 'U') + data = stream.read() + encoding = _guess_encoding(data) + return stream, encoding, data + + +MANAGER = manager.AstroidManager() + + +class AstroidBuilder(raw_building.InspectBuilder): + """Class for building an astroid tree from source code or from a live module. + + The param *manager* specifies the manager class which should be used. + If no manager is given, then the default one will be used. 
The + param *apply_transforms* determines if the transforms should be + applied after the tree was built from source or from a live object, + by default being True. + """ + + def __init__(self, manager=None, apply_transforms=True): + super(AstroidBuilder, self).__init__() + self._manager = manager or MANAGER + self._apply_transforms = apply_transforms + + def module_build(self, module, modname=None): + """Build an astroid from a living module instance.""" + node = None + path = getattr(module, '__file__', None) + if path is not None: + path_, ext = os.path.splitext(modutils._path_from_filename(path)) + if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'): + node = self.file_build(path_ + '.py', modname) + if node is None: + # this is a built-in module + # get a partial representation by introspection + node = self.inspect_build(module, modname=modname, path=path) + if self._apply_transforms: + # We have to handle transformation by ourselves since the + # rebuilder isn't called for builtin nodes + node = self._manager.visit_transforms(node) + return node + + def file_build(self, path, modname=None): + """Build astroid from a source code file (i.e. 
from an ast) + + *path* is expected to be a python source file + """ + try: + stream, encoding, data = open_source_file(path) + except IOError as exc: + msg = 'Unable to load file %r (%s)' % (path, exc) + raise exceptions.AstroidBuildingException(msg) + except SyntaxError as exc: # py3k encoding specification error + raise exceptions.AstroidBuildingException(exc) + except LookupError as exc: # unknown encoding + raise exceptions.AstroidBuildingException(exc) + with stream: + # get module name if necessary + if modname is None: + try: + modname = '.'.join(modutils.modpath_from_file(path)) + except ImportError: + modname = os.path.splitext(os.path.basename(path))[0] + # build astroid representation + module = self._data_build(data, modname, path) + return self._post_build(module, encoding) + + def string_build(self, data, modname='', path=None): + """Build astroid from source code string.""" + module = self._data_build(data, modname, path) + module.source_code = data.encode('utf-8') + return self._post_build(module, 'utf-8') + + def _post_build(self, module, encoding): + """Handles encoding and delayed nodes after a module has been built""" + module.file_encoding = encoding + self._manager.cache_module(module) + # post tree building steps after we stored the module in the cache: + for from_node in module._import_from_nodes: + if from_node.modname == '__future__': + for symbol, _ in from_node.names: + module._future_imports.add(symbol) + self.add_from_names_to_locals(from_node) + # handle delayed assattr nodes + for delayed in module._delayed_assattr: + self.delayed_assattr(delayed) + + # Visit the transforms + if self._apply_transforms: + module = self._manager.visit_transforms(module) + return module + + def _data_build(self, data, modname, path): + """Build tree node from data and add some informations""" + try: + node = _parse(data + '\n') + except (TypeError, ValueError, SyntaxError) as exc: + raise exceptions.AstroidBuildingException(exc) + if path is not None: 
+ node_file = os.path.abspath(path) + else: + node_file = '' + if modname.endswith('.__init__'): + modname = modname[:-9] + package = True + else: + package = path and path.find('__init__.py') > -1 or False + builder = rebuilder.TreeRebuilder(self._manager) + module = builder.visit_module(node, modname, node_file, package) + module._import_from_nodes = builder._import_from_nodes + module._delayed_assattr = builder._delayed_assattr + return module + + def add_from_names_to_locals(self, node): + """Store imported names to the locals + + Resort the locals if coming from a delayed node + """ + _key_func = lambda node: node.fromlineno + def sort_locals(my_list): + my_list.sort(key=_key_func) + + for (name, asname) in node.names: + if name == '*': + try: + imported = node.do_import_module() + except exceptions.InferenceError: + continue + for name in imported._public_names(): + node.parent.set_local(name, node) + sort_locals(node.parent.scope()._locals[name]) + else: + node.parent.set_local(asname or name, node) + sort_locals(node.parent.scope()._locals[asname or name]) + + def delayed_assattr(self, node): + """Visit a AssAttr node + + This adds name to locals and handle members definition. + """ + try: + frame = node.frame() + for inferred in node.expr.infer(): + if inferred is util.YES: + continue + try: + if inferred.__class__ is bases.Instance: + inferred = inferred._proxied + iattrs = inferred._instance_attrs + elif isinstance(inferred, bases.Instance): + # Const, Tuple, ... we may be wrong, may be not, but + # anyway we don't want to pollute builtin's namespace + continue + elif inferred.is_function: + iattrs = inferred._instance_attrs + else: + iattrs = inferred._locals + except AttributeError: + # XXX log error + continue + values = iattrs.setdefault(node.attrname, []) + if node in values: + continue + # get assign in __init__ first XXX useful ? 
+ if (frame.name == '__init__' and values and + not values[0].frame().name == '__init__'): + values.insert(0, node) + else: + values.append(node) + except exceptions.InferenceError: + pass + + +def parse(code, module_name='', path=None, apply_transforms=True): + """Parses a source string in order to obtain an astroid AST from it + + :param str code: The code for the module. + :param str module_name: The name for the module, if any + :param str path: The path for the module + :param bool apply_transforms: + Apply the transforms for the give code. Use it if you + don't want the default transforms to be applied. + """ + code = textwrap.dedent(code) + builder = AstroidBuilder(manager=MANAGER, + apply_transforms=apply_transforms) + return builder.string_build(code, modname=module_name, path=path) diff --git a/pymode/libs/astroid/context.py b/pymode/libs/astroid/context.py new file mode 100644 index 00000000..284dfa18 --- /dev/null +++ b/pymode/libs/astroid/context.py @@ -0,0 +1,81 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +"""Various context related utilities, including inference and call contexts.""" + +import contextlib + + +class InferenceContext(object): + __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred') + + def __init__(self, path=None, inferred=None): + self.path = path or set() + self.lookupname = None + self.callcontext = None + self.boundnode = None + self.inferred = inferred or {} + + def push(self, node): + name = self.lookupname + if (node, name) in self.path: + raise StopIteration() + self.path.add((node, name)) + + def clone(self): + # XXX copy lookupname/callcontext ? + clone = InferenceContext(self.path, inferred=self.inferred) + clone.callcontext = self.callcontext + clone.boundnode = self.boundnode + return clone + + def cache_generator(self, key, generator): + results = [] + for result in generator: + results.append(result) + yield result + + self.inferred[key] = tuple(results) + return + + @contextlib.contextmanager + def restore_path(self): + path = set(self.path) + yield + self.path = path + + +class CallContext(object): + """Holds information for a call site.""" + + __slots__ = ('args', 'keywords') + + def __init__(self, args, keywords=None): + self.args = args + if keywords: + keywords = [(arg.arg, arg.value) for arg in keywords] + else: + keywords = [] + self.keywords = keywords + + +def copy_context(context): + if context is not None: + return context.clone() + else: + return InferenceContext() diff --git a/pymode/libs/astroid/decorators.py b/pymode/libs/astroid/decorators.py new file mode 100644 index 00000000..a446536c --- /dev/null +++ b/pymode/libs/astroid/decorators.py @@ -0,0 +1,75 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +# +# The code in this file was originally part of logilab-common, licensed under +# the same license. + +""" A few useful function/method decorators.""" + +import wrapt + + +@wrapt.decorator +def cached(func, instance, args, kwargs): + """Simple decorator to cache result of method calls without args.""" + cache = getattr(instance, '__cache', None) + if cache is None: + instance.__cache = cache = {} + try: + return cache[func] + except KeyError: + cache[func] = result = func(*args, **kwargs) + return result + + +class cachedproperty(object): + """ Provides a cached property equivalent to the stacking of + @cached and @property, but more efficient. + + After first usage, the becomes part of the object's + __dict__. Doing: + + del obj. empties the cache. + + Idea taken from the pyramid_ framework and the mercurial_ project. + + .. _pyramid: http://pypi.python.org/pypi/pyramid + .. 
_mercurial: http://pypi.python.org/pypi/Mercurial + """ + __slots__ = ('wrapped',) + + def __init__(self, wrapped): + try: + wrapped.__name__ + except AttributeError: + raise TypeError('%s must have a __name__ attribute' % + wrapped) + self.wrapped = wrapped + + @property + def __doc__(self): + doc = getattr(self.wrapped, '__doc__', None) + return ('%s' + % ('\n%s' % doc if doc else '')) + + def __get__(self, inst, objtype=None): + if inst is None: + return self + val = self.wrapped(inst) + setattr(inst, self.wrapped.__name__, val) + return val diff --git a/pymode/libs/astroid/exceptions.py b/pymode/libs/astroid/exceptions.py new file mode 100644 index 00000000..47f2fe50 --- /dev/null +++ b/pymode/libs/astroid/exceptions.py @@ -0,0 +1,71 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""this module contains exceptions used in the astroid library + +""" + +__doctype__ = "restructuredtext en" + +class AstroidError(Exception): + """base exception class for all astroid related exceptions""" + +class AstroidBuildingException(AstroidError): + """exception class when we are unable to build an astroid representation""" + +class ResolveError(AstroidError): + """base class of astroid resolution/inference error""" + +class MroError(ResolveError): + """Error raised when there is a problem with method resolution of a class.""" + + +class DuplicateBasesError(MroError): + """Error raised when there are duplicate bases in the same class bases.""" + + +class InconsistentMroError(MroError): + """Error raised when a class's MRO is inconsistent.""" + + +class SuperError(ResolveError): + """Error raised when there is a problem with a super call.""" + + +class SuperArgumentTypeError(SuperError): + """Error raised when the super arguments are invalid.""" + + +class NotFoundError(ResolveError): + """raised when we are unable to resolve a name""" + +class InferenceError(ResolveError): + """raised when we are unable to infer a node""" + +class UseInferenceDefault(Exception): + """exception to be raised in custom inference function to indicate that it + should go back to the default behaviour + """ + +class UnresolvableName(InferenceError): + """raised when we are unable to resolve a name""" + +class NoDefault(AstroidError): + """raised by function's `default_value` method when an argument has + no default value + """ + diff --git a/pymode/libs/astroid/inference.py b/pymode/libs/astroid/inference.py new file mode 100644 index 00000000..ddd43561 --- /dev/null +++ b/pymode/libs/astroid/inference.py @@ -0,0 +1,359 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""this module contains a set of functions to handle inference on astroid trees +""" + +from __future__ import print_function + +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import manager +from astroid import nodes +from astroid import protocols +from astroid import util + + +MANAGER = manager.AstroidManager() + + +# .infer method ############################################################### + + +def infer_end(self, context=None): + """inference's end for node such as Module, ClassDef, FunctionDef, + Const... + + """ + yield self +nodes.Module._infer = infer_end +nodes.ClassDef._infer = infer_end +nodes.FunctionDef._infer = infer_end +nodes.Lambda._infer = infer_end +nodes.Const._infer = infer_end +nodes.List._infer = infer_end +nodes.Tuple._infer = infer_end +nodes.Dict._infer = infer_end +nodes.Set._infer = infer_end + +def _higher_function_scope(node): + """ Search for the first function which encloses the given + scope. This can be used for looking up in that function's + scope, in case looking up in a lower scope for a particular + name fails. + + :param node: A scope node. + :returns: + ``None``, if no parent function scope was found, + otherwise an instance of :class:`astroid.scoped_nodes.Function`, + which encloses the given node. 
+ """ + current = node + while current.parent and not isinstance(current.parent, nodes.FunctionDef): + current = current.parent + if current and current.parent: + return current.parent + +def infer_name(self, context=None): + """infer a Name: use name lookup rules""" + frame, stmts = self.lookup(self.name) + if not stmts: + # Try to see if the name is enclosed in a nested function + # and use the higher (first function) scope for searching. + # TODO: should this be promoted to other nodes as well? + parent_function = _higher_function_scope(self.scope()) + if parent_function: + _, stmts = parent_function.lookup(self.name) + + if not stmts: + raise exceptions.UnresolvableName(self.name) + context = context.clone() + context.lookupname = self.name + return bases._infer_stmts(stmts, context, frame) +nodes.Name._infer = bases.path_wrapper(infer_name) +nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper + + +@bases.path_wrapper +@bases.raise_if_nothing_inferred +def infer_call(self, context=None): + """infer a Call node by trying to guess what the function returns""" + callcontext = context.clone() + callcontext.callcontext = contextmod.CallContext(args=self.args, + keywords=self.keywords) + callcontext.boundnode = None + for callee in self.func.infer(context): + if callee is util.YES: + yield callee + continue + try: + if hasattr(callee, 'infer_call_result'): + for inferred in callee.infer_call_result(self, callcontext): + yield inferred + except exceptions.InferenceError: + ## XXX log error ? 
+ continue +nodes.Call._infer = infer_call + + +@bases.path_wrapper +def infer_import(self, context=None, asname=True): + """infer an Import node: return the imported module/object""" + name = context.lookupname + if name is None: + raise exceptions.InferenceError() + if asname: + yield self.do_import_module(self.real_name(name)) + else: + yield self.do_import_module(name) +nodes.Import._infer = infer_import + + +def infer_name_module(self, name): + context = contextmod.InferenceContext() + context.lookupname = name + return self.infer(context, asname=False) +nodes.Import.infer_name_module = infer_name_module + + +@bases.path_wrapper +def infer_import_from(self, context=None, asname=True): + """infer a ImportFrom node: return the imported module/object""" + name = context.lookupname + if name is None: + raise exceptions.InferenceError() + if asname: + name = self.real_name(name) + module = self.do_import_module() + try: + context = contextmod.copy_context(context) + context.lookupname = name + stmts = module.getattr(name, ignore_locals=module is self.root()) + return bases._infer_stmts(stmts, context) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) +nodes.ImportFrom._infer = infer_import_from + + +@bases.raise_if_nothing_inferred +def infer_attribute(self, context=None): + """infer an Attribute node by using getattr on the associated object""" + for owner in self.expr.infer(context): + if owner is util.YES: + yield owner + continue + try: + context.boundnode = owner + for obj in owner.igetattr(self.attrname, context): + yield obj + context.boundnode = None + except (exceptions.NotFoundError, exceptions.InferenceError): + context.boundnode = None + except AttributeError: + # XXX method / function + context.boundnode = None +nodes.Attribute._infer = bases.path_wrapper(infer_attribute) +nodes.AssignAttr.infer_lhs = infer_attribute # # won't work with a path wrapper + + +@bases.path_wrapper +def infer_global(self, context=None): + if 
context.lookupname is None: + raise exceptions.InferenceError() + try: + return bases._infer_stmts(self.root().getattr(context.lookupname), + context) + except exceptions.NotFoundError: + raise exceptions.InferenceError() +nodes.Global._infer = infer_global + + +@bases.raise_if_nothing_inferred +def infer_subscript(self, context=None): + """Inference for subscripts + + We're understanding if the index is a Const + or a slice, passing the result of inference + to the value's `getitem` method, which should + handle each supported index type accordingly. + """ + + value = next(self.value.infer(context)) + if value is util.YES: + yield util.YES + return + + index = next(self.slice.infer(context)) + if index is util.YES: + yield util.YES + return + + if isinstance(index, nodes.Const): + try: + assigned = value.getitem(index.value, context) + except AttributeError: + raise exceptions.InferenceError() + except (IndexError, TypeError): + yield util.YES + return + + # Prevent inferring if the infered subscript + # is the same as the original subscripted object. 
+ if self is assigned or assigned is util.YES: + yield util.YES + return + for infered in assigned.infer(context): + yield infered + else: + raise exceptions.InferenceError() +nodes.Subscript._infer = bases.path_wrapper(infer_subscript) +nodes.Subscript.infer_lhs = infer_subscript + +@bases.raise_if_nothing_inferred +def infer_unaryop(self, context=None): + for operand in self.operand.infer(context): + try: + yield operand.infer_unary_op(self.op) + except TypeError: + continue + except AttributeError: + meth = protocols.UNARY_OP_METHOD[self.op] + if meth is None: + yield util.YES + else: + try: + # XXX just suppose if the type implement meth, returned type + # will be the same + operand.getattr(meth) + yield operand + except GeneratorExit: + raise + except: + yield util.YES +nodes.UnaryOp._infer = bases.path_wrapper(infer_unaryop) + +def _infer_binop(binop, operand1, operand2, context, failures=None): + if operand1 is util.YES: + yield operand1 + return + try: + for valnode in operand1.infer_binary_op(binop, operand2, context): + yield valnode + except AttributeError: + try: + # XXX just suppose if the type implement meth, returned type + # will be the same + operand1.getattr(protocols.BIN_OP_METHOD[operator]) + yield operand1 + except: + if failures is None: + yield util.YES + else: + failures.append(operand1) + +@bases.yes_if_nothing_inferred +def infer_binop(self, context=None): + failures = [] + for lhs in self.left.infer(context): + for val in _infer_binop(self, lhs, self.right, context, failures): + yield val + for lhs in failures: + for rhs in self.right.infer(context): + for val in _infer_binop(self, rhs, lhs, context): + yield val +nodes.BinOp._infer = bases.path_wrapper(infer_binop) + + +def infer_arguments(self, context=None): + name = context.lookupname + if name is None: + raise exceptions.InferenceError() + return protocols._arguments_infer_argname(self, name, context) +nodes.Arguments._infer = infer_arguments + + +@bases.path_wrapper +def 
infer_assign(self, context=None): + """infer a AssignName/AssignAttr: need to inspect the RHS part of the + assign node + """ + stmt = self.statement() + if isinstance(stmt, nodes.AugAssign): + return stmt.infer(context) + + stmts = list(self.assigned_stmts(context=context)) + return bases._infer_stmts(stmts, context) +nodes.AssignName._infer = infer_assign +nodes.AssignAttr._infer = infer_assign + +def infer_augassign(self, context=None): + failures = [] + for lhs in self.target.infer_lhs(context): + for val in _infer_binop(self, lhs, self.value, context, failures): + yield val + for lhs in failures: + for rhs in self.value.infer(context): + for val in _infer_binop(self, rhs, lhs, context): + yield val +nodes.AugAssign._infer = bases.path_wrapper(infer_augassign) + + +# no infer method on DelName and DelAttr (expected InferenceError) + +@bases.path_wrapper +def infer_empty_node(self, context=None): + if not self.has_underlying_object(): + yield util.YES + else: + try: + for inferred in MANAGER.infer_ast_from_something(self.object, + context=context): + yield inferred + except exceptions.AstroidError: + yield util.YES +nodes.EmptyNode._infer = infer_empty_node + + +def infer_index(self, context=None): + return self.value.infer(context) +nodes.Index._infer = infer_index + +# TODO: move directly into bases.Instance when the dependency hell +# will be solved. +def instance_getitem(self, index, context=None): + # Rewrap index to Const for this case + index = nodes.Const(index) + if context: + new_context = context.clone() + else: + context = new_context = contextmod.InferenceContext() + + # Create a new callcontext for providing index as an argument. 
+ new_context.callcontext = contextmod.CallContext(args=[index]) + new_context.boundnode = self + + method = next(self.igetattr('__getitem__', context=context)) + if not isinstance(method, bases.BoundMethod): + raise exceptions.InferenceError + + try: + return next(method.infer_call_result(self, new_context)) + except StopIteration: + raise exceptions.InferenceError + +bases.Instance.getitem = instance_getitem diff --git a/pymode/libs/astroid/manager.py b/pymode/libs/astroid/manager.py new file mode 100644 index 00000000..d08adc29 --- /dev/null +++ b/pymode/libs/astroid/manager.py @@ -0,0 +1,267 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""astroid manager: avoid multiple astroid build of a same module when +possible by providing a class responsible to get astroid representation +from various source and using a cache of built modules) +""" +from __future__ import print_function + +import imp +import os +import zipimport + +from astroid import exceptions +from astroid import modutils +from astroid import transforms + + +def safe_repr(obj): + try: + return repr(obj) + except Exception: # pylint: disable=broad-except + return '???' 
+ + +class AstroidManager(object): + """the astroid manager, responsible to build astroid from files + or modules. + + Use the Borg pattern. + """ + + name = 'astroid loader' + brain = {} + + def __init__(self): + self.__dict__ = AstroidManager.brain + if not self.__dict__: + # NOTE: cache entries are added by the [re]builder + self.astroid_cache = {} + self._mod_file_cache = {} + self._failed_import_hooks = [] + self.always_load_extensions = False + self.optimize_ast = False + self.extension_package_whitelist = set() + self._transform = transforms.TransformVisitor() + + # Export these APIs for convenience + self.register_transform = self._transform.register_transform + self.unregister_transform = self._transform.unregister_transform + + def visit_transforms(self, node): + """Visit the transforms and apply them to the given *node*.""" + return self._transform.visit(node) + + def ast_from_file(self, filepath, modname=None, fallback=True, source=False): + """given a module name, return the astroid object""" + try: + filepath = modutils.get_source_file(filepath, include_no_ext=True) + source = True + except modutils.NoSourceFile: + pass + if modname is None: + try: + modname = '.'.join(modutils.modpath_from_file(filepath)) + except ImportError: + modname = filepath + if modname in self.astroid_cache and self.astroid_cache[modname].source_file == filepath: + return self.astroid_cache[modname] + if source: + from astroid.builder import AstroidBuilder + return AstroidBuilder(self).file_build(filepath, modname) + elif fallback and modname: + return self.ast_from_module_name(modname) + raise exceptions.AstroidBuildingException( + 'unable to get astroid for file %s' % filepath) + + def _build_stub_module(self, modname): + from astroid.builder import AstroidBuilder + return AstroidBuilder(self).string_build('', modname) + + def _can_load_extension(self, modname): + if self.always_load_extensions: + return True + if modutils.is_standard_module(modname): + return True + parts 
= modname.split('.') + return any( + '.'.join(parts[:x]) in self.extension_package_whitelist + for x in range(1, len(parts) + 1)) + + def ast_from_module_name(self, modname, context_file=None): + """given a module name, return the astroid object""" + if modname in self.astroid_cache: + return self.astroid_cache[modname] + if modname == '__main__': + return self._build_stub_module(modname) + old_cwd = os.getcwd() + if context_file: + os.chdir(os.path.dirname(context_file)) + try: + filepath, mp_type = self.file_from_module_name(modname, context_file) + if mp_type == modutils.PY_ZIPMODULE: + module = self.zip_import_data(filepath) + if module is not None: + return module + elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION): + if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname): + return self._build_stub_module(modname) + try: + module = modutils.load_module_from_name(modname) + except Exception as ex: + msg = 'Unable to load module %s (%s)' % (modname, ex) + raise exceptions.AstroidBuildingException(msg) + return self.ast_from_module(module, modname) + elif mp_type == imp.PY_COMPILED: + msg = "Unable to load compiled module %s" % (modname,) + raise exceptions.AstroidBuildingException(msg) + if filepath is None: + msg = "Unable to load module %s" % (modname,) + raise exceptions.AstroidBuildingException(msg) + return self.ast_from_file(filepath, modname, fallback=False) + except exceptions.AstroidBuildingException as e: + for hook in self._failed_import_hooks: + try: + return hook(modname) + except exceptions.AstroidBuildingException: + pass + raise e + finally: + os.chdir(old_cwd) + + def zip_import_data(self, filepath): + if zipimport is None: + return None + from astroid.builder import AstroidBuilder + builder = AstroidBuilder(self) + for ext in ('.zip', '.egg'): + try: + eggpath, resource = filepath.rsplit(ext + os.path.sep, 1) + except ValueError: + continue + try: + importer = zipimport.zipimporter(eggpath + ext) + zmodname = 
resource.replace(os.path.sep, '.') + if importer.is_package(resource): + zmodname = zmodname + '.__init__' + module = builder.string_build(importer.get_source(resource), + zmodname, filepath) + return module + except Exception: # pylint: disable=broad-except + continue + return None + + def file_from_module_name(self, modname, contextfile): + # pylint: disable=redefined-variable-type + try: + value = self._mod_file_cache[(modname, contextfile)] + except KeyError: + try: + value = modutils.file_info_from_modpath( + modname.split('.'), context_file=contextfile) + except ImportError as ex: + msg = 'Unable to load module %s (%s)' % (modname, ex) + value = exceptions.AstroidBuildingException(msg) + self._mod_file_cache[(modname, contextfile)] = value + if isinstance(value, exceptions.AstroidBuildingException): + raise value + return value + + def ast_from_module(self, module, modname=None): + """given an imported module, return the astroid object""" + modname = modname or module.__name__ + if modname in self.astroid_cache: + return self.astroid_cache[modname] + try: + # some builtin modules don't have __file__ attribute + filepath = module.__file__ + if modutils.is_python_source(filepath): + return self.ast_from_file(filepath, modname) + except AttributeError: + pass + from astroid.builder import AstroidBuilder + return AstroidBuilder(self).module_build(module, modname) + + def ast_from_class(self, klass, modname=None): + """get astroid for the given class""" + if modname is None: + try: + modname = klass.__module__ + except AttributeError: + msg = 'Unable to get module for class %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) + modastroid = self.ast_from_module_name(modname) + return modastroid.getattr(klass.__name__)[0] # XXX + + def infer_ast_from_something(self, obj, context=None): + """infer astroid for the given class""" + if hasattr(obj, '__class__') and not isinstance(obj, type): + klass = obj.__class__ + else: + klass = obj + try: + 
modname = klass.__module__ + except AttributeError: + msg = 'Unable to get module for %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) + except Exception as ex: + msg = ('Unexpected error while retrieving module for %s: %s' + % (safe_repr(klass), ex)) + raise exceptions.AstroidBuildingException(msg) + try: + name = klass.__name__ + except AttributeError: + msg = 'Unable to get name for %s' % safe_repr(klass) + raise exceptions.AstroidBuildingException(msg) + except Exception as ex: + exc = ('Unexpected error while retrieving name for %s: %s' + % (safe_repr(klass), ex)) + raise exceptions.AstroidBuildingException(exc) + # take care, on living object __module__ is regularly wrong :( + modastroid = self.ast_from_module_name(modname) + if klass is obj: + for inferred in modastroid.igetattr(name, context): + yield inferred + else: + for inferred in modastroid.igetattr(name, context): + yield inferred.instantiate_class() + + def register_failed_import_hook(self, hook): + """Registers a hook to resolve imports that cannot be found otherwise. + + `hook` must be a function that accepts a single argument `modname` which + contains the name of the module or package that could not be imported. + If `hook` can resolve the import, must return a node of type `astroid.Module`, + otherwise, it must raise `AstroidBuildingException`. 
+ """ + self._failed_import_hooks.append(hook) + + def cache_module(self, module): + """Cache a module if no module with the same name is known yet.""" + self.astroid_cache.setdefault(module.name, module) + + def clear_cache(self, astroid_builtin=None): + # XXX clear transforms + self.astroid_cache.clear() + # force bootstrap again, else we may ends up with cache inconsistency + # between the manager and CONST_PROXY, making + # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the + # test order + import astroid.raw_building + astroid.raw_building._astroid_bootstrapping( + astroid_builtin=astroid_builtin) diff --git a/pylibs/logilab/astng/mixins.py b/pymode/libs/astroid/mixins.py similarity index 60% rename from pylibs/logilab/astng/mixins.py rename to pymode/libs/astroid/mixins.py index 869a25ad..57082f0f 100644 --- a/pylibs/logilab/astng/mixins.py +++ b/pymode/libs/astroid/mixins.py @@ -1,47 +1,35 @@ -# This program is free software; you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free Software -# Foundation; either version 2 of the License, or (at your option) any later -# version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. 
-# contact mailto:thenault@gmail.com # -# This file is part of logilab-astng. +# This file is part of astroid. # -# logilab-astng is free software: you can redistribute it and/or modify it +# astroid is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 2.1 of the License, or (at your # option) any later version. # -# logilab-astng is distributed in the hope that it will be useful, but +# astroid is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License # for more details. # # You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . +# with astroid. If not, see . """This module contains some mixins for the different nodes. """ -from logilab.astng.exceptions import (ASTNGBuildingException, InferenceError, - NotFoundError) +import warnings + +from astroid import decorators +from astroid import exceptions class BlockRangeMixIn(object): """override block range """ - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self._blockstart_toline() + + @decorators.cachedproperty + def blockstart_tolineno(self): + return self.lineno def _elsed_block_range(self, lineno, orelse, last=None): """handle block line numbers range for try/finally, for, if and while @@ -55,6 +43,7 @@ def _elsed_block_range(self, lineno, orelse, last=None): return lineno, orelse[0].fromlineno - 1 return lineno, last or self.tolineno + class FilterStmtsMixin(object): """Mixin for statement filtering and assignment type""" @@ -66,15 +55,29 @@ def _get_filtered_stmts(self, _, node, _stmts, mystmt): return [node], True return _stmts, False - def ass_type(self): + def assign_type(self): 
return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + class AssignTypeMixin(object): - def ass_type(self): + def assign_type(self): return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt): """method used in filter_stmts""" if self is mystmt: @@ -88,18 +91,24 @@ def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt): class ParentAssignTypeMixin(AssignTypeMixin): - def ass_type(self): - return self.parent.ass_type() + def assign_type(self): + return self.parent.assign_type() + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.assign_type() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() -class FromImportMixIn(FilterStmtsMixin): +class ImportFromMixin(FilterStmtsMixin): """MixIn for From and Import Nodes""" def _infer_name(self, frame, name): return name - def do_import_module(self, modname): + def do_import_module(self, modname=None): """return the ast for a module whose name is imported by """ # handle special case where we are on a package node importing a module @@ -108,17 +117,22 @@ def do_import_module(self, modname): # XXX: no more needed ? 
mymodule = self.root() level = getattr(self, 'level', None) # Import as no level + if modname is None: + modname = self.modname # XXX we should investigate deeper if we really want to check # importing itself: modname and mymodule.name be relative or absolute if mymodule.relative_to_absolute_name(modname, level) == mymodule.name: # FIXME: we used to raise InferenceError here, but why ? return mymodule try: - return mymodule.import_module(modname, level=level) - except ASTNGBuildingException: - raise InferenceError(modname) - except SyntaxError, ex: - raise InferenceError(str(ex)) + return mymodule.import_module(modname, level=level, + relative_only=level and level >= 1) + except exceptions.AstroidBuildingException as ex: + if isinstance(ex.args[0], SyntaxError): + raise exceptions.InferenceError(str(ex)) + raise exceptions.InferenceError(modname) + except SyntaxError as ex: + raise exceptions.InferenceError(str(ex)) def real_name(self, asname): """get name from 'as' name""" @@ -130,7 +144,4 @@ def real_name(self, asname): _asname = name if asname == _asname: return name - raise NotFoundError(asname) - - - + raise exceptions.NotFoundError(asname) diff --git a/pylibs/logilab/common/modutils.py b/pymode/libs/astroid/modutils.py similarity index 58% rename from pylibs/logilab/common/modutils.py rename to pymode/libs/astroid/modutils.py index ce0c2971..31104cb5 100644 --- a/pylibs/logilab/common/modutils.py +++ b/pymode/libs/astroid/modutils.py @@ -1,67 +1,114 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # -# This file is part of logilab-common. +# This file is part of astroid. 
# -# logilab-common is free software: you can redistribute it and/or modify it under +# astroid is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 2.1 of the License, or (at your option) any # later version. # -# logilab-common is distributed in the hope that it will be useful, but WITHOUT +# astroid is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License along -# with logilab-common. If not, see . +# with astroid. If not, see . """Python modules manipulation utility functions. :type PY_SOURCE_EXTS: tuple(str) :var PY_SOURCE_EXTS: list of possible python source file extension -:type STD_LIB_DIR: str -:var STD_LIB_DIR: directory where standard modules are located +:type STD_LIB_DIRS: set of str +:var STD_LIB_DIRS: directories where standard modules are located :type BUILTIN_MODULES: dict :var BUILTIN_MODULES: dictionary with builtin module names has key """ -__docformat__ = "restructuredtext en" +from __future__ import with_statement -import sys +import imp import os -from os.path import splitext, join, abspath, isdir, dirname, exists, basename -from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY -from distutils.sysconfig import get_config_var, get_python_lib, get_python_version +import platform +import sys +from distutils.sysconfig import get_python_lib +from distutils.errors import DistutilsPlatformError +import zipimport try: - import zipimport + import pkg_resources except ImportError: - zipimport = None - -ZIPFILE = object() + pkg_resources = None -from logilab.common import STD_BLACKLIST, _handle_blacklist +PY_ZIPMODULE = object() -# Notes about 
STD_LIB_DIR -# Consider arch-specific installation for STD_LIB_DIR definition -# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on -# -# :see: `Problems with /usr/lib64 builds `_ -# :see: `FHS `_ if sys.platform.startswith('win'): PY_SOURCE_EXTS = ('py', 'pyw') PY_COMPILED_EXTS = ('dll', 'pyd') - STD_LIB_DIR = get_python_lib(standard_lib=1) else: PY_SOURCE_EXTS = ('py',) PY_COMPILED_EXTS = ('so',) - # extend lib dir with some arch-dependant paths - STD_LIB_DIR = join(get_config_var("LIBDIR"), "python%s" % get_python_version()) -BUILTIN_MODULES = dict(zip(sys.builtin_module_names, - [1]*len(sys.builtin_module_names))) + +try: + # The explicit sys.prefix is to work around a patch in virtualenv that + # replaces the 'real' sys.prefix (i.e. the location of the binary) + # with the prefix from which the virtualenv was created. This throws + # off the detection logic for standard library modules, thus the + # workaround. + STD_LIB_DIRS = set([ + get_python_lib(standard_lib=True, prefix=sys.prefix), + # Take care of installations where exec_prefix != prefix. + get_python_lib(standard_lib=True, prefix=sys.exec_prefix), + get_python_lib(standard_lib=True)]) +# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to +# non-valid path, see https://bugs.pypy.org/issue1164 +except DistutilsPlatformError: + STD_LIB_DIRS = set() + +if os.name == 'nt': + STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) + try: + # real_prefix is defined when running inside virtualenv. + STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) + except AttributeError: + pass +if platform.python_implementation() == 'PyPy': + _root = os.path.join(sys.prefix, 'lib_pypy') + STD_LIB_DIRS.add(_root) + try: + # real_prefix is defined when running inside virtualenv. 
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'lib_pypy')) + except AttributeError: + pass + del _root +if os.name == 'posix': + # Need the real prefix is we're under a virtualenv, otherwise + # the usual one will do. + try: + prefix = sys.real_prefix + except AttributeError: + prefix = sys.prefix + + def _posix_path(path): + base_python = 'python%d.%d' % sys.version_info[:2] + return os.path.join(prefix, path, base_python) + + STD_LIB_DIRS.add(_posix_path('lib')) + if sys.maxsize > 2**32: + # This tries to fix a problem with /usr/lib64 builds, + # where systems are running both 32-bit and 64-bit code + # on the same machine, which reflects into the places where + # standard library could be found. More details can be found + # here http://bugs.python.org/issue1294959. + # An easy reproducing case would be + # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753 + STD_LIB_DIRS.add(_posix_path('lib64')) + +EXT_LIB_DIR = get_python_lib() +IS_JYTHON = platform.python_implementation() == 'Jython' +BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True) class NoSourceFile(Exception): @@ -69,30 +116,53 @@ class NoSourceFile(Exception): source file for a precompiled file """ -class LazyObject(object): - def __init__(self, module, obj): - self.module = module - self.obj = obj - self._imported = None +def _normalize_path(path): + return os.path.normcase(os.path.abspath(path)) - def _getobj(self): - if self._imported is None: - self._imported = getattr(load_module_from_name(self.module), - self.obj) - return self._imported - def __getattribute__(self, attr): - try: - return super(LazyObject, self).__getattribute__(attr) - except AttributeError, ex: - return getattr(self._getobj(), attr) +def _path_from_filename(filename, is_jython=IS_JYTHON): + if not is_jython: + if sys.version_info > (3, 0): + return filename + else: + if filename.endswith(".pyc"): + return filename[:-1] + return filename + head, has_pyclass, _ = filename.partition("$py.class") + if 
has_pyclass: + return head + ".py" + return filename + - def __call__(self, *args, **kwargs): - return self._getobj()(*args, **kwargs) +def _handle_blacklist(blacklist, dirnames, filenames): + """remove files/directories in the black list + dirnames/filenames are usually from os.walk + """ + for norecurs in blacklist: + if norecurs in dirnames: + dirnames.remove(norecurs) + elif norecurs in filenames: + filenames.remove(norecurs) + + +_NORM_PATH_CACHE = {} + +def _cache_normalize_path(path): + """abspath with caching""" + # _module_file calls abspath on every path in sys.path every time it's + # called; on a larger codebase this easily adds up to half a second just + # assembling path components. This cache alleviates that. + try: + return _NORM_PATH_CACHE[path] + except KeyError: + if not path: # don't cache result for '' + return _normalize_path(path) + result = _NORM_PATH_CACHE[path] = _normalize_path(path) + return result -def load_module_from_name(dotted_name, path=None, use_sys=1): - """Load a Python module from it's name. +def load_module_from_name(dotted_name, path=None, use_sys=True): + """Load a Python module from its name. :type dotted_name: str :param dotted_name: python name of a module or package @@ -117,7 +187,7 @@ def load_module_from_name(dotted_name, path=None, use_sys=1): def load_module_from_modpath(parts, path=None, use_sys=1): - """Load a python module from it's splitted name. + """Load a python module from its splitted name. 
:type parts: list(str) or tuple(str) :param parts: @@ -151,20 +221,28 @@ def load_module_from_modpath(parts, path=None, use_sys=1): if len(modpath) != len(parts): # even with use_sys=False, should try to get outer packages from sys.modules module = sys.modules.get(curname) + elif use_sys: + # because it may have been indirectly loaded through a parent + module = sys.modules.get(curname) if module is None: - mp_file, mp_filename, mp_desc = find_module(part, path) - module = load_module(curname, mp_file, mp_filename, mp_desc) + mp_file, mp_filename, mp_desc = imp.find_module(part, path) + module = imp.load_module(curname, mp_file, mp_filename, mp_desc) + # mp_file still needs to be closed. + if mp_file: + mp_file.close() if prevmodule: setattr(prevmodule, part, module) _file = getattr(module, '__file__', '') - if not _file and len(modpath) != len(parts): - raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]) ) - path = [dirname( _file )] prevmodule = module + if not _file and _is_namespace(curname): + continue + if not _file and len(modpath) != len(parts): + raise ImportError('no module in %s' % '.'.join(parts[len(modpath):])) + path = [os.path.dirname(_file)] return module -def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): +def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None): """Load a Python module from it's path. 
:type filepath: str @@ -192,9 +270,11 @@ def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None): def _check_init(path, mod_path): """check there are some __init__.py all along the way""" + modpath = [] for part in mod_path: - path = join(path, part) - if not _has_init(path): + modpath.append(part) + path = os.path.join(path, part) + if not _is_namespace('.'.join(modpath)) and not _has_init(path): return False return True @@ -219,21 +299,20 @@ def modpath_from_file(filename, extrapath=None): :rtype: list(str) :return: the corresponding splitted module's name """ - base = splitext(abspath(filename))[0] + filename = _path_from_filename(filename) + filename = os.path.abspath(filename) + base = os.path.splitext(filename)[0] if extrapath is not None: for path_ in extrapath: - path = abspath(path_) - if path and base[:len(path)] == path: + path = os.path.abspath(path_) + if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path): submodpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] if _check_init(path, submodpath[:-1]): return extrapath[path_].split('.') + submodpath for path in sys.path: - path = abspath(path) - if path and base[:len(path)] == path: - if filename.find('site-packages') != -1 and \ - path.find('site-packages') == -1: - continue + path = _cache_normalize_path(path) + if path and os.path.normcase(base).startswith(path): modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] if _check_init(path, modpath[:-1]): return modpath @@ -241,8 +320,10 @@ def modpath_from_file(filename, extrapath=None): filename, ', \n'.join(sys.path))) - def file_from_modpath(modpath, path=None, context_file=None): + return file_info_from_modpath(modpath, path, context_file)[0] + +def file_info_from_modpath(modpath, path=None, context_file=None): """given a mod path (i.e. 
splitted module / package name), return the corresponding file, giving priority to source file over precompiled file if it exists @@ -267,13 +348,13 @@ def file_from_modpath(modpath, path=None, context_file=None): :raise ImportError: if there is no such module in the directory - :rtype: str or None + :rtype: (str or None, import type) :return: the path to the module's file or None if it's an integrated builtin module such as 'sys' """ if context_file is not None: - context = dirname(context_file) + context = os.path.dirname(context_file) else: context = context_file if modpath[0] == 'xml': @@ -284,16 +365,15 @@ def file_from_modpath(modpath, path=None, context_file=None): return _file_from_modpath(modpath, path, context) elif modpath == ['os', 'path']: # FIXME: currently ignoring search_path... - return os.path.__file__ + return os.path.__file__, imp.PY_SOURCE return _file_from_modpath(modpath, path, context) - def get_module_part(dotted_name, context_file=None): """given a dotted name return the module part of the name : - >>> get_module_part('logilab.common.modutils.get_module_part') - 'logilab.common.modutils' + >>> get_module_part('astroid.as_string.dump') + 'astroid.as_string' :type dotted_name: str :param dotted_name: full name of the identifier we are interested in @@ -336,11 +416,11 @@ def get_module_part(dotted_name, context_file=None): starti = 1 while parts[starti] == '': # for all further dots: change context starti += 1 - context_file = dirname(context_file) + context_file = os.path.dirname(context_file) for i in range(starti, len(parts)): try: - file_from_modpath(parts[starti:i+1], - path=path, context_file=context_file) + file_from_modpath(parts[starti:i+1], path=path, + context_file=context_file) except ImportError: if not i >= max(1, len(parts) - 2): raise @@ -348,47 +428,7 @@ def get_module_part(dotted_name, context_file=None): return dotted_name -def get_modules(package, src_directory, blacklist=STD_BLACKLIST): - """given a package directory 
return a list of all available python - modules in the package and its subpackages - - :type package: str - :param package: the python name for the package - - :type src_directory: str - :param src_directory: - path of the directory corresponding to the package - - :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to - the value of `logilab.common.STD_BLACKLIST` - - :rtype: list - :return: - the list of all available python modules in the package and its - subpackages - """ - modules = [] - for directory, dirnames, filenames in os.walk(src_directory): - _handle_blacklist(blacklist, dirnames, filenames) - # check for __init__.py - if not '__init__.py' in filenames: - dirnames[:] = () - continue - if directory != src_directory: - dir_package = directory[len(src_directory):].replace(os.sep, '.') - modules.append(package + dir_package) - for filename in filenames: - if _is_python_file(filename) and filename != '__init__.py': - src = join(directory, filename) - module = package + src[len(src_directory):-3] - modules.append(module.replace(os.sep, '.')) - return modules - - - -def get_module_files(src_directory, blacklist=STD_BLACKLIST): +def get_module_files(src_directory, blacklist): """given a package directory return a list of all available python module's files in the package and its subpackages @@ -397,9 +437,8 @@ def get_module_files(src_directory, blacklist=STD_BLACKLIST): path of the directory corresponding to the package :type blacklist: list or tuple - :param blacklist: - optional list of files or directory to ignore, default to the value of - `logilab.common.STD_BLACKLIST` + :param blacklist: iterable + list of files or directories to ignore. 
:rtype: list :return: @@ -415,7 +454,7 @@ def get_module_files(src_directory, blacklist=STD_BLACKLIST): continue for filename in filenames: if _is_python_file(filename): - src = join(directory, filename) + src = os.path.join(directory, filename) files.append(src) return files @@ -434,37 +473,26 @@ def get_source_file(filename, include_no_ext=False): :rtype: str :return: the absolute path of the source file if it exists """ - base, orig_ext = splitext(abspath(filename)) + filename = os.path.abspath(_path_from_filename(filename)) + base, orig_ext = os.path.splitext(filename) for ext in PY_SOURCE_EXTS: source_path = '%s.%s' % (base, ext) - if exists(source_path): + if os.path.exists(source_path): return source_path - if include_no_ext and not orig_ext and exists(base): + if include_no_ext and not orig_ext and os.path.exists(base): return base raise NoSourceFile(filename) -def cleanup_sys_modules(directories): - """remove submodules of `directories` from `sys.modules`""" - for modname, module in sys.modules.items(): - modfile = getattr(module, '__file__', None) - if modfile: - for directory in directories: - if modfile.startswith(directory): - del sys.modules[modname] - break - - def is_python_source(filename): """ rtype: bool return: True if the filename is a python source file """ - return splitext(filename)[1][1:] in PY_SOURCE_EXTS + return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS - -def is_standard_module(modname, std_path=(STD_LIB_DIR,)): +def is_standard_module(modname, std_path=None): """try to guess if a module is a standard python module (by default, see `std_path` parameter's description) @@ -484,22 +512,23 @@ def is_standard_module(modname, std_path=(STD_LIB_DIR,)): modname = modname.split('.')[0] try: filename = file_from_modpath([modname]) - except ImportError, ex: + except ImportError: # import failed, i'm probably not so wrong by supposing it's # not standard... 
- return 0 + return False # modules which are not living in a file are considered standard # (sys and __builtin__ for instance) if filename is None: - return 1 - filename = abspath(filename) + # we assume there are no namespaces in stdlib + return not _is_namespace(modname) + filename = _normalize_path(filename) + if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)): + return False + if std_path is None: + std_path = STD_LIB_DIRS for path in std_path: - path = abspath(path) - if filename.startswith(path): - pfx_len = len(path) - if filename[pfx_len+1:pfx_len+14] != 'site-packages': - return 1 - return 0 + if filename.startswith(_cache_normalize_path(path)): + return True return False @@ -519,12 +548,16 @@ def is_relative(modname, from_file): :return: true if the module has been imported relatively to `from_file` """ - if not isdir(from_file): - from_file = dirname(from_file) + if not os.path.isdir(from_file): + from_file = os.path.dirname(from_file) if from_file in sys.path: return False try: - find_module(modname.split('.')[0], [from_file]) + stream, _, _ = imp.find_module(modname.split('.')[0], [from_file]) + + # Close the stream to avoid ResourceWarnings. 
+ if stream: + stream.close() return True except ImportError: return False @@ -547,28 +580,42 @@ def _file_from_modpath(modpath, path=None, context=None): mtype, mp_filename = _module_file(modpath, path) else: mtype, mp_filename = _module_file(modpath, path) - if mtype == PY_COMPILED: + if mtype == imp.PY_COMPILED: try: - return get_source_file(mp_filename) + return get_source_file(mp_filename), imp.PY_SOURCE except NoSourceFile: - return mp_filename - elif mtype == C_BUILTIN: + return mp_filename, imp.PY_COMPILED + elif mtype == imp.C_BUILTIN: # integrated builtin module - return None - elif mtype == PKG_DIRECTORY: + return None, imp.C_BUILTIN + elif mtype == imp.PKG_DIRECTORY: mp_filename = _has_init(mp_filename) - return mp_filename + mtype = imp.PY_SOURCE + return mp_filename, mtype def _search_zip(modpath, pic): - for filepath, importer in pic.items(): + for filepath, importer in list(pic.items()): if importer is not None: if importer.find_module(modpath[0]): - if not importer.find_module('/'.join(modpath)): + if not importer.find_module(os.path.sep.join(modpath)): raise ImportError('No module named %s in %s/%s' % ( - '.'.join(modpath[1:]), file, modpath)) - return ZIPFILE, abspath(filepath) + '/' + '/'.join(modpath), filepath + '.'.join(modpath[1:]), filepath, modpath)) + return (PY_ZIPMODULE, + os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), + filepath) raise ImportError('No module named %s' % '.'.join(modpath)) +try: + import pkg_resources +except ImportError: + pkg_resources = None + + +def _is_namespace(modname): + return (pkg_resources is not None + and modname in pkg_resources._namespace_packages) + + def _module_file(modpath, path=None): """get a module type / file path @@ -599,19 +646,42 @@ def _module_file(modpath, path=None): checkeggs = True except AttributeError: checkeggs = False + # pkg_resources support (aka setuptools namespace packages) + if _is_namespace(modpath[0]) and modpath[0] in sys.modules: + # setuptools has added 
into sys.modules a module object with proper + # __path__, get back information from there + module = sys.modules[modpath.pop(0)] + path = list(module.__path__) + if not modpath: + return imp.C_BUILTIN, None imported = [] while modpath: + modname = modpath[0] + # take care to changes in find_module implementation wrt builtin modules + # + # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23) + # >>> imp.find_module('posix') + # (None, 'posix', ('', '', 6)) + # + # Python 3.3.1 (default, Apr 26 2013, 12:08:46) + # >>> imp.find_module('posix') + # (None, None, ('', '', 6)) try: - _, mp_filename, mp_desc = find_module(modpath[0], path) + stream, mp_filename, mp_desc = imp.find_module(modname, path) except ImportError: if checkeggs: return _search_zip(modpath, pic)[:2] raise else: - if checkeggs: - fullabspath = [abspath(x) for x in _path] + # Don't forget to close the stream to avoid + # spurious ResourceWarnings. + if stream: + stream.close() + + if checkeggs and mp_filename: + fullabspath = [_cache_normalize_path(x) for x in _path] try: - pathindex = fullabspath.index(dirname(abspath(mp_filename))) + pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename))) emtype, emp_filename, zippath = _search_zip(modpath, pic) if pathindex > _path.index(zippath): # an egg takes priority @@ -625,10 +695,28 @@ def _module_file(modpath, path=None): imported.append(modpath.pop(0)) mtype = mp_desc[2] if modpath: - if mtype != PKG_DIRECTORY: + if mtype != imp.PKG_DIRECTORY: raise ImportError('No module %s in %s' % ('.'.join(modpath), '.'.join(imported))) - path = [mp_filename] + # XXX guess if package is using pkgutil.extend_path by looking for + # those keywords in the first four Kbytes + try: + with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream: + data = stream.read(4096) + except IOError: + path = [mp_filename] + else: + extend_path = b'pkgutil' in data and b'extend_path' in data + declare_namespace = ( + b"pkg_resources" in data + and 
b"declare_namespace(__name__)" in data) + if extend_path or declare_namespace: + # extend_path is called, search sys.path for module/packages + # of this name see pkgutil.extend_path documentation + path = [os.path.join(p, *imported) for p in sys.path + if os.path.isdir(os.path.join(p, *imported))] + else: + path = [mp_filename] return mtype, mp_filename def _is_python_file(filename): @@ -646,8 +734,8 @@ def _has_init(directory): """if the given directory has a valid __init__ file, return its path, else return None """ - mod_or_pack = join(directory, '__init__') + mod_or_pack = os.path.join(directory, '__init__') for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'): - if exists(mod_or_pack + '.' + ext): + if os.path.exists(mod_or_pack + '.' + ext): return mod_or_pack + '.' + ext return None diff --git a/pylibs/logilab/astng/node_classes.py b/pymode/libs/astroid/node_classes.py similarity index 54% rename from pylibs/logilab/astng/node_classes.py rename to pymode/libs/astroid/node_classes.py index 607ad907..ca773c3a 100644 --- a/pylibs/logilab/astng/node_classes.py +++ b/pymode/libs/astroid/node_classes.py @@ -1,51 +1,65 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com # -# This file is part of logilab-astng. +# This file is part of astroid. # -# logilab-astng is free software: you can redistribute it and/or modify it +# astroid is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 2.1 of the License, or (at your # option) any later version. 
# -# logilab-astng is distributed in the hope that it will be useful, but +# astroid is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License # for more details. # # You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . +# with astroid. If not, see . """Module for some node classes. More nodes in scoped_nodes.py """ -import sys +import abc +import warnings -from logilab.astng import BUILTINS_MODULE -from logilab.astng.exceptions import NoDefault -from logilab.astng.bases import (NodeNG, Statement, Instance, InferenceContext, - _infer_stmts, YES) -from logilab.astng.mixins import BlockRangeMixIn, AssignTypeMixin, \ - ParentAssignTypeMixin, FromImportMixIn +import lazy_object_proxy +import six +from astroid import bases +from astroid import context as contextmod +from astroid import decorators +from astroid import exceptions +from astroid import mixins +from astroid import util + +BUILTINS = six.moves.builtins.__name__ + + +@bases.raise_if_nothing_inferred def unpack_infer(stmt, context=None): """recursively generate nodes inferred by the given statement. 
If the inferred value is a list or a tuple, recurse on the elements """ if isinstance(stmt, (List, Tuple)): for elt in stmt.elts: - for infered_elt in unpack_infer(elt, context): - yield infered_elt + if elt is util.YES: + yield elt + continue + for inferred_elt in unpack_infer(elt, context): + yield inferred_elt return - infered = stmt.infer(context).next() - if infered is stmt or infered is YES: - yield infered + # if inferred is a final node, return it and stop + inferred = next(stmt.infer(context)) + if inferred is stmt: + yield inferred return - for infered in stmt.infer(context): - for inf_inf in unpack_infer(infered, context): - yield inf_inf + # else, infer recursivly, except YES object that should be returned as is + for inferred in stmt.infer(context): + if inferred is util.YES: + yield inferred + else: + for inf_inf in unpack_infer(inferred, context): + yield inf_inf def are_exclusive(stmt1, stmt2, exceptions=None): @@ -80,16 +94,18 @@ def are_exclusive(stmt1, stmt2, exceptions=None): # nodes are in exclusive branches if isinstance(node, If) and exceptions is None: if (node.locate_child(previous)[1] - is not node.locate_child(children[node])[1]): + is not node.locate_child(children[node])[1]): return True elif isinstance(node, TryExcept): c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: - if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'orelse') or - (c2attr == 'orelse' and c1attr == 'handlers')): + if ((c2attr == 'body' + and c1attr == 'handlers' + and children[node].catch(exceptions)) or + (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or + (c2attr == 'handlers' and c1attr == 'orelse') or + (c2attr == 'orelse' and c1attr == 'handlers')): return True elif c2attr == 'handlers' and c1attr == 
'handlers': return previous is not children[node] @@ -99,6 +115,31 @@ def are_exclusive(stmt1, stmt2, exceptions=None): return False +@six.add_metaclass(abc.ABCMeta) +class _BaseContainer(mixins.ParentAssignTypeMixin, + bases.NodeNG, + bases.Instance): + """Base class for Set, FrozenSet, Tuple and List.""" + + _astroid_fields = ('elts',) + + def __init__(self, elts=None): + if elts is None: + self.elts = [] + else: + self.elts = [const_factory(e) for e in elts] + + def itered(self): + return self.elts + + def bool_value(self): + return bool(self.elts) + + @abc.abstractmethod + def pytype(self): + pass + + class LookupMixIn(object): """Mixin looking up a name in the right scope """ @@ -106,25 +147,25 @@ class LookupMixIn(object): def lookup(self, name): """lookup a variable name - return the scope node and the list of assignments associated to the given - name according to the scope where it has been found (locals, globals or - builtin) + return the scope node and the list of assignments associated to the + given name according to the scope where it has been found (locals, + globals or builtin) - The lookup is starting from self's scope. If self is not a frame itself and - the name is found in the inner frame locals, statements will be filtered - to remove ignorable statements according to self's location + The lookup is starting from self's scope. 
If self is not a frame itself + and the name is found in the inner frame locals, statements will be + filtered to remove ignorable statements according to self's location """ return self.scope().scope_lookup(self, name) def ilookup(self, name): - """infered lookup + """inferred lookup - return an iterator on infered values of the statements returned by + return an iterator on inferred values of the statements returned by the lookup method """ frame, stmts = self.lookup(name) - context = InferenceContext() - return _infer_stmts(stmts, context, frame) + context = contextmod.InferenceContext() + return bases._infer_stmts(stmts, context, frame) def _filter_stmts(self, stmts, frame, offset): """filter statements to remove ignorable statements. @@ -142,8 +183,21 @@ def _filter_stmts(self, stmts, frame, offset): myframe = self.frame().parent.frame() else: myframe = self.frame() - if not myframe is frame or self is frame: - return stmts + # If the frame of this node is the same as the statement + # of this node, then the node is part of a class or + # a function definition and the frame of this node should be the + # the upper frame, not the frame of the definition. + # For more information why this is important, + # see Pylint issue #295. + # For example, for 'b', the statement is the same + # as the frame / scope: + # + # def test(b=1): + # ... 
+ + if self.statement() is myframe and myframe.parent: + myframe = myframe.parent.frame() + mystmt = self.statement() # line filtering if we are in the same frame # @@ -162,19 +216,18 @@ def _filter_stmts(self, stmts, frame, offset): # line filtering is on and we have reached our location, break if mylineno > 0 and stmt.fromlineno > mylineno: break - assert hasattr(node, 'ass_type'), (node, node.scope(), - node.scope().locals) - ass_type = node.ass_type() - + assert hasattr(node, 'assign_type'), (node, node.scope(), + node.scope().locals) + assign_type = node.assign_type() if node.has_base(self): break - _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt) + _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt) if done: break - optional_assign = ass_type.optional_assign - if optional_assign and ass_type.parent_of(self): + optional_assign = assign_type.optional_assign + if optional_assign and assign_type.parent_of(self): # we are inside a loop, loop var assigment is hidding previous # assigment _stmts = [node] @@ -189,7 +242,7 @@ def _filter_stmts(self, stmts, frame, offset): else: # we got a parent index, this means the currently visited node # is at the same block level as a previously visited node - if _stmts[pindex].ass_type().parent_of(ass_type): + if _stmts[pindex].assign_type().parent_of(assign_type): # both statements are not at the same block level continue # if currently visited node is following previously considered @@ -218,7 +271,7 @@ def _filter_stmts(self, stmts, frame, offset): if not (optional_assign or are_exclusive(_stmts[pindex], node)): del _stmt_parents[pindex] del _stmts[pindex] - if isinstance(node, AssName): + if isinstance(node, AssignName): if not optional_assign and stmt.parent is mystmt.parent: _stmts = [] _stmt_parents = [] @@ -231,29 +284,47 @@ def _filter_stmts(self, stmts, frame, offset): _stmt_parents.append(stmt.parent) return _stmts + # Name classes -class AssName(LookupMixIn, 
ParentAssignTypeMixin, NodeNG): +class AssignName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing an AssName node""" -class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG): +class DelName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a DelName node""" -class Name(LookupMixIn, NodeNG): +class Name(LookupMixIn, bases.NodeNG): """class representing a Name node""" - - -##################### node classes ######################################## - -class Arguments(NodeNG, AssignTypeMixin): +class Arguments(mixins.AssignTypeMixin, bases.NodeNG): """class representing an Arguments node""" - _astng_fields = ('args', 'defaults') + if six.PY3: + # Python 3.4+ uses a different approach regarding annotations, + # each argument is a new class, _ast.arg, which exposes an + # 'annotation' attribute. In astroid though, arguments are exposed + # as is in the Arguments node and the only way to expose annotations + # is by using something similar with Python 3.3: + # - we expose 'varargannotation' and 'kwargannotation' of annotations + # of varargs and kwargs. + # - we expose 'annotation', a list with annotations for + # for each normal argument. If an argument doesn't have an + # annotation, its value will be None. 
+ + _astroid_fields = ('args', 'defaults', 'kwonlyargs', + 'kw_defaults', 'annotations', + 'varargannotation', 'kwargannotation') + annotations = None + varargannotation = None + kwargannotation = None + else: + _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults') args = None defaults = None + kwonlyargs = None + kw_defaults = None def __init__(self, vararg=None, kwarg=None): self.vararg = vararg @@ -264,11 +335,25 @@ def _infer_name(self, frame, name): return name return None + @decorators.cachedproperty + def fromlineno(self): + lineno = super(Arguments, self).fromlineno + return max(lineno, self.parent.fromlineno or 0) + def format_args(self): """return arguments formatted as string""" - result = [_format_args(self.args, self.defaults)] + result = [] + if self.args: + result.append( + _format_args(self.args, self.defaults, + getattr(self, 'annotations', None)) + ) if self.vararg: result.append('*%s' % self.vararg) + if self.kwonlyargs: + if not self.vararg: + result.append('*') + result.append(_format_args(self.kwonlyargs, self.kw_defaults)) if self.kwarg: result.append('**%s' % self.kwarg) return ', '.join(result) @@ -283,7 +368,10 @@ def default_value(self, argname): idx = i - (len(self.args) - len(self.defaults)) if idx >= 0: return self.defaults[idx] - raise NoDefault() + i = _find_arg(argname, self.kwonlyargs)[0] + if i is not None and self.kw_defaults[i] is not None: + return self.kw_defaults[i] + raise exceptions.NoDefault() def is_argument(self, name): """return True if the name is defined in arguments""" @@ -299,6 +387,12 @@ def find_argname(self, argname, rec=False): return _find_arg(argname, self.args, rec) return None, None + def get_children(self): + """override get_children to skip over None elements in kw_defaults""" + for child in super(Arguments, self).get_children(): + if child is not None: + yield child + def _find_arg(argname, args, rec=False): for i, arg in enumerate(args): @@ -312,80 +406,93 @@ def _find_arg(argname, args, 
rec=False): return None, None -def _format_args(args, defaults=None): +def _format_args(args, defaults=None, annotations=None): values = [] if args is None: return '' + if annotations is None: + annotations = [] if defaults is not None: default_offset = len(args) - len(defaults) - for i, arg in enumerate(args): + packed = six.moves.zip_longest(args, annotations) + for i, (arg, annotation) in enumerate(packed): if isinstance(arg, Tuple): values.append('(%s)' % _format_args(arg.elts)) else: - values.append(arg.name) + argname = arg.name + if annotation is not None: + argname += ':' + annotation.as_string() + values.append(argname) + if defaults is not None and i >= default_offset: - values[-1] += '=' + defaults[i-default_offset].as_string() + if defaults[i-default_offset] is not None: + values[-1] += '=' + defaults[i-default_offset].as_string() return ', '.join(values) -class AssAttr(NodeNG, ParentAssignTypeMixin): - """class representing an AssAttr node""" - _astng_fields = ('expr',) +class AssignAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): + """class representing an AssignAttr node""" + _astroid_fields = ('expr',) expr = None -class Assert(Statement): +class Assert(bases.Statement): """class representing an Assert node""" - _astng_fields = ('test', 'fail',) + _astroid_fields = ('test', 'fail',) test = None fail = None -class Assign(Statement, AssignTypeMixin): +class Assign(bases.Statement, mixins.AssignTypeMixin): """class representing an Assign node""" - _astng_fields = ('targets', 'value',) + _astroid_fields = ('targets', 'value',) targets = None value = None -class AugAssign(Statement, AssignTypeMixin): +class AugAssign(bases.Statement, mixins.AssignTypeMixin): """class representing an AugAssign node""" - _astng_fields = ('target', 'value',) + _astroid_fields = ('target', 'value',) target = None value = None -class Backquote(NodeNG): +class Repr(bases.NodeNG): """class representing a Backquote node""" - _astng_fields = ('value',) + _astroid_fields = 
('value',) value = None -class BinOp(NodeNG): +class BinOp(bases.NodeNG): """class representing a BinOp node""" - _astng_fields = ('left', 'right',) + _astroid_fields = ('left', 'right',) left = None right = None -class BoolOp(NodeNG): +class BoolOp(bases.NodeNG): """class representing a BoolOp node""" - _astng_fields = ('values',) + _astroid_fields = ('values',) values = None -class Break(Statement): +class Break(bases.Statement): """class representing a Break node""" -class CallFunc(NodeNG): - """class representing a CallFunc node""" - _astng_fields = ('func', 'args', 'starargs', 'kwargs') +class Call(bases.NodeNG): + """class representing a Call node""" + _astroid_fields = ('func', 'args', 'keywords') func = None args = None - starargs = None - kwargs = None + keywords = None + + @property + def starargs(self): + args = self.args or [] + return [arg for arg in args if isinstance(arg, Starred)] - def __init__(self): - self.starargs = None - self.kwargs = None + @property + def kwargs(self): + keywords = self.keywords or [] + return [keyword for keyword in keywords if keyword.arg is None] -class Compare(NodeNG): +class Compare(bases.NodeNG): """class representing a Compare node""" - _astng_fields = ('left', 'ops',) + _astroid_fields = ('left', 'ops',) left = None ops = None @@ -401,17 +508,25 @@ def last_child(self): return self.ops[-1][1] #return self.left -class Comprehension(NodeNG): + +class Comprehension(bases.NodeNG): """class representing a Comprehension node""" - _astng_fields = ('target', 'iter' ,'ifs') + _astroid_fields = ('target', 'iter', 'ifs') target = None iter = None ifs = None optional_assign = True - def ass_type(self): + def assign_type(self): return self + def ass_type(self): + warnings.warn('%s.ass_type() is deprecated and slated for removal' + 'in astroid 2.0, use %s.assign_type() instead.' 
+ % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.assign_type() + def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt): """method used in filter_stmts""" if self is mystmt: @@ -427,14 +542,19 @@ def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt): return stmts, False -class Const(NodeNG, Instance): +class Const(bases.NodeNG, bases.Instance): """represent a constant node like num, str, bool, None, bytes""" def __init__(self, value=None): self.value = value def getitem(self, index, context=None): - if isinstance(self.value, basestring): + if isinstance(self.value, six.string_types): + return Const(self.value[index]) + if isinstance(self.value, bytes) and six.PY3: + # Bytes aren't instances of six.string_types + # on Python 3. Also, indexing them should return + # integers. return Const(self.value[index]) raise TypeError('%r (value=%s)' % (self, self.value)) @@ -442,7 +562,7 @@ def has_dynamic_getattr(self): return False def itered(self): - if isinstance(self.value, basestring): + if isinstance(self.value, six.string_types): return self.value raise TypeError() @@ -450,13 +570,13 @@ def pytype(self): return self._proxied.qname() -class Continue(Statement): +class Continue(bases.Statement): """class representing a Continue node""" -class Decorators(NodeNG): +class Decorators(bases.NodeNG): """class representing a Decorators node""" - _astng_fields = ('nodes',) + _astroid_fields = ('nodes',) nodes = None def __init__(self, nodes=None): @@ -466,31 +586,33 @@ def scope(self): # skip the function node to go directly to the upper level scope return self.parent.parent.scope() -class DelAttr(NodeNG, ParentAssignTypeMixin): + +class DelAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a DelAttr node""" - _astng_fields = ('expr',) + _astroid_fields = ('expr',) expr = None -class Delete(Statement, AssignTypeMixin): + +class Delete(mixins.AssignTypeMixin, bases.Statement): """class 
representing a Delete node""" - _astng_fields = ('targets',) + _astroid_fields = ('targets',) targets = None -class Dict(NodeNG, Instance): +class Dict(bases.NodeNG, bases.Instance): """class representing a Dict node""" - _astng_fields = ('items',) + _astroid_fields = ('items',) def __init__(self, items=None): if items is None: self.items = [] else: self.items = [(const_factory(k), const_factory(v)) - for k,v in items.iteritems()] + for k, v in list(items.items())] def pytype(self): - return '%s.dict' % BUILTINS_MODULE + return '%s.dict' % BUILTINS def get_children(self): """get children of a Dict node""" @@ -508,38 +630,48 @@ def last_child(self): def itered(self): return self.items[::2] - def getitem(self, key, context=None): - for i in xrange(0, len(self.items), 2): - for inferedkey in self.items[i].infer(context): - if inferedkey is YES: + def getitem(self, lookup_key, context=None): + for key, value in self.items: + # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}. + if isinstance(key, DictUnpack): + try: + return value.getitem(lookup_key, context) + except IndexError: + continue + for inferredkey in key.infer(context): + if inferredkey is util.YES: continue - if isinstance(inferedkey, Const) and inferedkey.value == key: - return self.items[i+1] - raise IndexError(key) + if isinstance(inferredkey, Const) \ + and inferredkey.value == lookup_key: + return value + # This should raise KeyError, but all call sites only catch + # IndexError. Let's leave it like that for now. 
+ raise IndexError(lookup_key) -class Discard(Statement): - """class representing a Discard node""" - _astng_fields = ('value',) +class Expr(bases.Statement): + """class representing a Expr node""" + _astroid_fields = ('value',) value = None -class Ellipsis(NodeNG): +class Ellipsis(bases.NodeNG): # pylint: disable=redefined-builtin """class representing an Ellipsis node""" -class EmptyNode(NodeNG): +class EmptyNode(bases.NodeNG): """class representing an EmptyNode node""" -class ExceptHandler(Statement, AssignTypeMixin): +class ExceptHandler(mixins.AssignTypeMixin, bases.Statement): """class representing an ExceptHandler node""" - _astng_fields = ('type', 'name', 'body',) + _astroid_fields = ('type', 'name', 'body',) type = None name = None body = None - def _blockstart_toline(self): + @decorators.cachedproperty + def blockstart_tolineno(self): if self.name: return self.name.tolineno elif self.type: @@ -547,11 +679,6 @@ def _blockstart_toline(self): else: return self.lineno - def set_line_info(self, lastchild): - self.fromlineno = self.lineno - self.tolineno = lastchild.tolineno - self.blockstart_tolineno = self._blockstart_toline() - def catch(self, exceptions): if self.type is None or exceptions is None: return True @@ -560,47 +687,62 @@ def catch(self, exceptions): return True -class Exec(Statement): +class Exec(bases.Statement): """class representing an Exec node""" - _astng_fields = ('expr', 'globals', 'locals',) + _astroid_fields = ('expr', 'globals', 'locals',) expr = None globals = None locals = None -class ExtSlice(NodeNG): +class ExtSlice(bases.NodeNG): """class representing an ExtSlice node""" - _astng_fields = ('dims',) + _astroid_fields = ('dims',) dims = None -class For(BlockRangeMixIn, AssignTypeMixin, Statement): +class For(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): """class representing a For node""" - _astng_fields = ('target', 'iter', 'body', 'orelse',) + _astroid_fields = ('target', 'iter', 'body', 'orelse',) target = None 
iter = None body = None orelse = None optional_assign = True - def _blockstart_toline(self): + @decorators.cachedproperty + def blockstart_tolineno(self): return self.iter.tolineno -class From(FromImportMixIn, Statement): +class AsyncFor(For): + """Asynchronous For built with `async` keyword.""" + + +class Await(bases.NodeNG): + """Await node for the `await` keyword.""" + + _astroid_fields = ('value', ) + value = None + + def postinit(self, value=None): + self.value = value + + +class ImportFrom(mixins.ImportFromMixin, bases.Statement): """class representing a From node""" - def __init__(self, fromname, names, level=0): + def __init__(self, fromname, names, level=0): self.modname = fromname self.names = names self.level = level -class Getattr(NodeNG): - """class representing a Getattr node""" - _astng_fields = ('expr',) +class Attribute(bases.NodeNG): + """class representing a Attribute node""" + _astroid_fields = ('expr',) expr = None -class Global(Statement): +class Global(bases.Statement): """class representing a Global node""" def __init__(self, names): @@ -610,14 +752,15 @@ def _infer_name(self, frame, name): return name -class If(BlockRangeMixIn, Statement): +class If(mixins.BlockRangeMixIn, bases.Statement): """class representing an If node""" - _astng_fields = ('test', 'body', 'orelse') + _astroid_fields = ('test', 'body', 'orelse') test = None body = None orelse = None - def _blockstart_toline(self): + @decorators.cachedproperty + def blockstart_tolineno(self): return self.test.tolineno def block_range(self, lineno): @@ -630,51 +773,41 @@ def block_range(self, lineno): self.body[0].fromlineno - 1) -class IfExp(NodeNG): +class IfExp(bases.NodeNG): """class representing an IfExp node""" - _astng_fields = ('test', 'body', 'orelse') + _astroid_fields = ('test', 'body', 'orelse') test = None body = None orelse = None -class Import(FromImportMixIn, Statement): +class Import(mixins.ImportFromMixin, bases.Statement): """class representing an Import node""" -class 
Index(NodeNG): +class Index(bases.NodeNG): """class representing an Index node""" - _astng_fields = ('value',) + _astroid_fields = ('value',) value = None -class Keyword(NodeNG): +class Keyword(bases.NodeNG): """class representing a Keyword node""" - _astng_fields = ('value',) + _astroid_fields = ('value',) value = None -class List(NodeNG, Instance, ParentAssignTypeMixin): +class List(_BaseContainer): """class representing a List node""" - _astng_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): - return '%s.list' % BUILTINS_MODULE + return '%s.list' % BUILTINS def getitem(self, index, context=None): return self.elts[index] - def itered(self): - return self.elts - -class Nonlocal(Statement): +class Nonlocal(bases.Statement): """class representing a Nonlocal node""" def __init__(self, names): @@ -684,26 +817,26 @@ def _infer_name(self, frame, name): return name -class Pass(Statement): +class Pass(bases.Statement): """class representing a Pass node""" -class Print(Statement): +class Print(bases.Statement): """class representing a Print node""" - _astng_fields = ('dest', 'values',) + _astroid_fields = ('dest', 'values',) dest = None values = None -class Raise(Statement): +class Raise(bases.Statement): """class representing a Raise node""" exc = None - if sys.version_info < (3, 0): - _astng_fields = ('exc', 'inst', 'tback') + if six.PY2: + _astroid_fields = ('exc', 'inst', 'tback') inst = None tback = None else: - _astng_fields = ('exc', 'cause') + _astroid_fields = ('exc', 'cause') exc = None cause = None @@ -715,52 +848,42 @@ def raises_not_implemented(self): return True -class Return(Statement): +class Return(bases.Statement): """class representing a Return node""" - _astng_fields = ('value',) + _astroid_fields = ('value',) value = None -class Set(NodeNG, Instance, ParentAssignTypeMixin): +class Set(_BaseContainer): """class representing a Set node""" - 
_astng_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): - return '%s.set' % BUILTINS_MODULE + return '%s.set' % BUILTINS - def itered(self): - return self.elts - -class Slice(NodeNG): +class Slice(bases.NodeNG): """class representing a Slice node""" - _astng_fields = ('lower', 'upper', 'step') + _astroid_fields = ('lower', 'upper', 'step') lower = None upper = None step = None -class Starred(NodeNG): +class Starred(mixins.ParentAssignTypeMixin, bases.NodeNG): """class representing a Starred node""" - _astng_fields = ('value',) + _astroid_fields = ('value',) value = None -class Subscript(NodeNG): +class Subscript(bases.NodeNG): """class representing a Subscript node""" - _astng_fields = ('value', 'slice') + _astroid_fields = ('value', 'slice') value = None slice = None -class TryExcept(BlockRangeMixIn, Statement): +class TryExcept(mixins.BlockRangeMixIn, bases.Statement): """class representing a TryExcept node""" - _astng_fields = ('body', 'handlers', 'orelse',) + _astroid_fields = ('body', 'handlers', 'orelse',) body = None handlers = None orelse = None @@ -768,9 +891,6 @@ class TryExcept(BlockRangeMixIn, Statement): def _infer_name(self, frame, name): return name - def _blockstart_toline(self): - return self.lineno - def block_range(self, lineno): """handle block line numbers range for try/except statements""" last = None @@ -784,59 +904,47 @@ def block_range(self, lineno): return self._elsed_block_range(lineno, self.orelse, last) -class TryFinally(BlockRangeMixIn, Statement): +class TryFinally(mixins.BlockRangeMixIn, bases.Statement): """class representing a TryFinally node""" - _astng_fields = ('body', 'finalbody',) + _astroid_fields = ('body', 'finalbody',) body = None finalbody = None - def _blockstart_toline(self): - return self.lineno - def block_range(self, lineno): """handle block line numbers range for try/finally statements""" child = 
self.body[0] # py2.5 try: except: finally: if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno - and lineno > self.fromlineno and lineno <= child.tolineno): + and lineno > self.fromlineno and lineno <= child.tolineno): return child.block_range(lineno) return self._elsed_block_range(lineno, self.finalbody) -class Tuple(NodeNG, Instance, ParentAssignTypeMixin): +class Tuple(_BaseContainer): """class representing a Tuple node""" - _astng_fields = ('elts',) - - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] def pytype(self): - return '%s.tuple' % BUILTINS_MODULE + return '%s.tuple' % BUILTINS def getitem(self, index, context=None): return self.elts[index] - def itered(self): - return self.elts - -class UnaryOp(NodeNG): +class UnaryOp(bases.NodeNG): """class representing an UnaryOp node""" - _astng_fields = ('operand',) + _astroid_fields = ('operand',) operand = None -class While(BlockRangeMixIn, Statement): +class While(mixins.BlockRangeMixIn, bases.Statement): """class representing a While node""" - _astng_fields = ('test', 'body', 'orelse',) + _astroid_fields = ('test', 'body', 'orelse',) test = None body = None orelse = None - def _blockstart_toline(self): + @decorators.cachedproperty + def blockstart_tolineno(self): return self.test.tolineno def block_range(self, lineno): @@ -844,25 +952,42 @@ def block_range(self, lineno): return self. 
_elsed_block_range(lineno, self.orelse) -class With(BlockRangeMixIn, AssignTypeMixin, Statement): +class With(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): """class representing a With node""" - _astng_fields = ('expr', 'vars', 'body') - expr = None - vars = None + _astroid_fields = ('items', 'body') + items = None body = None - def _blockstart_toline(self): - if self.vars: - return self.vars.tolineno - else: - return self.expr.tolineno + @decorators.cachedproperty + def blockstart_tolineno(self): + return self.items[-1][0].tolineno + def get_children(self): + for expr, var in self.items: + yield expr + if var: + yield var + for elt in self.body: + yield elt + + +class AsyncWith(With): + """Asynchronous `with` built with the `async` keyword.""" -class Yield(NodeNG): + +class Yield(bases.NodeNG): """class representing a Yield node""" - _astng_fields = ('value',) + _astroid_fields = ('value',) value = None +class YieldFrom(Yield): + """ Class representing a YieldFrom node. 
""" + + +class DictUnpack(bases.NodeNG): + """Represents the unpacking of dicts into dicts using PEP 448.""" + + # constants ############################################################## CONST_CLS = { @@ -871,33 +996,58 @@ class Yield(NodeNG): dict: Dict, set: Set, type(None): Const, + type(NotImplemented): Const, } def _update_const_classes(): """update constant classes, so the keys of CONST_CLS can be reused""" klasses = (bool, int, float, complex, str) - if sys.version_info < (3, 0): + if six.PY2: klasses += (unicode, long) - if sys.version_info >= (2, 6): - klasses += (bytes,) + klasses += (bytes,) for kls in klasses: CONST_CLS[kls] = Const _update_const_classes() + def const_factory(value): - """return an astng node for a python value""" - # since const_factory is called to evaluate content of container (eg list, - # tuple), it may be called with some node as argument that should be left - # untouched - if isinstance(value, NodeNG): - return value + """return an astroid node for a python value""" + # XXX we should probably be stricter here and only consider stuff in + # CONST_CLS or do better treatment: in case where value is not in CONST_CLS, + # we should rather recall the builder on this value than returning an empty + # node (another option being that const_factory shouldn't be called with something + # not in CONST_CLS) + assert not isinstance(value, bases.NodeNG) try: return CONST_CLS[value.__class__](value) except (KeyError, AttributeError): - # some constants (like from gtk._gtk) don't have their class in - # CONST_CLS, though we can "assert isinstance(value, tuple(CONST_CLS))" - if isinstance(value, tuple(CONST_CLS)): - return Const(value) node = EmptyNode() node.object = value return node + + +# Backward-compatibility aliases +def instancecheck(cls, other): + wrapped = cls.__wrapped__ + other_cls = other.__class__ + is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped) + warnings.warn("%r is deprecated and slated for removal in 
astroid " + "2.0, use %r instead" % (cls.__class__.__name__, + wrapped.__name__), + PendingDeprecationWarning, stacklevel=2) + return is_instance_of + + +def proxy_alias(alias_name, node_type): + proxy = type(alias_name, (lazy_object_proxy.Proxy,), + {'__class__': object.__dict__['__class__'], + '__instancecheck__': instancecheck}) + return proxy(lambda: node_type) + +Backquote = proxy_alias('Backquote', Repr) +Discard = proxy_alias('Discard', Expr) +AssName = proxy_alias('AssName', AssignName) +AssAttr = proxy_alias('AssAttr', AssignAttr) +Getattr = proxy_alias('Getattr', Attribute) +CallFunc = proxy_alias('CallFunc', Call) +From = proxy_alias('From', ImportFrom) diff --git a/pymode/libs/astroid/nodes.py b/pymode/libs/astroid/nodes.py new file mode 100644 index 00000000..2fd6cb65 --- /dev/null +++ b/pymode/libs/astroid/nodes.py @@ -0,0 +1,87 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +""" +on all nodes : + .is_statement, returning true if the node should be considered as a + statement node + .root(), returning the root node of the tree (i.e. 
a Module) + .previous_sibling(), returning previous sibling statement node + .next_sibling(), returning next sibling statement node + .statement(), returning the first parent node marked as statement node + .frame(), returning the first node defining a new local scope (i.e. + Module, FunctionDef or ClassDef) + .set_local(name, node), define an identifier on the first parent frame, + with the node defining it. This is used by the astroid builder and should not + be used from out there. + +on ImportFrom and Import : + .real_name(name), + + +""" +# pylint: disable=unused-import,redefined-builtin + +from astroid.node_classes import ( + Arguments, AssignAttr, Assert, Assign, + AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, Compare, + Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, + Dict, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, + ImportFrom, Attribute, Global, If, IfExp, Import, Index, Keyword, + List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, + TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, + const_factory, + AsyncFor, Await, AsyncWith, + # Backwards-compatibility aliases + Backquote, Discard, AssName, AssAttr, Getattr, CallFunc, From, + # Node not present in the builtin ast module. 
+ DictUnpack, +) +from astroid.scoped_nodes import ( + Module, GeneratorExp, Lambda, DictComp, + ListComp, SetComp, FunctionDef, ClassDef, + AsyncFunctionDef, + # Backwards-compatibility aliases + Class, Function, GenExpr, +) + + + +ALL_NODE_CLASSES = ( + AsyncFunctionDef, AsyncFor, AsyncWith, Await, + + Arguments, AssignAttr, Assert, Assign, AssignName, AugAssign, + Repr, BinOp, BoolOp, Break, + Call, ClassDef, Compare, Comprehension, Const, Continue, + Decorators, DelAttr, DelName, Delete, + Dict, DictComp, DictUnpack, Expr, + Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, + For, ImportFrom, FunctionDef, + Attribute, GeneratorExp, Global, + If, IfExp, Import, Index, + Keyword, + Lambda, List, ListComp, + Name, Nonlocal, + Module, + Pass, Print, + Raise, Return, + Set, SetComp, Slice, Starred, Subscript, + TryExcept, TryFinally, Tuple, + UnaryOp, + While, With, + Yield, YieldFrom, + ) diff --git a/pymode/libs/astroid/objects.py b/pymode/libs/astroid/objects.py new file mode 100644 index 00000000..d2f4270b --- /dev/null +++ b/pymode/libs/astroid/objects.py @@ -0,0 +1,186 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +""" +Inference objects are a way to represent composite AST nodes, +which are used only as inference results, so they can't be found in the +code tree. For instance, inferring the following frozenset use, leads to an +inferred FrozenSet: + + CallFunc(func=Name('frozenset'), args=Tuple(...)) + +""" + +import six + +from astroid import MANAGER +from astroid.bases import ( + BUILTINS, NodeNG, Instance, _infer_stmts, + BoundMethod, _is_property +) +from astroid.decorators import cachedproperty +from astroid.exceptions import ( + SuperError, SuperArgumentTypeError, + NotFoundError, MroError +) +from astroid.node_classes import const_factory +from astroid.scoped_nodes import ClassDef, FunctionDef +from astroid.mixins import ParentAssignTypeMixin + + +class FrozenSet(NodeNG, Instance, ParentAssignTypeMixin): + """class representing a FrozenSet composite node""" + + def __init__(self, elts=None): + if elts is None: + self.elts = [] + else: + self.elts = [const_factory(e) for e in elts] + + def pytype(self): + return '%s.frozenset' % BUILTINS + + def itered(self): + return self.elts + + def _infer(self, context=None): + yield self + + @cachedproperty + def _proxied(self): + builtins = MANAGER.astroid_cache[BUILTINS] + return builtins.getattr('frozenset')[0] + + +class Super(NodeNG): + """Proxy class over a super call. + + This class offers almost the same behaviour as Python's super, + which is MRO lookups for retrieving attributes from the parents. + + The *mro_pointer* is the place in the MRO from where we should + start looking, not counting it. *mro_type* is the object which + provides the MRO, it can be both a type or an instance. + *self_class* is the class where the super call is, while + *scope* is the function where the super call is. 
+ """ + + def __init__(self, mro_pointer, mro_type, self_class, scope): + self.type = mro_type + self.mro_pointer = mro_pointer + self._class_based = False + self._self_class = self_class + self._scope = scope + self._model = { + '__thisclass__': self.mro_pointer, + '__self_class__': self._self_class, + '__self__': self.type, + '__class__': self._proxied, + } + + def _infer(self, context=None): + yield self + + def super_mro(self): + """Get the MRO which will be used to lookup attributes in this super.""" + if not isinstance(self.mro_pointer, ClassDef): + raise SuperArgumentTypeError("The first super argument must be type.") + + if isinstance(self.type, ClassDef): + # `super(type, type)`, most likely in a class method. + self._class_based = True + mro_type = self.type + else: + mro_type = getattr(self.type, '_proxied', None) + if not isinstance(mro_type, (Instance, ClassDef)): + raise SuperArgumentTypeError("super(type, obj): obj must be an " + "instance or subtype of type") + + if not mro_type.newstyle: + raise SuperError("Unable to call super on old-style classes.") + + mro = mro_type.mro() + if self.mro_pointer not in mro: + raise SuperArgumentTypeError("super(type, obj): obj must be an " + "instance or subtype of type") + + index = mro.index(self.mro_pointer) + return mro[index + 1:] + + @cachedproperty + def _proxied(self): + builtins = MANAGER.astroid_cache[BUILTINS] + return builtins.getattr('super')[0] + + def pytype(self): + return '%s.super' % BUILTINS + + def display_type(self): + return 'Super of' + + @property + def name(self): + """Get the name of the MRO pointer.""" + return self.mro_pointer.name + + def igetattr(self, name, context=None): + """Retrieve the inferred values of the given attribute name.""" + + local_name = self._model.get(name) + if local_name: + yield local_name + return + + try: + mro = self.super_mro() + except (MroError, SuperError) as exc: + # Don't let invalid MROs or invalid super calls + # to leak out as is from this function. 
+ six.raise_from(NotFoundError, exc) + + found = False + for cls in mro: + if name not in cls._locals: + continue + + found = True + for infered in _infer_stmts([cls[name]], context, frame=self): + if not isinstance(infered, FunctionDef): + yield infered + continue + + # We can obtain different descriptors from a super depending + # on what we are accessing and where the super call is. + if infered.type == 'classmethod': + yield BoundMethod(infered, cls) + elif self._scope.type == 'classmethod' and infered.type == 'method': + yield infered + elif self._class_based or infered.type == 'staticmethod': + yield infered + elif _is_property(infered): + # TODO: support other descriptors as well. + for value in infered.infer_call_result(self, context): + yield value + else: + yield BoundMethod(infered, cls) + + if not found: + raise NotFoundError(name) + + def getattr(self, name, context=None): + return list(self.igetattr(name, context=context)) diff --git a/pymode/libs/astroid/protocols.py b/pymode/libs/astroid/protocols.py new file mode 100644 index 00000000..87a6d4d2 --- /dev/null +++ b/pymode/libs/astroid/protocols.py @@ -0,0 +1,470 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""this module contains a set of functions to handle python protocols for nodes +where it makes sense. +""" + +import collections +import operator +import sys + +from astroid import arguments +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import node_classes +from astroid import nodes +from astroid import util + +BIN_OP_METHOD = {'+': '__add__', + '-': '__sub__', + '/': '__div__', + '//': '__floordiv__', + '*': '__mul__', + '**': '__pow__', + '%': '__mod__', + '&': '__and__', + '|': '__or__', + '^': '__xor__', + '<<': '__lshift__', + '>>': '__rshift__', + '@': '__matmul__' + } + +UNARY_OP_METHOD = {'+': '__pos__', + '-': '__neg__', + '~': '__invert__', + 'not': None, # XXX not '__nonzero__' + } + +# unary operations ############################################################ + +def tl_infer_unary_op(self, operator): + if operator == 'not': + return node_classes.const_factory(not bool(self.elts)) + raise TypeError() # XXX log unsupported operation +nodes.Tuple.infer_unary_op = tl_infer_unary_op +nodes.List.infer_unary_op = tl_infer_unary_op + + +def dict_infer_unary_op(self, operator): + if operator == 'not': + return node_classes.const_factory(not bool(self.items)) + raise TypeError() # XXX log unsupported operation +nodes.Dict.infer_unary_op = dict_infer_unary_op + + +def const_infer_unary_op(self, operator): + if operator == 'not': + return node_classes.const_factory(not self.value) + # XXX log potentially raised TypeError + elif operator == '+': + return node_classes.const_factory(+self.value) + else: # operator == '-': + return node_classes.const_factory(-self.value) +nodes.Const.infer_unary_op = const_infer_unary_op + + +# binary operations ########################################################### + +BIN_OP_IMPL = {'+': lambda a, b: a + b, + '-': lambda a, b: a - b, + '/': lambda a, b: a / b, + '//': lambda a, b: a // b, + '*': lambda a, b: a * b, + '**': lambda a, b: a ** b, + '%': 
lambda a, b: a % b, + '&': lambda a, b: a & b, + '|': lambda a, b: a | b, + '^': lambda a, b: a ^ b, + '<<': lambda a, b: a << b, + '>>': lambda a, b: a >> b, + } + +if sys.version_info >= (3, 5): + # MatMult is available since Python 3.5+. + BIN_OP_IMPL['@'] = operator.matmul + +for key, impl in list(BIN_OP_IMPL.items()): + BIN_OP_IMPL[key+'='] = impl + +def const_infer_binary_op(self, binop, other, context): + operator = binop.op + for other in other.infer(context): + if isinstance(other, nodes.Const): + try: + impl = BIN_OP_IMPL[operator] + + try: + yield node_classes.const_factory(impl(self.value, other.value)) + except Exception: + # ArithmeticError is not enough: float >> float is a TypeError + # TODO : let pylint know about the problem + pass + except TypeError: + # XXX log TypeError + continue + elif other is util.YES: + yield other + else: + try: + for val in other.infer_binary_op(binop, self, context): + yield val + except AttributeError: + yield util.YES +nodes.Const.infer_binary_op = bases.yes_if_nothing_inferred(const_infer_binary_op) + + + +def _multiply_seq_by_int(self, binop, other, context): + node = self.__class__() + node.parent = binop + elts = [] + for elt in self.elts: + infered = util.safe_infer(elt, context) + if infered is None: + infered = util.YES + elts.append(infered) + node.elts = elts * other.value + return node + + +def _filter_uninferable_nodes(elts, context): + for elt in elts: + if elt is util.YES: + yield elt + else: + for inferred in elt.infer(context): + yield inferred + + +def tl_infer_binary_op(self, binop, other, context): + operator = binop.op + for other in other.infer(context): + if isinstance(other, self.__class__) and operator == '+': + node = self.__class__() + node.parent = binop + elts = list(_filter_uninferable_nodes(self.elts, context)) + elts += list(_filter_uninferable_nodes(other.elts, context)) + node.elts = elts + yield node + elif isinstance(other, nodes.Const) and operator == '*': + if not 
isinstance(other.value, int): + yield util.YES + continue + yield _multiply_seq_by_int(self, binop, other, context) + elif isinstance(other, bases.Instance) and not isinstance(other, nodes.Const): + yield util.YES + # XXX else log TypeError +nodes.Tuple.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) +nodes.List.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) + + +def dict_infer_binary_op(self, binop, other, context): + for other in other.infer(context): + if isinstance(other, bases.Instance) and isinstance(other._proxied, nodes.ClassDef): + yield util.YES + # XXX else log TypeError +nodes.Dict.infer_binary_op = bases.yes_if_nothing_inferred(dict_infer_binary_op) + +def instance_infer_binary_op(self, binop, other, context): + operator = binop.op + try: + methods = self.getattr(BIN_OP_METHOD[operator]) + except (exceptions.NotFoundError, KeyError): + # Unknown operator + yield util.YES + else: + for method in methods: + if not isinstance(method, nodes.FunctionDef): + continue + for result in method.infer_call_result(self, context): + if result is not util.YES: + yield result + # We are interested only in the first infered method, + # don't go looking in the rest of the methods of the ancestors. + break + +bases.Instance.infer_binary_op = bases.yes_if_nothing_inferred(instance_infer_binary_op) + + +# assignment ################################################################## + +"""the assigned_stmts method is responsible to return the assigned statement +(e.g. not inferred) according to the assignment type. + +The `asspath` argument is used to record the lhs path of the original node. +For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath +will be [1, 1] once arrived to the Assign node. + +The `context` argument is the current inference context which should be given +to any intermediary inference necessary. 
+""" + +def _resolve_looppart(parts, asspath, context): + """recursive function to resolve multiple assignments on loops""" + asspath = asspath[:] + index = asspath.pop(0) + for part in parts: + if part is util.YES: + continue + # XXX handle __iter__ and log potentially detected errors + if not hasattr(part, 'itered'): + continue + try: + itered = part.itered() + except TypeError: + continue # XXX log error + for stmt in itered: + try: + assigned = stmt.getitem(index, context) + except (AttributeError, IndexError): + continue + except TypeError: # stmt is unsubscriptable Const + continue + if not asspath: + # we achieved to resolved the assignment path, + # don't infer the last part + yield assigned + elif assigned is util.YES: + break + else: + # we are not yet on the last part of the path + # search on each possibly inferred value + try: + for inferred in _resolve_looppart(assigned.infer(context), + asspath, context): + yield inferred + except exceptions.InferenceError: + break + + +@bases.raise_if_nothing_inferred +def for_assigned_stmts(self, node=None, context=None, asspath=None): + if asspath is None: + for lst in self.iter.infer(context): + if isinstance(lst, (nodes.Tuple, nodes.List)): + for item in lst.elts: + yield item + else: + for inferred in _resolve_looppart(self.iter.infer(context), + asspath, context): + yield inferred + +nodes.For.assigned_stmts = for_assigned_stmts +nodes.Comprehension.assigned_stmts = for_assigned_stmts + + +def sequence_assigned_stmts(self, node=None, context=None, asspath=None): + if asspath is None: + asspath = [] + try: + index = self.elts.index(node) + except ValueError: + util.reraise(exceptions.InferenceError( + 'Tried to retrieve a node {node!r} which does not exist', + node=self, assign_path=asspath, context=context)) + + asspath.insert(0, index) + return self.parent.assigned_stmts(node=self, context=context, asspath=asspath) + +nodes.Tuple.assigned_stmts = sequence_assigned_stmts +nodes.List.assigned_stmts = 
sequence_assigned_stmts + + +def assend_assigned_stmts(self, node=None, context=None, asspath=None): + return self.parent.assigned_stmts(node=self, context=context) +nodes.AssignName.assigned_stmts = assend_assigned_stmts +nodes.AssignAttr.assigned_stmts = assend_assigned_stmts + + +def _arguments_infer_argname(self, name, context): + # arguments information may be missing, in which case we can't do anything + # more + if not (self.args or self.vararg or self.kwarg): + yield util.YES + return + # first argument of instance/class method + if self.args and getattr(self.args[0], 'name', None) == name: + functype = self.parent.type + if functype == 'method': + yield bases.Instance(self.parent.parent.frame()) + return + if functype == 'classmethod': + yield self.parent.parent.frame() + return + + if context and context.callcontext: + call_site = arguments.CallSite(context.callcontext) + for value in call_site.infer_argument(self.parent, name, context): + yield value + return + + # TODO: just provide the type here, no need to have an empty Dict. + if name == self.vararg: + vararg = node_classes.const_factory(()) + vararg.parent = self + yield vararg + return + if name == self.kwarg: + kwarg = node_classes.const_factory({}) + kwarg.parent = self + yield kwarg + return + # if there is a default value, yield it. 
And then yield YES to reflect + # we can't guess given argument value + try: + context = contextmod.copy_context(context) + for inferred in self.default_value(name).infer(context): + yield inferred + yield util.YES + except exceptions.NoDefault: + yield util.YES + + +def arguments_assigned_stmts(self, node=None, context=None, asspath=None): + if context.callcontext: + # reset call context/name + callcontext = context.callcontext + context = contextmod.copy_context(context) + context.callcontext = None + args = arguments.CallSite(callcontext) + return args.infer_argument(self.parent, node.name, context) + return _arguments_infer_argname(self, node.name, context) + +nodes.Arguments.assigned_stmts = arguments_assigned_stmts + + +@bases.raise_if_nothing_inferred +def assign_assigned_stmts(self, node=None, context=None, asspath=None): + if not asspath: + yield self.value + return + for inferred in _resolve_asspart(self.value.infer(context), asspath, context): + yield inferred + +nodes.Assign.assigned_stmts = assign_assigned_stmts +nodes.AugAssign.assigned_stmts = assign_assigned_stmts + + +def _resolve_asspart(parts, asspath, context): + """recursive function to resolve multiple assignments""" + asspath = asspath[:] + index = asspath.pop(0) + for part in parts: + if hasattr(part, 'getitem'): + try: + assigned = part.getitem(index, context) + # XXX raise a specific exception to avoid potential hiding of + # unexpected exception ? 
+ except (TypeError, IndexError): + return + if not asspath: + # we achieved to resolved the assignment path, don't infer the + # last part + yield assigned + elif assigned is util.YES: + return + else: + # we are not yet on the last part of the path search on each + # possibly inferred value + try: + for inferred in _resolve_asspart(assigned.infer(context), + asspath, context): + yield inferred + except exceptions.InferenceError: + return + + +@bases.raise_if_nothing_inferred +def excepthandler_assigned_stmts(self, node=None, context=None, asspath=None): + for assigned in node_classes.unpack_infer(self.type): + if isinstance(assigned, nodes.ClassDef): + assigned = bases.Instance(assigned) + yield assigned +nodes.ExceptHandler.assigned_stmts = bases.raise_if_nothing_inferred(excepthandler_assigned_stmts) + + +@bases.raise_if_nothing_inferred +def with_assigned_stmts(self, node=None, context=None, asspath=None): + if asspath is None: + for _, vars in self.items: + if vars is None: + continue + for lst in vars.infer(context): + if isinstance(lst, (nodes.Tuple, nodes.List)): + for item in lst.nodes: + yield item +nodes.With.assigned_stmts = with_assigned_stmts + + +@bases.yes_if_nothing_inferred +def starred_assigned_stmts(self, node=None, context=None, asspath=None): + stmt = self.statement() + if not isinstance(stmt, (nodes.Assign, nodes.For)): + raise exceptions.InferenceError() + + if isinstance(stmt, nodes.Assign): + value = stmt.value + lhs = stmt.targets[0] + + if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1: + # Too many starred arguments in the expression. + raise exceptions.InferenceError() + + if context is None: + context = contextmod.InferenceContext() + try: + rhs = next(value.infer(context)) + except exceptions.InferenceError: + yield util.YES + return + if rhs is util.YES or not hasattr(rhs, 'elts'): + # Not interested in inferred values without elts. 
+ yield util.YES + return + + elts = collections.deque(rhs.elts[:]) + if len(lhs.elts) > len(rhs.elts): + # a, *b, c = (1, 2) + raise exceptions.InferenceError() + + # Unpack iteratively the values from the rhs of the assignment, + # until the find the starred node. What will remain will + # be the list of values which the Starred node will represent + # This is done in two steps, from left to right to remove + # anything before the starred node and from right to left + # to remvoe anything after the starred node. + + for index, node in enumerate(lhs.elts): + if not isinstance(node, nodes.Starred): + elts.popleft() + continue + lhs_elts = collections.deque(reversed(lhs.elts[index:])) + for node in lhs_elts: + if not isinstance(node, nodes.Starred): + elts.pop() + continue + # We're done + packed = nodes.List() + packed.elts = elts + packed.parent = self + yield packed + break + +nodes.Starred.assigned_stmts = starred_assigned_stmts diff --git a/pylibs/logilab/astng/raw_building.py b/pymode/libs/astroid/raw_building.py similarity index 67% rename from pylibs/logilab/astng/raw_building.py rename to pymode/libs/astroid/raw_building.py index 395c26ec..aaaf52f2 100644 --- a/pylibs/logilab/astng/raw_building.py +++ b/pymode/libs/astroid/raw_building.py @@ -1,47 +1,66 @@ -# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# copyright 2003-2010 Sylvain Thenault, all rights reserved. -# contact mailto:thenault@gmail.com # -# This file is part of logilab-astng. +# This file is part of astroid. # -# logilab-astng is free software: you can redistribute it and/or modify it +# astroid is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 2.1 of the License, or (at your # option) any later version. 
# -# logilab-astng is distributed in the hope that it will be useful, but +# astroid is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License # for more details. # # You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""this module contains a set of functions to create astng trees from scratch +# with astroid. If not, see . +"""this module contains a set of functions to create astroid trees from scratch (build_* functions) or from living object (object_build_* functions) """ -__docformat__ = "restructuredtext en" - import sys +import os from os.path import abspath from inspect import (getargspec, isdatadescriptor, isfunction, ismethod, - ismethoddescriptor, isclass, isbuiltin) + ismethoddescriptor, isclass, isbuiltin, ismodule) +import six -from logilab.astng import BUILTINS_MODULE -from logilab.astng.node_classes import CONST_CLS -from logilab.astng.nodes import (Module, Class, Const, const_factory, From, - Function, EmptyNode, Name, Arguments, Dict, List, Set, Tuple) -from logilab.astng.bases import Generator -from logilab.astng.manager import ASTNGManager -MANAGER = ASTNGManager() +from astroid.node_classes import CONST_CLS +from astroid.nodes import (Module, Class, Const, const_factory, From, + Function, EmptyNode, Name, Arguments) +from astroid.bases import BUILTINS, Generator +from astroid.manager import AstroidManager +MANAGER = AstroidManager() _CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types +_JYTHON = os.name == 'java' +_BUILTINS = vars(six.moves.builtins) + +def _io_discrepancy(member): + # _io module names itself `io`: http://bugs.python.org/issue18602 + member_self = getattr(member, '__self__', None) + return (member_self and + ismodule(member_self) and + member_self.__name__ == '_io' and + 
member.__module__ == 'io') def _attach_local_node(parent, node, name): node.name = name # needed by add_local_node parent.add_local_node(node) + +def _add_dunder_class(func, member): + """Add a __class__ member to the given func node, if we can determine it.""" + python_cls = member.__class__ + cls_name = getattr(python_cls, '__name__', None) + if not cls_name: + return + bases = [ancestor.__name__ for ancestor in python_cls.__bases__] + ast_klass = build_class(cls_name, bases, python_cls.__doc__) + func._instance_attrs['__class__'] = [ast_klass] + + _marker = object() def attach_dummy_node(node, name, object=_marker): @@ -52,7 +71,10 @@ def attach_dummy_node(node, name, object=_marker): enode.object = object _attach_local_node(node, enode, name) -EmptyNode.has_underlying_object = lambda self: self.object is not _marker +def _has_underlying_object(self): + return hasattr(self, 'object') and self.object is not _marker + +EmptyNode.has_underlying_object = _has_underlying_object def attach_const_node(node, name, value): """create a Const node and register it in the locals of the given @@ -70,14 +92,14 @@ def attach_import_node(node, modname, membername): def build_module(name, doc=None): - """create and initialize a astng Module node""" + """create and initialize a astroid Module node""" node = Module(name, doc, pure_python=False) node.package = False node.parent = None return node def build_class(name, basenames=(), doc=None): - """create and initialize a astng Class node""" + """create and initialize a astroid Class node""" node = Class(name, doc) for base in basenames: basenode = Name() @@ -87,7 +109,7 @@ def build_class(name, basenames=(), doc=None): return node def build_function(name, args=None, defaults=None, flag=0, doc=None): - """create and initialize a astng Function node""" + """create and initialize a astroid Function node""" args, defaults = args or [], defaults or [] # first argument is now a list of decorators func = Function(name, doc) @@ -110,7 
+132,7 @@ def build_function(name, args=None, defaults=None, flag=0, doc=None): def build_from_import(fromname, names): - """create and initialize an astng From import statement""" + """create and initialize an astroid From import statement""" return From(fromname, [(name, None) for name in names]) def register_arguments(func, args=None): @@ -132,28 +154,28 @@ def register_arguments(func, args=None): register_arguments(func, arg.elts) def object_build_class(node, member, localname): - """create astng for a living class object""" + """create astroid for a living class object""" basenames = [base.__name__ for base in member.__bases__] return _base_class_object_build(node, member, basenames, localname=localname) def object_build_function(node, member, localname): - """create astng for a living function object""" + """create astroid for a living function object""" args, varargs, varkw, defaults = getargspec(member) if varargs is not None: args.append(varargs) if varkw is not None: args.append(varkw) func = build_function(getattr(member, '__name__', None) or localname, args, - defaults, member.func_code.co_flags, member.__doc__) + defaults, six.get_function_code(member).co_flags, member.__doc__) node.add_local_node(func, localname) def object_build_datadescriptor(node, member, name): - """create astng for a living data descriptor object""" + """create astroid for a living data descriptor object""" return _base_class_object_build(node, member, [], name) def object_build_methoddescriptor(node, member, localname): - """create astng for a living method descriptor object""" + """create astroid for a living method descriptor object""" # FIXME get arguments ? 
func = build_function(getattr(member, '__name__', None) or localname, doc=member.__doc__) @@ -161,9 +183,10 @@ def object_build_methoddescriptor(node, member, localname): # and empty argument list func.args.args = None node.add_local_node(func, localname) + _add_dunder_class(func, member) def _base_class_object_build(node, member, basenames, name=None, localname=None): - """create astng for a living class object, with a given set of base names + """create astroid for a living class object, with a given set of base names (e.g. ancestors) """ klass = build_class(name or getattr(member, '__name__', None) or localname, @@ -187,7 +210,7 @@ def _base_class_object_build(node, member, basenames, name=None, localname=None) valnode.object = obj valnode.parent = klass valnode.lineno = 1 - klass.instance_attrs[name] = [valnode] + klass._instance_attrs[name] = [valnode] return klass @@ -200,23 +223,28 @@ class InspectBuilder(object): Function and Class nodes and some others as guessed. """ - # astng from living objects ############################################### + # astroid from living objects ############################################### def __init__(self): self._done = {} self._module = None def inspect_build(self, module, modname=None, path=None): - """build astng from a living module (i.e. using inspect) + """build astroid from a living module (i.e. 
using inspect) this is used when there is no python source code available (either because it's a built-in module or because the .py is not available) """ self._module = module if modname is None: modname = module.__name__ - node = build_module(modname, module.__doc__) - node.file = node.path = path and abspath(path) or path - MANAGER.astng_cache[modname] = node + try: + node = build_module(modname, module.__doc__) + except AttributeError: + # in jython, java modules have no __doc__ (see #109562) + node = build_module(modname) + node.source_file = path and abspath(path) or path + node.name = modname + MANAGER.cache_module(node) node.package = hasattr(module, '__path__') self._done = {} self.object_build(node, module) @@ -237,17 +265,21 @@ def object_build(self, node, obj): attach_dummy_node(node, name) continue if ismethod(member): - member = member.im_func + member = six.get_method_function(member) if isfunction(member): # verify this is not an imported function - if member.func_code.co_filename != getattr(self._module, '__file__', None): + filename = getattr(six.get_function_code(member), + 'co_filename', None) + if filename is None: + assert isinstance(member, object) + object_build_methoddescriptor(node, member, name) + elif filename != getattr(self._module, '__file__', None): attach_dummy_node(node, name, member) - continue - object_build_function(node, member, name) + else: + object_build_function(node, member, name) elif isbuiltin(member): - if self.imported_member(node, member, name): - #if obj is object: - # print 'skippp', obj, name, member + if (not _io_discrepancy(member) and + self.imported_member(node, member, name)): continue object_build_methoddescriptor(node, member, name) elif isclass(member): @@ -255,7 +287,7 @@ def object_build(self, node, obj): continue if member in self._done: class_node = self._done[member] - if not class_node in node.locals.get(name, ()): + if not class_node in node._locals.get(name, ()): node.add_local_node(class_node, name) 
else: class_node = object_build_class(node, member, name) @@ -269,7 +301,7 @@ def object_build(self, node, obj): elif isdatadescriptor(member): assert isinstance(member, object) object_build_datadescriptor(node, member, name) - elif isinstance(member, _CONSTANTS): + elif type(member) in _CONSTANTS: attach_const_node(node, name, member) else: # create an empty node so that the name is actually defined @@ -284,20 +316,27 @@ def imported_member(self, node, member, name): modname = getattr(member, '__module__', None) except: # XXX use logging - print 'unexpected error while building astng from living object' + print('unexpected error while building astroid from living object') import traceback traceback.print_exc() modname = None if modname is None: - if name in ('__new__', '__subclasshook__'): + if (name in ('__new__', '__subclasshook__') + or (name in _BUILTINS and _JYTHON)): # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14) # >>> print object.__new__.__module__ # None - modname = BUILTINS_MODULE + modname = BUILTINS else: attach_dummy_node(node, name, member) return True - if {'gtk': 'gtk._gtk'}.get(modname, modname) != self._module.__name__: + + real_name = { + 'gtk': 'gtk_gtk', + '_io': 'io', + }.get(modname, modname) + + if real_name != self._module.__name__: # check if it sounds valid and then add an import node, else use a # dummy node try: @@ -310,28 +349,33 @@ def imported_member(self, node, member, name): return False -### astng boot strapping ################################################### ### +### astroid bootstrapping ###################################################### +Astroid_BUILDER = InspectBuilder() _CONST_PROXY = {} -def astng_boot_strapping(): - """astng boot strapping the builtins module""" +def _astroid_bootstrapping(astroid_builtin=None): + """astroid boot strapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const - builder = InspectBuilder() - 
from logilab.common.compat import builtins - astng_builtin = builder.inspect_build(builtins) + if astroid_builtin is None: + from six.moves import builtins + astroid_builtin = Astroid_BUILDER.inspect_build(builtins) + for cls, node_cls in CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') - proxy.parent = astng_builtin + proxy.parent = astroid_builtin + elif cls is type(NotImplemented): + proxy = build_class('NotImplementedType') + proxy.parent = astroid_builtin else: - proxy = astng_builtin.getattr(cls.__name__)[0] # XXX + proxy = astroid_builtin.getattr(cls.__name__)[0] if cls in (dict, list, set, tuple): node_cls._proxied = proxy else: _CONST_PROXY[cls] = proxy -astng_boot_strapping() +_astroid_bootstrapping() # TODO : find a nicer way to handle this situation; # However __proxied introduced an @@ -340,6 +384,7 @@ def _set_proxied(const): return _CONST_PROXY[const.value.__class__] Const._proxied = property(_set_proxied) -# FIXME : is it alright that Generator._proxied is not a astng node? -Generator._proxied = MANAGER.infer_astng_from_something(type(a for a in ())) +from types import GeneratorType +Generator._proxied = Class(GeneratorType.__name__, GeneratorType.__doc__) +Astroid_BUILDER.object_build(Generator._proxied, GeneratorType) diff --git a/pymode/libs/astroid/rebuilder.py b/pymode/libs/astroid/rebuilder.py new file mode 100644 index 00000000..859b8280 --- /dev/null +++ b/pymode/libs/astroid/rebuilder.py @@ -0,0 +1,989 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. 
+# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""this module contains utilities for rebuilding a _ast tree in +order to get a single Astroid representation +""" + +import sys +import _ast +from _ast import ( + # binary operators + Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, + LShift, RShift, + # logical operators + And, Or, + # unary operators + UAdd, USub, Not, Invert, + # comparison operators + Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn, + ) + +from astroid import nodes as new +from astroid import astpeephole + + +_BIN_OP_CLASSES = {Add: '+', + BitAnd: '&', + BitOr: '|', + BitXor: '^', + Div: '/', + FloorDiv: '//', + Mod: '%', + Mult: '*', + Pow: '**', + Sub: '-', + LShift: '<<', + RShift: '>>', + } +if sys.version_info >= (3, 5): + from _ast import MatMult + _BIN_OP_CLASSES[MatMult] = '@' + +_BOOL_OP_CLASSES = {And: 'and', + Or: 'or', + } + +_UNARY_OP_CLASSES = {UAdd: '+', + USub: '-', + Not: 'not', + Invert: '~', + } + +_CMP_OP_CLASSES = {Eq: '==', + Gt: '>', + GtE: '>=', + In: 'in', + Is: 'is', + IsNot: 'is not', + Lt: '<', + LtE: '<=', + NotEq: '!=', + NotIn: 'not in', + } + +CONST_NAME_TRANSFORMS = {'None': None, + 'True': True, + 'False': False, + } + +REDIRECT = {'arguments': 'Arguments', + 'comprehension': 'Comprehension', + "ListCompFor": 'Comprehension', + "GenExprFor": 'Comprehension', + 'excepthandler': 'ExceptHandler', + 'keyword': 'Keyword', + } +PY3K = sys.version_info >= (3, 0) +PY34 = sys.version_info >= (3, 4) + +def _init_set_doc(node, newnode): + newnode.doc = None + try: + if isinstance(node.body[0], _ast.Expr) and isinstance(node.body[0].value, _ast.Str): + newnode.doc = node.body[0].value.s + 
node.body = node.body[1:] + + except IndexError: + pass # ast built from scratch + +def _lineno_parent(oldnode, newnode, parent): + newnode.parent = parent + newnode.lineno = oldnode.lineno + newnode.col_offset = oldnode.col_offset + +def _set_infos(oldnode, newnode, parent): + newnode.parent = parent + if hasattr(oldnode, 'lineno'): + newnode.lineno = oldnode.lineno + if hasattr(oldnode, 'col_offset'): + newnode.col_offset = oldnode.col_offset + +def _create_yield_node(node, parent, rebuilder, factory): + newnode = factory() + _lineno_parent(node, newnode, parent) + if node.value is not None: + newnode.value = rebuilder.visit(node.value, newnode, None) + return newnode + +def _visit_or_none(node, attr, visitor, parent, assign_ctx, visit='visit', + **kws): + """If the given node has an attribute, visits the attribute, and + otherwise returns None. + + """ + value = getattr(node, attr, None) + if value: + return getattr(visitor, visit)(value, parent, assign_ctx, **kws) + else: + return None + + +class TreeRebuilder(object): + """Rebuilds the _ast tree to become an Astroid tree""" + + def __init__(self, manager): + self._manager = manager + self.asscontext = None + self._global_names = [] + self._import_from_nodes = [] + self._delayed_assattr = [] + self._visit_meths = {} + self._peepholer = astpeephole.ASTPeepholeOptimizer() + + def visit_module(self, node, modname, modpath, package): + """visit a Module node by returning a fresh instance of it""" + newnode = new.Module(modname, None) + newnode.package = package + newnode.parent = None + _init_set_doc(node, newnode) + newnode.body = [self.visit(child, newnode) for child in node.body] + newnode.source_file = modpath + return newnode + + def visit(self, node, parent, assign_ctx=None): + cls = node.__class__ + if cls in self._visit_meths: + visit_method = self._visit_meths[cls] + else: + cls_name = cls.__name__ + visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower() + visit_method = getattr(self, visit_name) 
+ self._visit_meths[cls] = visit_method + return visit_method(node, parent, assign_ctx) + + def _save_assignment(self, node, name=None): + """save assignement situation since node.parent is not available yet""" + if self._global_names and node.name in self._global_names[-1]: + node.root().set_local(node.name, node) + else: + node.parent.set_local(node.name, node) + + def visit_arguments(self, node, parent, assign_ctx=None): + """visit a Arguments node by returning a fresh instance of it""" + newnode = new.Arguments() + newnode.parent = parent + newnode.args = [self.visit(child, newnode, "Assign") + for child in node.args] + newnode.defaults = [self.visit(child, newnode, assign_ctx) + for child in node.defaults] + newnode.kwonlyargs = [] + newnode.kw_defaults = [] + vararg, kwarg = node.vararg, node.kwarg + # change added in 82732 (7c5c678e4164), vararg and kwarg + # are instances of `_ast.arg`, not strings + if vararg: + if PY34: + if vararg.annotation: + newnode.varargannotation = self.visit(vararg.annotation, + newnode, assign_ctx) + vararg = vararg.arg + elif PY3K and node.varargannotation: + newnode.varargannotation = self.visit(node.varargannotation, + newnode, assign_ctx) + if kwarg: + if PY34: + if kwarg.annotation: + newnode.kwargannotation = self.visit(kwarg.annotation, + newnode, assign_ctx) + kwarg = kwarg.arg + elif PY3K: + if node.kwargannotation: + newnode.kwargannotation = self.visit(node.kwargannotation, + newnode, assign_ctx) + newnode.vararg = vararg + newnode.kwarg = kwarg + # save argument names in locals: + if vararg: + newnode.parent.set_local(vararg, newnode) + if kwarg: + newnode.parent.set_local(kwarg, newnode) + return newnode + + def visit_assignattr(self, node, parent, assign_ctx=None): + """visit a AssAttr node by returning a fresh instance of it""" + newnode = new.AssignAttr() + _lineno_parent(node, newnode, parent) + newnode.expr = self.visit(node.expr, newnode, assign_ctx) + self._delayed_assattr.append(newnode) + return newnode + + 
def visit_assert(self, node, parent, assign_ctx=None): + """visit a Assert node by returning a fresh instance of it""" + newnode = new.Assert() + _lineno_parent(node, newnode, parent) + newnode.test = self.visit(node.test, newnode, assign_ctx) + if node.msg is not None: + newnode.fail = self.visit(node.msg, newnode, assign_ctx) + return newnode + + def visit_assign(self, node, parent, assign_ctx=None): + """visit a Assign node by returning a fresh instance of it""" + newnode = new.Assign() + _lineno_parent(node, newnode, parent) + newnode.targets = [self.visit(child, newnode, "Assign") + for child in node.targets] + newnode.value = self.visit(node.value, newnode, None) + return newnode + + def visit_assignname(self, node, parent, assign_ctx=None, node_name=None): + '''visit a node and return a AssName node''' + newnode = new.AssignName() + _set_infos(node, newnode, parent) + newnode.name = node_name + self._save_assignment(newnode) + return newnode + + def visit_augassign(self, node, parent, assign_ctx=None): + """visit a AugAssign node by returning a fresh instance of it""" + newnode = new.AugAssign() + _lineno_parent(node, newnode, parent) + newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "=" + newnode.target = self.visit(node.target, newnode, "Assign") + newnode.value = self.visit(node.value, newnode, None) + return newnode + + def visit_repr(self, node, parent, assign_ctx=None): + """visit a Backquote node by returning a fresh instance of it""" + newnode = new.Repr() + _lineno_parent(node, newnode, parent) + newnode.value = self.visit(node.value, newnode, assign_ctx) + return newnode + + def visit_binop(self, node, parent, assign_ctx=None): + """visit a BinOp node by returning a fresh instance of it""" + if isinstance(node.left, _ast.BinOp) and self._manager.optimize_ast: + # Optimize BinOp operations in order to remove + # redundant recursion. 
For instance, if the + # following code is parsed in order to obtain + # its ast, then the rebuilder will fail with an + # infinite recursion, the same will happen with the + # inference engine as well. There's no need to hold + # so many objects for the BinOp if they can be reduced + # to something else (also, the optimization + # might handle only Const binops, which isn't a big + # problem for the correctness of the program). + # + # ("a" + "b" + # one thousand more + "c") + optimized = self._peepholer.optimize_binop(node) + if optimized: + _lineno_parent(node, optimized, parent) + return optimized + + newnode = new.BinOp() + _lineno_parent(node, newnode, parent) + newnode.left = self.visit(node.left, newnode, assign_ctx) + newnode.right = self.visit(node.right, newnode, assign_ctx) + newnode.op = _BIN_OP_CLASSES[node.op.__class__] + return newnode + + def visit_boolop(self, node, parent, assign_ctx=None): + """visit a BoolOp node by returning a fresh instance of it""" + newnode = new.BoolOp() + _lineno_parent(node, newnode, parent) + newnode.values = [self.visit(child, newnode, assign_ctx) + for child in node.values] + newnode.op = _BOOL_OP_CLASSES[node.op.__class__] + return newnode + + def visit_break(self, node, parent, assign_ctx=None): + """visit a Break node by returning a fresh instance of it""" + newnode = new.Break() + _set_infos(node, newnode, parent) + return newnode + + def visit_call(self, node, parent, assign_ctx=None): + """visit a CallFunc node by returning a fresh instance of it""" + newnode = new.Call() + _lineno_parent(node, newnode, parent) + newnode.func = self.visit(node.func, newnode, assign_ctx) + args = [self.visit(child, newnode, assign_ctx) + for child in node.args] + + starargs = _visit_or_none(node, 'starargs', self, newnode, + assign_ctx) + kwargs = _visit_or_none(node, 'kwargs', self, newnode, + assign_ctx) + keywords = None + if node.keywords: + keywords = [self.visit(child, newnode, assign_ctx) + for child in node.keywords] + + 
if starargs: + new_starargs = new.Starred() + new_starargs.col_offset = starargs.col_offset + new_starargs.lineno = starargs.lineno + new_starargs.parent = starargs.parent + new_starargs.value = starargs + args.append(new_starargs) + if kwargs: + new_kwargs = new.Keyword() + new_kwargs.arg = None + new_kwargs.col_offset = kwargs.col_offset + new_kwargs.lineno = kwargs.lineno + new_kwargs.parent = kwargs.parent + new_kwargs.value = kwargs + if keywords: + keywords.append(new_kwargs) + else: + keywords = [new_kwargs] + + newnode.args = args + newnode.keywords = keywords + return newnode + + def visit_classdef(self, node, parent, assign_ctx=None): + """visit a Class node to become astroid""" + newnode = new.ClassDef(node.name, None) + _lineno_parent(node, newnode, parent) + _init_set_doc(node, newnode) + newnode.bases = [self.visit(child, newnode, assign_ctx) + for child in node.bases] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + if node.decorator_list: + newnode.decorators = self.visit_decorators(node, newnode, assign_ctx) + newnode.parent.frame().set_local(newnode.name, newnode) + return newnode + + def visit_const(self, node, parent, assign_ctx=None): + """visit a Const node by returning a fresh instance of it""" + newnode = new.Const(node.value) + _set_infos(node, newnode, parent) + return newnode + + def visit_continue(self, node, parent, assign_ctx=None): + """visit a Continue node by returning a fresh instance of it""" + newnode = new.Continue() + _set_infos(node, newnode, parent) + return newnode + + def visit_compare(self, node, parent, assign_ctx=None): + """visit a Compare node by returning a fresh instance of it""" + newnode = new.Compare() + _lineno_parent(node, newnode, parent) + newnode.left = self.visit(node.left, newnode, assign_ctx) + newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode, assign_ctx)) + for (op, expr) in zip(node.ops, node.comparators)] + return newnode + + def 
visit_comprehension(self, node, parent, assign_ctx=None): + """visit a Comprehension node by returning a fresh instance of it""" + newnode = new.Comprehension() + newnode.parent = parent + newnode.target = self.visit(node.target, newnode, 'Assign') + newnode.iter = self.visit(node.iter, newnode, None) + newnode.ifs = [self.visit(child, newnode, None) + for child in node.ifs] + return newnode + + def visit_decorators(self, node, parent, assign_ctx=None): + """visit a Decorators node by returning a fresh instance of it""" + # /!\ node is actually a _ast.Function node while + # parent is a astroid.nodes.Function node + newnode = new.Decorators() + _lineno_parent(node, newnode, parent) + decorators = node.decorator_list + newnode.nodes = [self.visit(child, newnode, assign_ctx) + for child in decorators] + return newnode + + def visit_delete(self, node, parent, assign_ctx=None): + """visit a Delete node by returning a fresh instance of it""" + newnode = new.Delete() + _lineno_parent(node, newnode, parent) + newnode.targets = [self.visit(child, newnode, 'Del') + for child in node.targets] + return newnode + + def _visit_dict_items(self, node, parent, newnode, assign_ctx): + for key, value in zip(node.keys, node.values): + rebuilt_value = self.visit(value, newnode, assign_ctx) + if not key: + # Python 3.5 and extended unpacking + rebuilt_key = new.DictUnpack() + rebuilt_key.lineno = rebuilt_value.lineno + rebuilt_key.col_offset = rebuilt_value.col_offset + rebuilt_key.parent = rebuilt_value.parent + else: + rebuilt_key = self.visit(key, newnode, assign_ctx) + yield rebuilt_key, rebuilt_value + + def visit_dict(self, node, parent, assign_ctx=None): + """visit a Dict node by returning a fresh instance of it""" + newnode = new.Dict() + _lineno_parent(node, newnode, parent) + newnode.items = list(self._visit_dict_items(node, parent, newnode, assign_ctx)) + return newnode + + def visit_dictcomp(self, node, parent, assign_ctx=None): + """visit a DictComp node by returning a 
fresh instance of it""" + newnode = new.DictComp() + _lineno_parent(node, newnode, parent) + newnode.key = self.visit(node.key, newnode, assign_ctx) + newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) + for child in node.generators] + return newnode + + def visit_expr(self, node, parent, assign_ctx=None): + """visit a Discard node by returning a fresh instance of it""" + newnode = new.Expr() + _lineno_parent(node, newnode, parent) + newnode.value = self.visit(node.value, newnode, assign_ctx) + return newnode + + def visit_ellipsis(self, node, parent, assign_ctx=None): + """visit an Ellipsis node by returning a fresh instance of it""" + newnode = new.Ellipsis() + _set_infos(node, newnode, parent) + return newnode + + def visit_emptynode(self, node, parent, assign_ctx=None): + """visit an EmptyNode node by returning a fresh instance of it""" + newnode = new.EmptyNode() + _set_infos(node, newnode, parent) + return newnode + + def visit_excepthandler(self, node, parent, assign_ctx=None): + """visit an ExceptHandler node by returning a fresh instance of it""" + newnode = new.ExceptHandler() + _lineno_parent(node, newnode, parent) + if node.type is not None: + newnode.type = self.visit(node.type, newnode, assign_ctx) + if node.name is not None: + # /!\ node.name can be a tuple + newnode.name = self.visit(node.name, newnode, 'Assign') + newnode.body = [self.visit(child, newnode, None) + for child in node.body] + return newnode + + def visit_exec(self, node, parent, assign_ctx=None): + """visit an Exec node by returning a fresh instance of it""" + newnode = new.Exec() + _lineno_parent(node, newnode, parent) + newnode.expr = self.visit(node.body, newnode) + if node.globals is not None: + newnode.globals = self.visit(node.globals, newnode, + assign_ctx) + if node.locals is not None: + newnode.locals = self.visit(node.locals, newnode, + assign_ctx) + return newnode + + def visit_extslice(self, node, 
parent, assign_ctx=None): + """visit an ExtSlice node by returning a fresh instance of it""" + newnode = new.ExtSlice() + newnode.parent = parent + newnode.dims = [self.visit(dim, newnode, assign_ctx) + for dim in node.dims] + return newnode + + def _visit_for(self, cls, node, parent, assign_ctx=None): + """visit a For node by returning a fresh instance of it""" + newnode = cls() + _lineno_parent(node, newnode, parent) + newnode.target = self.visit(node.target, newnode, "Assign") + newnode.iter = self.visit(node.iter, newnode, None) + newnode.body = [self.visit(child, newnode, None) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, None) + for child in node.orelse] + return newnode + + def visit_for(self, node, parent, assign_ctx=None): + return self._visit_for(new.For, node, parent, + assign_ctx=assign_ctx) + def visit_importfrom(self, node, parent, assign_ctx=None): + """visit a From node by returning a fresh instance of it""" + names = [(alias.name, alias.asname) for alias in node.names] + newnode = new.ImportFrom(node.module or '', names, node.level or None) + _set_infos(node, newnode, parent) + # store From names to add them to locals after building + self._import_from_nodes.append(newnode) + return newnode + + def _visit_functiondef(self, cls, node, parent, assign_ctx=None): + """visit an FunctionDef node to become astroid""" + self._global_names.append({}) + newnode = cls(node.name, None) + _lineno_parent(node, newnode, parent) + _init_set_doc(node, newnode) + newnode.args = self.visit(node.args, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + decorators = node.decorator_list + if decorators: + newnode.decorators = self.visit_decorators( + node, newnode, assign_ctx) + if PY3K and node.returns: + newnode.returns = self.visit(node.returns, newnode, + assign_ctx) + self._global_names.pop() + frame = newnode.parent.frame() + frame.set_local(newnode.name, newnode) + return newnode + 
+ def visit_functiondef(self, node, parent, assign_ctx=None): + return self._visit_functiondef(new.FunctionDef, node, parent, + assign_ctx=assign_ctx) + + def visit_generatorexp(self, node, parent, assign_ctx=None): + """visit a GenExpr node by returning a fresh instance of it""" + newnode = new.GeneratorExp() + _lineno_parent(node, newnode, parent) + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) + for child in node.generators] + return newnode + + def visit_attribute(self, node, parent, assign_ctx=None): + """visit a Getattr node by returning a fresh instance of it""" + # pylint: disable=redefined-variable-type + if assign_ctx == "Del": + # FIXME : maybe we should reintroduce and visit_delattr ? + # for instance, deactivating asscontext + newnode = new.DelAttr() + elif assign_ctx == "Assign": + # FIXME : maybe we should call visit_assattr ? + # Prohibit a local save if we are in an ExceptHandler. + newnode = new.AssignAttr() + if not isinstance(parent, new.ExceptHandler): + self._delayed_assattr.append(newnode) + else: + newnode = new.Attribute() + _lineno_parent(node, newnode, parent) + newnode.expr = self.visit(node.value, newnode, None) + newnode.attrname = node.attr + return newnode + + def visit_global(self, node, parent, assign_ctx=None): + """visit an Global node to become astroid""" + newnode = new.Global(node.names) + _set_infos(node, newnode, parent) + if self._global_names: # global at the module level, no effect + for name in node.names: + self._global_names[-1].setdefault(name, []).append(newnode) + return newnode + + def visit_if(self, node, parent, assign_ctx=None): + """visit a If node by returning a fresh instance of it""" + newnode = new.If() + _lineno_parent(node, newnode, parent) + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, 
assign_ctx) + for child in node.orelse] + return newnode + + def visit_ifexp(self, node, parent, assign_ctx=None): + """visit a IfExp node by returning a fresh instance of it""" + newnode = new.IfExp() + _lineno_parent(node, newnode, parent) + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = self.visit(node.body, newnode, assign_ctx) + newnode.orelse = self.visit(node.orelse, newnode, assign_ctx) + return newnode + + def visit_import(self, node, parent, assign_ctx=None): + """visit a Import node by returning a fresh instance of it""" + newnode = new.Import() + _set_infos(node, newnode, parent) + newnode.names = [(alias.name, alias.asname) for alias in node.names] + # save import names in parent's locals: + for (name, asname) in newnode.names: + name = asname or name + newnode.parent.set_local(name.split('.')[0], newnode) + return newnode + + def visit_index(self, node, parent, assign_ctx=None): + """visit a Index node by returning a fresh instance of it""" + newnode = new.Index() + newnode.parent = parent + newnode.value = self.visit(node.value, newnode, assign_ctx) + return newnode + + def visit_keyword(self, node, parent, assign_ctx=None): + """visit a Keyword node by returning a fresh instance of it""" + newnode = new.Keyword() + newnode.parent = parent + newnode.arg = node.arg + newnode.value = self.visit(node.value, newnode, assign_ctx) + return newnode + + def visit_lambda(self, node, parent, assign_ctx=None): + """visit a Lambda node by returning a fresh instance of it""" + newnode = new.Lambda() + _lineno_parent(node, newnode, parent) + newnode.args = self.visit(node.args, newnode, assign_ctx) + newnode.body = self.visit(node.body, newnode, assign_ctx) + return newnode + + def visit_list(self, node, parent, assign_ctx=None): + """visit a List node by returning a fresh instance of it""" + newnode = new.List() + _lineno_parent(node, newnode, parent) + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] + 
return newnode + + def visit_listcomp(self, node, parent, assign_ctx=None): + """visit a ListComp node by returning a fresh instance of it""" + newnode = new.ListComp() + _lineno_parent(node, newnode, parent) + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) + for child in node.generators] + return newnode + + def visit_name(self, node, parent, assign_ctx=None): + """visit a Name node by returning a fresh instance of it""" + # True and False can be assigned to something in py2x, so we have to + # check first the asscontext + # pylint: disable=redefined-variable-type + if assign_ctx == "Del": + newnode = new.DelName() + elif assign_ctx is not None: # Ass + newnode = new.AssName() + elif node.id in CONST_NAME_TRANSFORMS: + newnode = new.Const(CONST_NAME_TRANSFORMS[node.id]) + _set_infos(node, newnode, parent) + return newnode + else: + newnode = new.Name() + _lineno_parent(node, newnode, parent) + newnode.name = node.id + # XXX REMOVE me : + if assign_ctx in ('Del', 'Assign'): # 'Aug' ?? 
+ self._save_assignment(newnode) + return newnode + + def visit_bytes(self, node, parent, assign_ctx=None): + """visit a Bytes node by returning a fresh instance of Const""" + newnode = new.Const(node.s) + _set_infos(node, newnode, parent) + return newnode + + def visit_num(self, node, parent, assign_ctx=None): + """visit a Num node by returning a fresh instance of Const""" + newnode = new.Const(node.n) + _set_infos(node, newnode, parent) + return newnode + + def visit_pass(self, node, parent, assign_ctx=None): + """visit a Pass node by returning a fresh instance of it""" + newnode = new.Pass() + _set_infos(node, newnode, parent) + return newnode + + def visit_str(self, node, parent, assign_ctx=None): + """visit a Str node by returning a fresh instance of Const""" + newnode = new.Const(node.s) + _set_infos(node, newnode, parent) + return newnode + + def visit_print(self, node, parent, assign_ctx=None): + """visit a Print node by returning a fresh instance of it""" + newnode = new.Print() + _lineno_parent(node, newnode, parent) + newnode.nl = node.nl + if node.dest is not None: + newnode.dest = self.visit(node.dest, newnode, assign_ctx) + newnode.values = [self.visit(child, newnode, assign_ctx) + for child in node.values] + return newnode + + def visit_raise(self, node, parent, assign_ctx=None): + """visit a Raise node by returning a fresh instance of it""" + newnode = new.Raise() + _lineno_parent(node, newnode, parent) + if node.type is not None: + newnode.exc = self.visit(node.type, newnode, assign_ctx) + if node.inst is not None: + newnode.inst = self.visit(node.inst, newnode, assign_ctx) + if node.tback is not None: + newnode.tback = self.visit(node.tback, newnode, assign_ctx) + return newnode + + def visit_return(self, node, parent, assign_ctx=None): + """visit a Return node by returning a fresh instance of it""" + newnode = new.Return() + _lineno_parent(node, newnode, parent) + if node.value is not None: + newnode.value = self.visit(node.value, newnode, 
assign_ctx) + return newnode + + def visit_set(self, node, parent, assign_ctx=None): + """visit a Set node by returning a fresh instance of it""" + newnode = new.Set() + _lineno_parent(node, newnode, parent) + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] + return newnode + + def visit_setcomp(self, node, parent, assign_ctx=None): + """visit a SetComp node by returning a fresh instance of it""" + newnode = new.SetComp() + _lineno_parent(node, newnode, parent) + newnode.elt = self.visit(node.elt, newnode, assign_ctx) + newnode.generators = [self.visit(child, newnode, assign_ctx) + for child in node.generators] + return newnode + + def visit_slice(self, node, parent, assign_ctx=None): + """visit a Slice node by returning a fresh instance of it""" + newnode = new.Slice() + newnode.parent = parent + if node.lower is not None: + newnode.lower = self.visit(node.lower, newnode, assign_ctx) + if node.upper is not None: + newnode.upper = self.visit(node.upper, newnode, assign_ctx) + if node.step is not None: + newnode.step = self.visit(node.step, newnode, assign_ctx) + return newnode + + def visit_subscript(self, node, parent, assign_ctx=None): + """visit a Subscript node by returning a fresh instance of it""" + newnode = new.Subscript() + _lineno_parent(node, newnode, parent) + newnode.value = self.visit(node.value, newnode, None) + newnode.slice = self.visit(node.slice, newnode, None) + return newnode + + def visit_tryexcept(self, node, parent, assign_ctx=None): + """visit a TryExcept node by returning a fresh instance of it""" + newnode = new.TryExcept() + _lineno_parent(node, newnode, parent) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.handlers = [self.visit(child, newnode, assign_ctx) + for child in node.handlers] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] + return newnode + + def visit_tryfinally(self, node, parent, assign_ctx=None): + 
"""visit a TryFinally node by returning a fresh instance of it""" + newnode = new.TryFinally() + _lineno_parent(node, newnode, parent) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.finalbody = [self.visit(n, newnode, assign_ctx) + for n in node.finalbody] + return newnode + + def visit_tuple(self, node, parent, assign_ctx=None): + """visit a Tuple node by returning a fresh instance of it""" + newnode = new.Tuple() + _lineno_parent(node, newnode, parent) + newnode.elts = [self.visit(child, newnode, assign_ctx) + for child in node.elts] + return newnode + + def visit_unaryop(self, node, parent, assign_ctx=None): + """visit a UnaryOp node by returning a fresh instance of it""" + newnode = new.UnaryOp() + _lineno_parent(node, newnode, parent) + newnode.operand = self.visit(node.operand, newnode, assign_ctx) + newnode.op = _UNARY_OP_CLASSES[node.op.__class__] + return newnode + + def visit_while(self, node, parent, assign_ctx=None): + """visit a While node by returning a fresh instance of it""" + newnode = new.While() + _lineno_parent(node, newnode, parent) + newnode.test = self.visit(node.test, newnode, assign_ctx) + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + newnode.orelse = [self.visit(child, newnode, assign_ctx) + for child in node.orelse] + return newnode + + def visit_with(self, node, parent, assign_ctx=None): + newnode = new.With() + _lineno_parent(node, newnode, parent) + expr = self.visit(node.context_expr, newnode, assign_ctx) + if node.optional_vars is not None: + vars = self.visit(node.optional_vars, newnode, 'Assign') + else: + vars = None + self.asscontext = None + newnode.items = [(expr, vars)] + newnode.body = [self.visit(child, newnode, assign_ctx) + for child in node.body] + return newnode + + def visit_yield(self, node, parent, assign_ctx=None): + """visit a Yield node by returning a fresh instance of it""" + return _create_yield_node(node, parent, self, new.Yield) 
+ +class TreeRebuilder3k(TreeRebuilder): + """extend and overwrite TreeRebuilder for python3k""" + + def visit_arg(self, node, parent, assign_ctx=None): + """visit a arg node by returning a fresh AssName instance""" + # TODO(cpopa): introduce an Arg node instead of using AssignName. + return self.visit_assignname(node, parent, assign_ctx, node.arg) + + def visit_nameconstant(self, node, parent, assign_ctx=None): + # in Python 3.4 we have NameConstant for True / False / None + newnode = new.Const(node.value) + _set_infos(node, newnode, parent) + return newnode + + def visit_arguments(self, node, parent, assign_ctx=None): + newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent, assign_ctx) + newnode.kwonlyargs = [self.visit(child, newnode, 'Assign') + for child in node.kwonlyargs] + newnode.kw_defaults = [self.visit(child, newnode, None) + if child else None for child in node.kw_defaults] + newnode.annotations = [ + self.visit(arg.annotation, newnode, None) if arg.annotation else None + for arg in node.args] + return newnode + + def visit_excepthandler(self, node, parent, assign_ctx=None): + """visit an ExceptHandler node by returning a fresh instance of it""" + newnode = new.ExceptHandler() + _lineno_parent(node, newnode, parent) + if node.type is not None: + newnode.type = self.visit(node.type, newnode, assign_ctx) + if node.name is not None: + newnode.name = self.visit_assignname(node, newnode, 'Assign', node.name) + newnode.body = [self.visit(child, newnode, None) + for child in node.body] + return newnode + + def visit_nonlocal(self, node, parent, assign_ctx=None): + """visit a Nonlocal node and return a new instance of it""" + newnode = new.Nonlocal(node.names) + _set_infos(node, newnode, parent) + return newnode + + def visit_raise(self, node, parent, assign_ctx=None): + """visit a Raise node by returning a fresh instance of it""" + newnode = new.Raise() + _lineno_parent(node, newnode, parent) + # no traceback; anyway it is not used in Pylint + 
# python 3.3 introduced a new Try node replacing TryFinally/TryExcept nodes
var = self.visit(child.optional_vars, newnode, + 'Assign') + else: + var = None + return expr, var + newnode.items = [visit_child(child) + for child in node.items] + newnode.body = [self.visit(child, newnode, None) + for child in node.body] + return newnode + + def visit_with(self, node, parent, assign_ctx=None): + return self._visit_with(new.With, node, parent, assign_ctx=assign_ctx) + + def visit_yieldfrom(self, node, parent, assign_ctx=None): + return _create_yield_node(node, parent, self, new.YieldFrom) + + def visit_classdef(self, node, parent, assign_ctx=None): + newnode = super(TreeRebuilder3k, self).visit_classdef(node, parent, assign_ctx) + newnode._newstyle = True + for keyword in node.keywords: + if keyword.arg == 'metaclass': + newnode._metaclass = self.visit(keyword, newnode, assign_ctx).value + break + return newnode + + # Async structs added in Python 3.5 + def visit_asyncfunctiondef(self, node, parent, assign_ctx=None): + return self._visit_functiondef(new.AsyncFunctionDef, node, parent, + assign_ctx=assign_ctx) + + + def visit_asyncfor(self, node, parent, assign_ctx=None): + return self._visit_for(new.AsyncFor, node, parent, + assign_ctx=assign_ctx) + + def visit_await(self, node, parent, assign_ctx=None): + newnode = new.Await() + newnode.lineno = node.lineno + newnode.col_offset = node.col_offset + newnode.parent = parent + newnode.value = self.visit(node.value, newnode, None) + return newnode + + def visit_asyncwith(self, node, parent, assign_ctx=None): + return self._visit_with(new.AsyncWith, node, parent, + assign_ctx=assign_ctx) + + +if sys.version_info >= (3, 0): + TreeRebuilder = TreeRebuilder3k diff --git a/pymode/libs/astroid/scoped_nodes.py b/pymode/libs/astroid/scoped_nodes.py new file mode 100644 index 00000000..d78d1510 --- /dev/null +++ b/pymode/libs/astroid/scoped_nodes.py @@ -0,0 +1,1716 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+ + """ + result = [] + while True: + sequences = [s for s in sequences if s] # purge empty sequences + if not sequences: + return result + for s1 in sequences: # find merge candidates among seq heads + candidate = s1[0] + for s2 in sequences: + if candidate in s2[1:]: + candidate = None + break # reject the current head, it appears later + else: + break + if not candidate: + # Show all the remaining bases, which were considered as + # candidates for the next mro sequence. + bases = ["({})".format(", ".join(base.name + for base in subsequence)) + for subsequence in sequences] + raise exceptions.InconsistentMroError( + "Cannot create a consistent method resolution " + "order for bases %s" % ", ".join(bases)) + + result.append(candidate) + # remove the chosen candidate + for seq in sequences: + if seq[0] == candidate: + del seq[0] + + +def _verify_duplicates_mro(sequences): + for sequence in sequences: + names = [node.qname() for node in sequence] + if len(names) != len(set(names)): + raise exceptions.DuplicateBasesError('Duplicates found in the mro.') + + +def remove_nodes(cls): + @wrapt.decorator + def decorator(func, instance, args, kwargs): + nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)] + if not nodes: + raise exceptions.NotFoundError() + return nodes + return decorator + + +def function_to_method(n, klass): + if isinstance(n, FunctionDef): + if n.type == 'classmethod': + return bases.BoundMethod(n, klass) + if n.type != 'staticmethod': + return bases.UnboundMethod(n) + return n + + +def std_special_attributes(self, name, add_locals=True): + if add_locals: + locals = self._locals + else: + locals = {} + if name == '__name__': + return [node_classes.const_factory(self.name)] + locals.get(name, []) + if name == '__doc__': + return [node_classes.const_factory(self.doc)] + locals.get(name, []) + if name == '__dict__': + return [node_classes.Dict()] + locals.get(name, []) + raise exceptions.NotFoundError(name) + + +MANAGER = 
manager.AstroidManager() +def builtin_lookup(name): + """lookup a name into the builtin module + return the list of matching statements and the astroid for the builtin + module + """ + builtin_astroid = MANAGER.ast_from_module(six.moves.builtins) + if name == '__dict__': + return builtin_astroid, () + try: + stmts = builtin_astroid._locals[name] + except KeyError: + stmts = () + return builtin_astroid, stmts + + +# TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup +class LocalsDictNodeNG(node_classes.LookupMixIn, bases.NodeNG): + """ this class provides locals handling common to Module, FunctionDef + and ClassDef nodes, including a dict like interface for direct access + to locals information + """ + + # attributes below are set by the builder module or by raw factories + + # dictionary of locals with name as key and node defining the local as + # value + @property + def locals(self): + util.attribute_to_function_warning('locals', 2.0, 'get_locals') + return self._locals + @locals.setter + def locals(self, _locals): + util.attribute_to_function_warning('locals', 2.0, 'get_locals') + self._locals = _locals + @locals.deleter + def locals(self): + util.attribute_to_function_warning('locals', 2.0, 'get_locals') + del self._locals + + def qname(self): + """return the 'qualified' name of the node, eg module.name, + module.class.name ... + """ + if self.parent is None: + return self.name + return '%s.%s' % (self.parent.frame().qname(), self.name) + + def frame(self): + """return the first parent frame node (i.e. Module, FunctionDef or ClassDef) + """ + return self + + def scope(self): + """return the first node defining a new scope (i.e. 
Module, + FunctionDef, ClassDef, Lambda but also GeneratorExp, DictComp and SetComp) + """ + return self + + def _scope_lookup(self, node, name, offset=0): + """XXX method for interfacing the scope lookup""" + try: + stmts = node._filter_stmts(self._locals[name], self, offset) + except KeyError: + stmts = () + if stmts: + return self, stmts + if self.parent: # i.e. not Module + # nested scope: if parent scope is a function, that's fine + # else jump to the module + pscope = self.parent.scope() + if not pscope.is_function: + pscope = pscope.root() + return pscope.scope_lookup(node, name) + return builtin_lookup(name) # Module + + def set_local(self, name, stmt): + """define in locals ( is the node defining the name) + if the node is a Module node (i.e. has globals), add the name to + globals + + if the name is already defined, ignore it + """ + #assert not stmt in self._locals.get(name, ()), (self, stmt) + self._locals.setdefault(name, []).append(stmt) + + __setitem__ = set_local + + def _append_node(self, child): + """append a child, linking it in the tree""" + self.body.append(child) + child.parent = self + + def add_local_node(self, child_node, name=None): + """append a child which should alter locals to the given node""" + if name != '__class__': + # add __class__ node as a child will cause infinite recursion later! 
+ self._append_node(child_node) + self.set_local(name or child_node.name, child_node) + + def __getitem__(self, item): + """method from the `dict` interface returning the first node + associated with the given name in the locals dictionary + + :type item: str + :param item: the name of the locally defined object + :raises KeyError: if the name is not defined + """ + return self._locals[item][0] + + def __iter__(self): + """method from the `dict` interface returning an iterator on + `self.keys()` + """ + return iter(self.keys()) + + def keys(self): + """method from the `dict` interface returning a tuple containing + locally defined names + """ + return list(self._locals.keys()) + + def values(self): + """method from the `dict` interface returning a tuple containing + locally defined nodes which are instance of `FunctionDef` or `ClassDef` + """ + return [self[key] for key in self.keys()] + + def items(self): + """method from the `dict` interface returning a list of tuple + containing each locally defined name with its associated node, + which is an instance of `FunctionDef` or `ClassDef` + """ + return list(zip(self.keys(), self.values())) + + def __contains__(self, name): + return name in self._locals + + +class Module(LocalsDictNodeNG): + _astroid_fields = ('body',) + + fromlineno = 0 + lineno = 0 + + # attributes below are set by the builder module or by raw factories + + # the file from which as been extracted the astroid representation. It may + # be None if the representation has been built from a built-in module + source_file = None + # Alternatively, if built from a string/bytes, this can be set + source_code = None + # encoding of python source file, so we can get unicode out of it (python2 + # only) + file_encoding = None + # the module name + name = None + # boolean for astroid built from source (i.e. 
ast) + pure_python = None + # boolean for package module + package = None + # dictionary of globals with name as key and node defining the global + # as value + _globals = None + + # Future imports + _future_imports = None + + # names of python special attributes (handled by getattr impl.) + special_attributes = set(('__name__', '__doc__', '__file__', '__path__', + '__dict__')) + # names of module attributes available through the global scope + scope_attrs = set(('__name__', '__doc__', '__file__', '__path__')) + + def __init__(self, name, doc, pure_python=True): + self.name = name + self.doc = doc + self.pure_python = pure_python + self._locals = self._globals = {} + self.body = [] + self._future_imports = set() + + # Future deprecation warnings + @property + def file(self): + util.rename_warning('file', 2.0, 'source_file') + return self.source_file + @file.setter + def file(self, source_file): + util.rename_warning('file', 2.0, 'source_file') + self.source_file = source_file + @file.deleter + def file(self): + util.rename_warning('file', 2.0, 'source_file') + del self.source_file + + @property + def path(self): + util.rename_warning('path', 2.0, 'source_file') + return self.source_file + @path.setter + def path(self, source_file): + util.rename_warning('path', 2.0, 'source_file') + self.source_file = source_file + @path.deleter + def path(self): + util.rename_warning('path', 2.0, 'source_file') + del self.source_file + + @property + def file_bytes(self): + util.rename_warning('file_bytes', 2.0, 'source_code') + return self.source_code + @file_bytes.setter + def file_bytes(self, source_code): + util.rename_warning('file_bytes', 2.0, 'source_code') + self.source_code = source_code + @file_bytes.deleter + def file_bytes(self): + util.rename_warning('file_bytes', 2.0, 'source_code') + del self.source_code + + @property + def globals(self): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + return self._locals + @globals.setter + def globals(self, 
_globals): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + self._locals = _globals + @globals.deleter + def globals(self): + util.attribute_to_function_warning('globals', 2.0, 'get_locals') + del self._locals + + @property + def future_imports(self): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + return self._future_imports + @future_imports.setter + def future_imports(self, _future_imports): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + self._future_imports = _future_imports + @future_imports.deleter + def future_imports(self): + util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') + del self._future_imports + + def _get_stream(self): + if self.source_code is not None: + return io.BytesIO(self.source_code) + if self.source_file is not None: + stream = open(self.source_file, 'rb') + return stream + return None + + @property + def file_stream(self): + warnings.warn("file_stream property is deprecated and " + "it is slated for removal in astroid 1.6." + "Use the new method 'stream' instead.", + PendingDeprecationWarning, + stacklevel=2) + return self._get_stream() + + def stream(self): + """Get a stream to the underlying file or bytes.""" + return self._get_stream() + + def close(self): + """Close the underlying file streams.""" + warnings.warn("close method is deprecated and it is " + "slated for removal in astroid 1.6, along " + "with 'file_stream' property. " + "Its behaviour is replaced by managing each " + "file stream returned by the 'stream' method.", + PendingDeprecationWarning, + stacklevel=2) + + def block_range(self, lineno): + """return block line numbers. 
+ + start from the beginning whatever the given lineno + """ + return self.fromlineno, self.tolineno + + def scope_lookup(self, node, name, offset=0): + if name in self.scope_attrs and name not in self._locals: + try: + return self, self.getattr(name) + except exceptions.NotFoundError: + return self, () + return self._scope_lookup(node, name, offset) + + def pytype(self): + return '%s.module' % BUILTINS + + def display_type(self): + return 'Module' + + @remove_nodes(node_classes.DelName) + def getattr(self, name, context=None, ignore_locals=False): + if name in self.special_attributes: + if name == '__file__': + return [node_classes.const_factory(self.source_file)] + self._locals.get(name, []) + if name == '__path__' and self.package: + return [node_classes.List()] + self._locals.get(name, []) + return std_special_attributes(self, name) + if not ignore_locals and name in self._locals: + return self._locals[name] + if self.package: + try: + return [self.import_module(name, relative_only=True)] + except exceptions.AstroidBuildingException: + raise exceptions.NotFoundError(name) + except SyntaxError: + raise exceptions.NotFoundError(name) + raise exceptions.NotFoundError(name) + + def igetattr(self, name, context=None): + """inferred getattr""" + # set lookup name since this is necessary to infer on import nodes for + # instance + context = contextmod.copy_context(context) + context.lookupname = name + try: + return bases._infer_stmts(self.getattr(name, context), + context, frame=self) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) + + def fully_defined(self): + """return True if this module has been built from a .py file + and so contains a complete representation including the code + """ + return self.source_file is not None and self.source_file.endswith('.py') + + def statement(self): + """return the first parent node marked as statement node + consider a module as a statement... 
+ """ + return self + + def previous_sibling(self): + """module has no sibling""" + return + + def next_sibling(self): + """module has no sibling""" + return + + if six.PY2: + @decorators_mod.cachedproperty + def _absolute_import_activated(self): + for stmt in self._locals.get('absolute_import', ()): + if isinstance(stmt, node_classes.ImportFrom) and stmt.modname == '__future__': + return True + return False + else: + _absolute_import_activated = True + + def absolute_import_activated(self): + return self._absolute_import_activated + + def import_module(self, modname, relative_only=False, level=None): + """import the given module considering self as context""" + if relative_only and level is None: + level = 0 + absmodname = self.relative_to_absolute_name(modname, level) + try: + return MANAGER.ast_from_module_name(absmodname) + except exceptions.AstroidBuildingException: + # we only want to import a sub module or package of this module, + # skip here + if relative_only: + raise + return MANAGER.ast_from_module_name(modname) + + def relative_to_absolute_name(self, modname, level): + """return the absolute module name for a relative import. + + The relative import can be implicit or explicit. 
+ """ + # XXX this returns non sens when called on an absolute import + # like 'pylint.checkers.astroid.utils' + # XXX doesn't return absolute name if self.name isn't absolute name + if self.absolute_import_activated() and level is None: + return modname + if level: + if self.package: + level = level - 1 + package_name = self.name.rsplit('.', level)[0] + elif self.package: + package_name = self.name + else: + package_name = self.name.rsplit('.', 1)[0] + if package_name: + if not modname: + return package_name + return '%s.%s' % (package_name, modname) + return modname + + def wildcard_import_names(self): + """return the list of imported names when this module is 'wildcard + imported' + + It doesn't include the '__builtins__' name which is added by the + current CPython implementation of wildcard imports. + """ + # We separate the different steps of lookup in try/excepts + # to avoid catching too many Exceptions + default = [name for name in self.keys() if not name.startswith('_')] + try: + all = self['__all__'] + except KeyError: + return default + + try: + explicit = next(all.assigned_stmts()) + except exceptions.InferenceError: + return default + except AttributeError: + # not an assignment node + # XXX infer? + return default + + # Try our best to detect the exported name. 
+ inferred = [] + try: + explicit = next(explicit.infer()) + except exceptions.InferenceError: + return default + if not isinstance(explicit, (node_classes.Tuple, node_classes.List)): + return default + + str_const = lambda node: (isinstance(node, node_classes.Const) and + isinstance(node.value, six.string_types)) + for node in explicit.elts: + if str_const(node): + inferred.append(node.value) + else: + try: + inferred_node = next(node.infer()) + except exceptions.InferenceError: + continue + if str_const(inferred_node): + inferred.append(inferred_node.value) + return inferred + + def _public_names(self): + """Get the list of the names which are publicly available in this module.""" + return [name for name in self.keys() if not name.startswith('_')] + + def bool_value(self): + return True + + +class ComprehensionScope(LocalsDictNodeNG): + def frame(self): + return self.parent.frame() + + scope_lookup = LocalsDictNodeNG._scope_lookup + + +class GeneratorExp(ComprehensionScope): + _astroid_fields = ('elt', 'generators') + + def __init__(self): + self._locals = {} + self.elt = None + self.generators = [] + + +class DictComp(ComprehensionScope): + _astroid_fields = ('key', 'value', 'generators') + + def __init__(self): + self._locals = {} + self.key = None + self.value = None + self.generators = [] + + +class SetComp(ComprehensionScope): + _astroid_fields = ('elt', 'generators') + + def __init__(self): + self._locals = {} + self.elt = None + self.generators = [] + + +class _ListComp(bases.NodeNG): + """class representing a ListComp node""" + _astroid_fields = ('elt', 'generators') + elt = None + generators = None + + +if six.PY3: + class ListComp(_ListComp, ComprehensionScope): + """class representing a ListComp node""" + def __init__(self): + self._locals = {} +else: + class ListComp(_ListComp): + """class representing a ListComp node""" + + +def _infer_decorator_callchain(node): + """Detect decorator call chaining and see if the end result is a + static or a 
classmethod. + """ + if not isinstance(node, FunctionDef): + return + if not node.parent: + return + try: + # TODO: We don't handle multiple inference results right now, + # because there's no flow to reason when the return + # is what we are looking for, a static or a class method. + result = next(node.infer_call_result(node.parent)) + except (StopIteration, exceptions.InferenceError): + return + if isinstance(result, bases.Instance): + result = result._proxied + if isinstance(result, ClassDef): + if result.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + if result.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' + + +class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG): + _astroid_fields = ('args', 'body',) + name = '' + + # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod' + type = 'function' + + def __init__(self): + self._locals = {} + self.args = [] + self.body = [] + + def pytype(self): + if 'method' in self.type: + return '%s.instancemethod' % BUILTINS + return '%s.function' % BUILTINS + + def display_type(self): + if 'method' in self.type: + return 'Method' + return 'Function' + + def callable(self): + return True + + def argnames(self): + """return a list of argument names""" + if self.args.args: # maybe None with builtin functions + names = _rec_get_names(self.args.args) + else: + names = [] + if self.args.vararg: + names.append(self.args.vararg) + if self.args.kwarg: + names.append(self.args.kwarg) + return names + + def infer_call_result(self, caller, context=None): + """infer what a function is returning when called""" + return self.body.infer(context) + + def scope_lookup(self, node, name, offset=0): + if node in self.args.defaults or node in self.args.kw_defaults: + frame = self.parent.frame() + # line offset to avoid that def func(f=func) resolve the default + # value to the defined function + offset = -1 + else: + # check this is not used in function decorators + frame = self + 
"""Get the extra decorators that this function can have.
+ continue + try: + meth = frame[self.name] + except KeyError: + continue + else: + # Must be a function and in the same frame as the + # original method. + if (isinstance(meth, FunctionDef) + and assign_node.frame() == frame): + decorators.append(assign.value) + return decorators + + @decorators_mod.cachedproperty + def type(self): + """Get the function type for this node. + + Possible values are: method, function, staticmethod, classmethod. + """ + builtin_descriptors = {'classmethod', 'staticmethod'} + + for decorator in self.extra_decorators: + if decorator.func.name in builtin_descriptors: + return decorator.func.name + + frame = self.parent.frame() + type_name = 'function' + if isinstance(frame, ClassDef): + if self.name == '__new__': + return 'classmethod' + else: + type_name = 'method' + + if self.decorators: + for node in self.decorators.nodes: + if isinstance(node, node_classes.Name): + if node.name in builtin_descriptors: + return node.name + + if isinstance(node, node_classes.Call): + # Handle the following case: + # @some_decorator(arg1, arg2) + # def func(...) + # + try: + current = next(node.func.infer()) + except exceptions.InferenceError: + continue + _type = _infer_decorator_callchain(current) + if _type is not None: + return _type + + try: + for inferred in node.infer(): + # Check to see if this returns a static or a class method. 
+ _type = _infer_decorator_callchain(inferred) + if _type is not None: + return _type + + if not isinstance(inferred, ClassDef): + continue + for ancestor in inferred.ancestors(): + if not isinstance(ancestor, ClassDef): + continue + if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' + except exceptions.InferenceError: + pass + return type_name + + @decorators_mod.cachedproperty + def fromlineno(self): + # lineno is the line number of the first decorator, we want the def + # statement lineno + lineno = self.lineno + if self.decorators is not None: + lineno += sum(node.tolineno - node.lineno + 1 + for node in self.decorators.nodes) + + return lineno + + @decorators_mod.cachedproperty + def blockstart_tolineno(self): + return self.args.tolineno + + def block_range(self, lineno): + """return block line numbers. + + start from the "def" position whatever the given lineno + """ + return self.fromlineno, self.tolineno + + def getattr(self, name, context=None): + """this method doesn't look in the instance_attrs dictionary since it's + done by an Instance proxy at inference time. + """ + if name == '__module__': + return [node_classes.const_factory(self.root().qname())] + if name in self._instance_attrs: + return self._instance_attrs[name] + return std_special_attributes(self, name, False) + + def igetattr(self, name, context=None): + """Inferred getattr, which returns an iterator of inferred statements.""" + try: + return bases._infer_stmts(self.getattr(name, context), + context, frame=self) + except exceptions.NotFoundError: + raise exceptions.InferenceError(name) + + def is_method(self): + """return true if the function node should be considered as a method""" + # check we are defined in a ClassDef, because this is usually expected + # (e.g. pylint...) 
when is_method() return True + return self.type != 'function' and isinstance(self.parent.frame(), ClassDef) + + @decorators_mod.cached + def decoratornames(self): + """return a list of decorator qualified names""" + result = set() + decoratornodes = [] + if self.decorators is not None: + # pylint: disable=unsupported-binary-operation; damn flow control. + decoratornodes += self.decorators.nodes + decoratornodes += self.extra_decorators + for decnode in decoratornodes: + try: + for infnode in decnode.infer(): + result.add(infnode.qname()) + except exceptions.InferenceError: + continue + return result + + def is_bound(self): + """return true if the function is bound to an Instance or a class""" + return self.type == 'classmethod' + + def is_abstract(self, pass_is_abstract=True): + """Returns True if the method is abstract. + + A method is considered abstract if + - the only statement is 'raise NotImplementedError', or + - the only statement is 'pass' and pass_is_abstract is True, or + - the method is annotated with abc.astractproperty/abc.abstractmethod + """ + if self.decorators: + for node in self.decorators.nodes: + try: + inferred = next(node.infer()) + except exceptions.InferenceError: + continue + if inferred and inferred.qname() in ('abc.abstractproperty', + 'abc.abstractmethod'): + return True + + for child_node in self.body: + if isinstance(child_node, node_classes.Raise): + if child_node.raises_not_implemented(): + return True + return pass_is_abstract and isinstance(child_node, node_classes.Pass) + # empty function is the same as function with a single "pass" statement + if pass_is_abstract: + return True + + def is_generator(self): + """return true if this is a generator function""" + yield_nodes = (node_classes.Yield, node_classes.YieldFrom) + return next(self.nodes_of_class(yield_nodes, + skip_klass=(FunctionDef, Lambda)), False) + + def infer_call_result(self, caller, context=None): + """infer what a function is returning when called""" + if 
self.is_generator(): + result = bases.Generator() + result.parent = self + yield result + return + # This is really a gigantic hack to work around metaclass generators + # that return transient class-generating functions. Pylint's AST structure + # cannot handle a base class object that is only used for calling __new__, + # but does not contribute to the inheritance structure itself. We inject + # a fake class into the hierarchy here for several well-known metaclass + # generators, and filter it out later. + if (self.name == 'with_metaclass' and + len(self.args.args) == 1 and + self.args.vararg is not None): + metaclass = next(caller.args[0].infer(context)) + if isinstance(metaclass, ClassDef): + c = ClassDef('temporary_class', None) + c.hide = True + c.parent = self + class_bases = [next(b.infer(context)) for b in caller.args[1:]] + c.bases = [base for base in class_bases if base != util.YES] + c._metaclass = metaclass + yield c + return + returns = self.nodes_of_class(node_classes.Return, skip_klass=FunctionDef) + for returnnode in returns: + if returnnode.value is None: + yield node_classes.Const(None) + else: + try: + for inferred in returnnode.value.infer(context): + yield inferred + except exceptions.InferenceError: + yield util.YES + + +class AsyncFunctionDef(FunctionDef): + """Asynchronous function created with the `async` keyword.""" + + +def _rec_get_names(args, names=None): + """return a list of all argument names""" + if names is None: + names = [] + for arg in args: + if isinstance(arg, node_classes.Tuple): + _rec_get_names(arg.elts, names) + else: + names.append(arg.name) + return names + + +def _is_metaclass(klass, seen=None): + """ Return if the given class can be + used as a metaclass. 
+ """ + if klass.name == 'type': + return True + if seen is None: + seen = set() + for base in klass.bases: + try: + for baseobj in base.infer(): + baseobj_name = baseobj.qname() + if baseobj_name in seen: + continue + else: + seen.add(baseobj_name) + if isinstance(baseobj, bases.Instance): + # not abstract + return False + if baseobj is util.YES: + continue + if baseobj is klass: + continue + if not isinstance(baseobj, ClassDef): + continue + if baseobj._type == 'metaclass': + return True + if _is_metaclass(baseobj, seen): + return True + except exceptions.InferenceError: + continue + return False + + +def _class_type(klass, ancestors=None): + """return a ClassDef node type to differ metaclass and exception + from 'regular' classes + """ + # XXX we have to store ancestors in case we have a ancestor loop + if klass._type is not None: + return klass._type + if _is_metaclass(klass): + klass._type = 'metaclass' + elif klass.name.endswith('Exception'): + klass._type = 'exception' + else: + if ancestors is None: + ancestors = set() + klass_name = klass.qname() + if klass_name in ancestors: + # XXX we are in loop ancestors, and have found no type + klass._type = 'class' + return 'class' + ancestors.add(klass_name) + for base in klass.ancestors(recurs=False): + name = _class_type(base, ancestors) + if name != 'class': + if name == 'metaclass' and not _is_metaclass(klass): + # don't propagate it if the current class + # can't be a metaclass + continue + klass._type = base.type + break + if klass._type is None: + klass._type = 'class' + return klass._type + + +class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, bases.Statement): + + # some of the attributes below are set by the builder module or + # by a raw factories + + # a dictionary of class instances attributes + _astroid_fields = ('decorators', 'bases', 'body') # name + + decorators = None + special_attributes = set(('__name__', '__doc__', '__dict__', '__module__', + '__bases__', '__mro__', '__subclasses__')) + 
+ _type = None + _metaclass_hack = False + hide = False + type = property(_class_type, + doc="class'type, possible values are 'class' | " + "'metaclass' | 'exception'") + + def __init__(self, name, doc): + self._instance_attrs = {} + self._locals = {} + self.bases = [] + self.body = [] + self.name = name + self.doc = doc + + @property + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + return self._instance_attrs + @instance_attrs.setter + def instance_attrs(self, _instance_attrs): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + self._instance_attrs = _instance_attrs + @instance_attrs.deleter + def instance_attrs(self): + util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') + del self._instance_attrs + + def _newstyle_impl(self, context=None): + if context is None: + context = contextmod.InferenceContext() + if self._newstyle is not None: + return self._newstyle + for base in self.ancestors(recurs=False, context=context): + if base._newstyle_impl(context): + self._newstyle = True + break + klass = self._explicit_metaclass() + # could be any callable, we'd need to infer the result of klass(name, + # bases, dict). punt if it's not a class node. + if klass is not None and isinstance(klass, ClassDef): + self._newstyle = klass._newstyle_impl(context) + if self._newstyle is None: + self._newstyle = False + return self._newstyle + + _newstyle = None + newstyle = property(_newstyle_impl, + doc="boolean indicating if it's a new style class" + "or not") + + @decorators_mod.cachedproperty + def blockstart_tolineno(self): + if self.bases: + return self.bases[-1].tolineno + else: + return self.fromlineno + + def block_range(self, lineno): + """return block line numbers. 
+ + start from the "class" position whatever the given lineno + """ + return self.fromlineno, self.tolineno + + def pytype(self): + if self.newstyle: + return '%s.type' % BUILTINS + return '%s.classobj' % BUILTINS + + def display_type(self): + return 'Class' + + def callable(self): + return True + + def is_subtype_of(self, type_name, context=None): + if self.qname() == type_name: + return True + for anc in self.ancestors(context=context): + if anc.qname() == type_name: + return True + + def _infer_type_call(self, caller, context): + name_node = next(caller.args[0].infer(context)) + if (isinstance(name_node, node_classes.Const) and + isinstance(name_node.value, six.string_types)): + name = name_node.value + else: + return util.YES + + result = ClassDef(name, None) + + # Get the bases of the class. + class_bases = next(caller.args[1].infer(context)) + if isinstance(class_bases, (node_classes.Tuple, node_classes.List)): + result.bases = class_bases.itered() + else: + # There is currently no AST node that can represent an 'unknown' + # node (YES is not an AST node), therefore we simply return YES here + # although we know at least the name of the class. 
+ return util.YES + + # Get the members of the class + try: + members = next(caller.args[2].infer(context)) + except exceptions.InferenceError: + members = None + + if members and isinstance(members, node_classes.Dict): + for attr, value in members.items: + if (isinstance(attr, node_classes.Const) and + isinstance(attr.value, six.string_types)): + result._locals[attr.value] = [value] + + result.parent = caller.parent + return result + + def infer_call_result(self, caller, context=None): + """infer what a class is returning when called""" + if (self.is_subtype_of('%s.type' % (BUILTINS,), context) + and len(caller.args) == 3): + result = self._infer_type_call(caller, context) + yield result + else: + yield bases.Instance(self) + + def scope_lookup(self, node, name, offset=0): + # pylint: disable=redefined-variable-type + if any(node == base or base.parent_of(node) + for base in self.bases): + # Handle the case where we have either a name + # in the bases of a class, which exists before + # the actual definition or the case where we have + # a Getattr node, with that name. + # + # name = ... + # class A(name): + # def name(self): ... + # + # import name + # class A(name.Name): + # def name(self): ... 
+ + frame = self.parent.frame() + # line offset to avoid that class A(A) resolve the ancestor to + # the defined class + offset = -1 + else: + frame = self + return frame._scope_lookup(node, name, offset) + + @property + def basenames(self): + """Get the list of parent class names, as they appear in the class definition.""" + return [bnode.as_string() for bnode in self.bases] + + def ancestors(self, recurs=True, context=None): + """return an iterator on the node base classes in a prefixed + depth first order + + :param recurs: + boolean indicating if it should recurse or return direct + ancestors only + """ + # FIXME: should be possible to choose the resolution order + # FIXME: inference make infinite loops possible here + yielded = set([self]) + if context is None: + context = contextmod.InferenceContext() + if six.PY3: + if not self.bases and self.qname() != 'builtins.object': + yield builtin_lookup("object")[1][0] + return + + for stmt in self.bases: + with context.restore_path(): + try: + for baseobj in stmt.infer(context): + if not isinstance(baseobj, ClassDef): + if isinstance(baseobj, bases.Instance): + baseobj = baseobj._proxied + else: + continue + if not baseobj.hide: + if baseobj in yielded: + continue + yielded.add(baseobj) + yield baseobj + if recurs: + for grandpa in baseobj.ancestors(recurs=True, + context=context): + if grandpa is self: + # This class is the ancestor of itself. + break + if grandpa in yielded: + continue + yielded.add(grandpa) + yield grandpa + except exceptions.InferenceError: + continue + + def local_attr_ancestors(self, name, context=None): + """return an iterator on astroid representation of parent classes + which have defined in their locals + """ + if self.newstyle and all(n.newstyle for n in self.ancestors(context)): + # Look up in the mro if we can. This will result in the + # attribute being looked up just as Python does it. 
+ try: + ancestors = self.mro(context)[1:] + except exceptions.MroError: + # Fallback to use ancestors, we can't determine + # a sane MRO. + ancestors = self.ancestors(context=context) + else: + ancestors = self.ancestors(context=context) + for astroid in ancestors: + if name in astroid: + yield astroid + + def instance_attr_ancestors(self, name, context=None): + """return an iterator on astroid representation of parent classes + which have defined in their instance attribute dictionary + """ + for astroid in self.ancestors(context=context): + if name in astroid._instance_attrs: + yield astroid + + def has_base(self, node): + return node in self.bases + + @remove_nodes(node_classes.DelAttr) + def local_attr(self, name, context=None): + """return the list of assign node associated to name in this class + locals or in its parents + + :raises `NotFoundError`: + if no attribute with this name has been find in this class or + its parent classes + """ + try: + return self._locals[name] + except KeyError: + for class_node in self.local_attr_ancestors(name, context): + return class_node._locals[name] + raise exceptions.NotFoundError(name) + + @remove_nodes(node_classes.DelAttr) + def instance_attr(self, name, context=None): + """return the astroid nodes associated to name in this class instance + attributes dictionary and in its parents + + :raises `NotFoundError`: + if no attribute with this name has been find in this class or + its parent classes + """ + # Return a copy, so we don't modify self._instance_attrs, + # which could lead to infinite loop. 
+ values = list(self._instance_attrs.get(name, [])) + # get all values from parents + for class_node in self.instance_attr_ancestors(name, context): + values += class_node._instance_attrs[name] + if not values: + raise exceptions.NotFoundError(name) + return values + + def instantiate_class(self): + """return Instance of ClassDef node, else return self""" + return bases.Instance(self) + + def instanciate_class(self): + """return Instance of ClassDef node, else return self""" + util.rename_warning('instanciate_class()', 2.0, 'instantiate_class()') + return self.instantiate_class() + + def getattr(self, name, context=None): + """this method doesn't look in the instance_attrs dictionary since it's + done by an Instance proxy at inference time. + + It may return a YES object if the attribute has not been actually + found but a __getattr__ or __getattribute__ method is defined + """ + values = self._locals.get(name, []) + if name in self.special_attributes: + if name == '__module__': + return [node_classes.const_factory(self.root().qname())] + values + if name == '__bases__': + node = node_classes.Tuple() + elts = list(self._inferred_bases(context)) + node.elts = elts + return [node] + values + if name == '__mro__' and self.newstyle: + mro = self.mro() + node = node_classes.Tuple() + node.elts = mro + return [node] + return std_special_attributes(self, name) + # don't modify the list in self._locals! 
+ values = list(values) + for classnode in self.ancestors(recurs=True, context=context): + values += classnode._locals.get(name, []) + if not values: + raise exceptions.NotFoundError(name) + return values + + def igetattr(self, name, context=None): + """inferred getattr, need special treatment in class to handle + descriptors + """ + # set lookup name since this is necessary to infer on import nodes for + # instance + context = contextmod.copy_context(context) + context.lookupname = name + try: + for inferred in bases._infer_stmts(self.getattr(name, context), + context, frame=self): + # yield YES object instead of descriptors when necessary + if (not isinstance(inferred, node_classes.Const) + and isinstance(inferred, bases.Instance)): + try: + inferred._proxied.getattr('__get__', context) + except exceptions.NotFoundError: + yield inferred + else: + yield util.YES + else: + yield function_to_method(inferred, self) + except exceptions.NotFoundError: + if not name.startswith('__') and self.has_dynamic_getattr(context): + # class handle some dynamic attributes, return a YES object + yield util.YES + else: + raise exceptions.InferenceError(name) + + def has_dynamic_getattr(self, context=None): + """ + Check if the current instance has a custom __getattr__ + or a custom __getattribute__. + + If any such method is found and it is not from + builtins, nor from an extension module, then the function + will return True. 
+ """ + def _valid_getattr(node): + root = node.root() + return root.name != BUILTINS and getattr(root, 'pure_python', None) + + try: + return _valid_getattr(self.getattr('__getattr__', context)[0]) + except exceptions.NotFoundError: + #if self.newstyle: XXX cause an infinite recursion error + try: + getattribute = self.getattr('__getattribute__', context)[0] + return _valid_getattr(getattribute) + except exceptions.NotFoundError: + pass + return False + + def methods(self): + """return an iterator on all methods defined in the class and + its ancestors + """ + done = {} + for astroid in itertools.chain(iter((self,)), self.ancestors()): + for meth in astroid.mymethods(): + if meth.name in done: + continue + done[meth.name] = None + yield meth + + def mymethods(self): + """return an iterator on all methods defined in the class""" + for member in self.values(): + if isinstance(member, FunctionDef): + yield member + + def implicit_metaclass(self): + """Get the implicit metaclass of the current class + + For newstyle classes, this will return an instance of builtins.type. + For oldstyle classes, it will simply return None, since there's + no implicit metaclass there. + """ + + if self.newstyle: + return builtin_lookup('type')[1][0] + + _metaclass = None + def _explicit_metaclass(self): + """ Return the explicit defined metaclass + for the current class. + + An explicit defined metaclass is defined + either by passing the ``metaclass`` keyword argument + in the class definition line (Python 3) or (Python 2) by + having a ``__metaclass__`` class attribute, or if there are + no explicit bases but there is a global ``__metaclass__`` variable. 
+ """ + for base in self.bases: + try: + for baseobj in base.infer(): + if isinstance(baseobj, ClassDef) and baseobj.hide: + self._metaclass = baseobj._metaclass + self._metaclass_hack = True + break + except exceptions.InferenceError: + pass + + if self._metaclass: + # Expects this from Py3k TreeRebuilder + try: + return next(node for node in self._metaclass.infer() + if node is not util.YES) + except (exceptions.InferenceError, StopIteration): + return None + if six.PY3: + return None + + if '__metaclass__' in self._locals: + assignment = self._locals['__metaclass__'][-1] + elif self.bases: + return None + elif '__metaclass__' in self.root()._locals: + assignments = [ass for ass in self.root()._locals['__metaclass__'] + if ass.lineno < self.lineno] + if not assignments: + return None + assignment = assignments[-1] + else: + return None + + try: + inferred = next(assignment.infer()) + except exceptions.InferenceError: + return + if inferred is util.YES: # don't expose this + return None + return inferred + + def _find_metaclass(self, seen=None): + if seen is None: + seen = set() + seen.add(self) + + klass = self._explicit_metaclass() + if klass is None: + for parent in self.ancestors(): + if parent not in seen: + klass = parent._find_metaclass(seen) + if klass is not None: + break + return klass + + def metaclass(self): + """Return the metaclass of this class. + + If this class does not define explicitly a metaclass, + then the first defined metaclass in ancestors will be used + instead. + """ + return self._find_metaclass() + + def has_metaclass_hack(self): + return self._metaclass_hack + + def _islots(self): + """ Return an iterator with the inferred slots. 
""" + if '__slots__' not in self._locals: + return + for slots in self.igetattr('__slots__'): + # check if __slots__ is a valid type + for meth in ITER_METHODS: + try: + slots.getattr(meth) + break + except exceptions.NotFoundError: + continue + else: + continue + + if isinstance(slots, node_classes.Const): + # a string. Ignore the following checks, + # but yield the node, only if it has a value + if slots.value: + yield slots + continue + if not hasattr(slots, 'itered'): + # we can't obtain the values, maybe a .deque? + continue + + if isinstance(slots, node_classes.Dict): + values = [item[0] for item in slots.items] + else: + values = slots.itered() + if values is util.YES: + continue + if not values: + # Stop the iteration, because the class + # has an empty list of slots. + raise StopIteration(values) + + for elt in values: + try: + for inferred in elt.infer(): + if inferred is util.YES: + continue + if (not isinstance(inferred, node_classes.Const) or + not isinstance(inferred.value, + six.string_types)): + continue + if not inferred.value: + continue + yield inferred + except exceptions.InferenceError: + continue + + def _slots(self): + if not self.newstyle: + raise NotImplementedError( + "The concept of slots is undefined for old-style classes.") + + slots = self._islots() + try: + first = next(slots) + except StopIteration as exc: + # The class doesn't have a __slots__ definition or empty slots. + if exc.args and exc.args[0] not in ('', None): + return exc.args[0] + return None + # pylint: disable=unsupported-binary-operation; false positive + return [first] + list(slots) + + # Cached, because inferring them all the time is expensive + @decorators_mod.cached + def slots(self): + """Get all the slots for this node. + + If the class doesn't define any slot, through `__slots__` + variable, then this function will return a None. + Also, it will return None in the case the slots weren't inferred. + Otherwise, it will return a list of slot names. 
+ """ + def grouped_slots(): + # Not interested in object, since it can't have slots. + for cls in self.mro()[:-1]: + try: + cls_slots = cls._slots() + except NotImplementedError: + continue + if cls_slots is not None: + for slot in cls_slots: + yield slot + else: + yield None + + if not self.newstyle: + raise NotImplementedError( + "The concept of slots is undefined for old-style classes.") + + slots = list(grouped_slots()) + if not all(slot is not None for slot in slots): + return None + + return sorted(slots, key=lambda item: item.value) + + def _inferred_bases(self, context=None): + # TODO(cpopa): really similar with .ancestors, + # but the difference is when one base is inferred, + # only the first object is wanted. That's because + # we aren't interested in superclasses, as in the following + # example: + # + # class SomeSuperClass(object): pass + # class SomeClass(SomeSuperClass): pass + # class Test(SomeClass): pass + # + # Inferring SomeClass from the Test's bases will give + # us both SomeClass and SomeSuperClass, but we are interested + # only in SomeClass. + + if context is None: + context = contextmod.InferenceContext() + if six.PY3: + if not self.bases and self.qname() != 'builtins.object': + yield builtin_lookup("object")[1][0] + return + + for stmt in self.bases: + try: + baseobj = next(stmt.infer(context=context)) + except exceptions.InferenceError: + continue + if isinstance(baseobj, bases.Instance): + baseobj = baseobj._proxied + if not isinstance(baseobj, ClassDef): + continue + if not baseobj.hide: + yield baseobj + else: + for base in baseobj.bases: + yield base + + def mro(self, context=None): + """Get the method resolution order, using C3 linearization. + + It returns the list of ancestors sorted by the mro. + This will raise `NotImplementedError` for old-style classes, since + they don't have the concept of MRO. 
+ """ + if not self.newstyle: + raise NotImplementedError( + "Could not obtain mro for old-style classes.") + + bases = list(self._inferred_bases(context=context)) + bases_mro = [] + for base in bases: + try: + mro = base.mro(context=context) + bases_mro.append(mro) + except NotImplementedError: + # Some classes have in their ancestors both newstyle and + # old style classes. For these we can't retrieve the .mro, + # although in Python it's possible, since the class we are + # currently working is in fact new style. + # So, we fallback to ancestors here. + ancestors = list(base.ancestors(context=context)) + bases_mro.append(ancestors) + + unmerged_mro = ([[self]] + bases_mro + [bases]) + _verify_duplicates_mro(unmerged_mro) + return _c3_merge(unmerged_mro) + +def get_locals(node): + '''Stub function for forwards compatibility.''' + return node._locals + +def get_attributes(node): + '''Stub function for forwards compatibility.''' + return node._instance_attrs + +# Backwards-compatibility aliases +Class = node_classes.proxy_alias('Class', ClassDef) +Function = node_classes.proxy_alias('Function', FunctionDef) +GenExpr = node_classes.proxy_alias('GenExpr', GeneratorExp) diff --git a/pymode/libs/astroid/test_utils.py b/pymode/libs/astroid/test_utils.py new file mode 100644 index 00000000..9e45abcf --- /dev/null +++ b/pymode/libs/astroid/test_utils.py @@ -0,0 +1,201 @@ +"""Utility functions for test code that uses astroid ASTs as input.""" +import functools +import sys + +from astroid import nodes +from astroid import builder +# The name of the transient function that is used to +# wrap expressions to be extracted when calling +# extract_node. +_TRANSIENT_FUNCTION = '__' + +# The comment used to select a statement to be extracted +# when calling extract_node. +_STATEMENT_SELECTOR = '#@' + +def _extract_expressions(node): + """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. 
+ + The function walks the AST recursively to search for expressions that + are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an + expression, it completely removes the function call node from the tree, + replacing it by the wrapped expression inside the parent. + + :param node: An astroid node. + :type node: astroid.bases.NodeNG + :yields: The sequence of wrapped expressions on the modified tree + expression can be found. + """ + if (isinstance(node, nodes.Call) + and isinstance(node.func, nodes.Name) + and node.func.name == _TRANSIENT_FUNCTION): + real_expr = node.args[0] + real_expr.parent = node.parent + # Search for node in all _astng_fields (the fields checked when + # get_children is called) of its parent. Some of those fields may + # be lists or tuples, in which case the elements need to be checked. + # When we find it, replace it by real_expr, so that the AST looks + # like no call to _TRANSIENT_FUNCTION ever took place. + for name in node.parent._astroid_fields: + child = getattr(node.parent, name) + if isinstance(child, (list, tuple)): + for idx, compound_child in enumerate(child): + if compound_child is node: + child[idx] = real_expr + elif child is node: + setattr(node.parent, name, real_expr) + yield real_expr + else: + for child in node.get_children(): + for result in _extract_expressions(child): + yield result + + +def _find_statement_by_line(node, line): + """Extracts the statement on a specific line from an AST. + + If the line number of node matches line, it will be returned; + otherwise its children are iterated and the function is called + recursively. + + :param node: An astroid node. + :type node: astroid.bases.NodeNG + :param line: The line number of the statement to extract. + :type line: int + :returns: The statement on the line, or None if no statement for the line + can be found. 
+ :rtype: astroid.bases.NodeNG or None + """ + if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)): + # This is an inaccuracy in the AST: the nodes that can be + # decorated do not carry explicit information on which line + # the actual definition (class/def), but .fromline seems to + # be close enough. + node_line = node.fromlineno + else: + node_line = node.lineno + + if node_line == line: + return node + + for child in node.get_children(): + result = _find_statement_by_line(child, line) + if result: + return result + + return None + +def extract_node(code, module_name=''): + """Parses some Python code as a module and extracts a designated AST node. + + Statements: + To extract one or more statement nodes, append #@ to the end of the line + + Examples: + >>> def x(): + >>> def y(): + >>> return 1 #@ + + The return statement will be extracted. + + >>> class X(object): + >>> def meth(self): #@ + >>> pass + + The funcion object 'meth' will be extracted. + + Expressions: + To extract arbitrary expressions, surround them with the fake + function call __(...). After parsing, the surrounded expression + will be returned and the whole AST (accessible via the returned + node's parent attribute) will look like the function call was + never there in the first place. + + Examples: + >>> a = __(1) + + The const node will be extracted. + + >>> def x(d=__(foo.bar)): pass + + The node containing the default argument will be extracted. + + >>> def foo(a, b): + >>> return 0 < __(len(a)) < b + + The node containing the function call 'len' will be extracted. + + If no statements or expressions are selected, the last toplevel + statement will be returned. + + If the selected statement is a discard statement, (i.e. an expression + turned into a statement), the wrapped expression is returned instead. + + For convenience, singleton lists are unpacked. + + :param str code: A piece of Python code that is parsed as + a module. Will be passed through textwrap.dedent first. 
+ :param str module_name: The name of the module. + :returns: The designated node from the parse tree, or a list of nodes. + :rtype: astroid.bases.NodeNG, or a list of nodes. + """ + def _extract(node): + if isinstance(node, nodes.Expr): + return node.value + else: + return node + + requested_lines = [] + for idx, line in enumerate(code.splitlines()): + if line.strip().endswith(_STATEMENT_SELECTOR): + requested_lines.append(idx + 1) + + tree = builder.parse(code, module_name=module_name) + extracted = [] + if requested_lines: + for line in requested_lines: + extracted.append(_find_statement_by_line(tree, line)) + + # Modifies the tree. + extracted.extend(_extract_expressions(tree)) + + if not extracted: + extracted.append(tree.body[-1]) + + extracted = [_extract(node) for node in extracted] + if len(extracted) == 1: + return extracted[0] + else: + return extracted + + +def require_version(minver=None, maxver=None): + """ Compare version of python interpreter to the given one. Skip the test + if older. + """ + def parse(string, default=None): + string = string or default + try: + return tuple(int(v) for v in string.split('.')) + except ValueError: + raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version) + + def check_require_version(f): + current = sys.version_info[:3] + if parse(minver, "0") < current <= parse(maxver, "4"): + return f + else: + str_version = '.'.join(str(v) for v in sys.version_info) + @functools.wraps(f) + def new_f(self, *args, **kwargs): + if minver is not None: + self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version)) + elif maxver is not None: + self.skipTest('Needs Python <= %s. Current version is %s.' 
% (maxver, str_version)) + return new_f + + + return check_require_version + +def get_name_node(start_from, name, index=0): + return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index] diff --git a/pymode/libs/astroid/tests/__init__.py b/pymode/libs/astroid/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/resources.py b/pymode/libs/astroid/tests/resources.py new file mode 100644 index 00000000..7988d053 --- /dev/null +++ b/pymode/libs/astroid/tests/resources.py @@ -0,0 +1,72 @@ +# Copyright 2014 Google, Inc. All rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+import os +import sys + +import pkg_resources + +from astroid import builder +from astroid import MANAGER +from astroid.bases import BUILTINS + + +DATA_DIR = 'testdata/python{}/'.format(sys.version_info[0]) + +def find(name): + return pkg_resources.resource_filename( + 'astroid.tests', os.path.normpath(os.path.join(DATA_DIR, name))) + + +def build_file(path, modname=None): + return builder.AstroidBuilder().file_build(find(path), modname) + + +class SysPathSetup(object): + def setUp(self): + sys.path.insert(0, find('')) + + def tearDown(self): + del sys.path[0] + datadir = find('') + for key in list(sys.path_importer_cache): + if key.startswith(datadir): + del sys.path_importer_cache[key] + + +class AstroidCacheSetupMixin(object): + """Mixin for handling the astroid cache problems. + + When clearing the astroid cache, some tests fails due to + cache inconsistencies, where some objects had a different + builtins object referenced. + This saves the builtins module and makes sure to add it + back to the astroid_cache after the tests finishes. + The builtins module is special, since some of the + transforms for a couple of its objects (str, bytes etc) + are executed only once, so astroid_bootstrapping will be + useless for retrieving the original builtins module. 
+ """ + + @classmethod + def setUpClass(cls): + cls._builtins = MANAGER.astroid_cache.get(BUILTINS) + + @classmethod + def tearDownClass(cls): + if cls._builtins: + MANAGER.astroid_cache[BUILTINS] = cls._builtins diff --git a/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg b/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg new file mode 100644 index 00000000..f62599c7 Binary files /dev/null and b/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg differ diff --git a/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip b/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip new file mode 100644 index 00000000..f62599c7 Binary files /dev/null and b/pymode/libs/astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip differ diff --git a/pymode/libs/astroid/tests/testdata/python2/data/SSL1/Connection1.py b/pymode/libs/astroid/tests/testdata/python2/data/SSL1/Connection1.py new file mode 100644 index 00000000..6bbb1302 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/SSL1/Connection1.py @@ -0,0 +1,14 @@ +"""M2Crypto.SSL.Connection + +Copyright (c) 1999-2004 Ng Pheng Siong. 
All rights reserved.""" +from __future__ import print_function +RCS_id='$Id: Connection1.py,v 1.1 2005-06-13 20:55:22 syt Exp $' + +#Some code deleted here + +class Connection: + + """An SSL connection.""" + + def __init__(self, ctx, sock=None): + print('init Connection') diff --git a/pymode/libs/astroid/tests/testdata/python2/data/SSL1/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/SSL1/__init__.py new file mode 100644 index 00000000..a007b049 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/SSL1/__init__.py @@ -0,0 +1 @@ +from Connection1 import Connection diff --git a/pymode/libs/astroid/tests/testdata/python2/data/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/__init__.py new file mode 100644 index 00000000..332e2e72 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/__init__.py @@ -0,0 +1 @@ +__revision__="$Id: __init__.py,v 1.1 2005-06-13 20:55:20 syt Exp $" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/absimp/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/absimp/__init__.py new file mode 100644 index 00000000..b98444df --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/absimp/__init__.py @@ -0,0 +1,5 @@ +"""a package with absolute import activated +""" + +from __future__ import absolute_import + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py new file mode 100644 index 00000000..239499a6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py @@ -0,0 +1,3 @@ +"""a side package with nothing in it +""" + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/absimp/string.py b/pymode/libs/astroid/tests/testdata/python2/data/absimp/string.py new file mode 100644 index 00000000..e68e7496 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/absimp/string.py @@ -0,0 
+1,3 @@ +from __future__ import absolute_import, print_function +import string +print(string) diff --git a/pymode/libs/astroid/tests/testdata/python2/data/absimport.py b/pymode/libs/astroid/tests/testdata/python2/data/absimport.py new file mode 100644 index 00000000..f98effa6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/absimport.py @@ -0,0 +1,3 @@ +from __future__ import absolute_import +import email +from email import message diff --git a/pymode/libs/astroid/tests/testdata/python2/data/all.py b/pymode/libs/astroid/tests/testdata/python2/data/all.py new file mode 100644 index 00000000..23f7d2b6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/all.py @@ -0,0 +1,9 @@ + +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print 'yo' + +__all__ = 'Aaa', '_bla', 'name' diff --git a/pymode/libs/astroid/tests/testdata/python2/data/appl/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/appl/__init__.py new file mode 100644 index 00000000..d652ffd9 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/appl/__init__.py @@ -0,0 +1,3 @@ +""" +Init +""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/appl/myConnection.py b/pymode/libs/astroid/tests/testdata/python2/data/appl/myConnection.py new file mode 100644 index 00000000..5b24b259 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/appl/myConnection.py @@ -0,0 +1,12 @@ +from __future__ import print_function +from data import SSL1 +class MyConnection(SSL1.Connection): + + """An SSL connection.""" + + def __init__(self, dummy): + print('MyConnection init') + +if __name__ == '__main__': + myConnection = MyConnection(' ') + raw_input('Press Enter to continue...') diff --git a/pymode/libs/astroid/tests/testdata/python2/data/clientmodule_test.py b/pymode/libs/astroid/tests/testdata/python2/data/clientmodule_test.py new file mode 100644 index 00000000..a178ca6d --- /dev/null +++ 
b/pymode/libs/astroid/tests/testdata/python2/data/clientmodule_test.py @@ -0,0 +1,32 @@ +""" docstring for file clientmodule.py """ +from data.suppliermodule_test import Interface as IFace, DoNothing + +class Toto: pass + +class Ancestor: + """ Ancestor method """ + __implements__ = (IFace,) + + def __init__(self, value): + local_variable = 0 + self.attr = 'this method shouldn\'t have a docstring' + self.__value = value + + def get_value(self): + """ nice docstring ;-) """ + return self.__value + + def set_value(self, value): + self.__value = value + return 'this method shouldn\'t have a docstring' + +class Specialization(Ancestor): + TYPE = 'final class' + top = 'class' + + def __init__(self, value, _id): + Ancestor.__init__(self, value) + self._id = _id + self.relation = DoNothing() + self.toto = Toto() + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/descriptor_crash.py b/pymode/libs/astroid/tests/testdata/python2/data/descriptor_crash.py new file mode 100644 index 00000000..11fbb4a2 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/descriptor_crash.py @@ -0,0 +1,11 @@ + +import urllib + +class Page(object): + _urlOpen = staticmethod(urllib.urlopen) + + def getPage(self, url): + handle = self._urlOpen(url) + data = handle.read() + handle.close() + return data diff --git a/pymode/libs/astroid/tests/testdata/python2/data/email.py b/pymode/libs/astroid/tests/testdata/python2/data/email.py new file mode 100644 index 00000000..dc593564 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/email.py @@ -0,0 +1 @@ +"""fake email module to test absolute import doesn't grab this one""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/find_test/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/find_test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python2/data/find_test/module.py 
b/pymode/libs/astroid/tests/testdata/python2/data/find_test/module.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python2/data/find_test/module2.py b/pymode/libs/astroid/tests/testdata/python2/data/find_test/module2.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python2/data/find_test/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python2/data/find_test/noendingnewline.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python2/data/find_test/nonregr.py b/pymode/libs/astroid/tests/testdata/python2/data/find_test/nonregr.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python2/data/format.py b/pymode/libs/astroid/tests/testdata/python2/data/format.py new file mode 100644 index 00000000..73797061 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/format.py @@ -0,0 +1,34 @@ +"""A multiline string +""" + +function('aeozrijz\ +earzer', hop) +# XXX write test +x = [i for i in range(5) + if i % 4] + +fonction(1, + 2, + 3, + 4) + +def definition(a, + b, + c): + return a + b + c + +class debile(dict, + object): + pass + +if aaaa: pass +else: + aaaa,bbbb = 1,2 + aaaa,bbbb = bbbb,aaaa +# XXX write test +hop = \ + aaaa + + +__revision__.lower(); + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/joined_strings.py b/pymode/libs/astroid/tests/testdata/python2/data/joined_strings.py new file mode 100644 index 00000000..302e7cd7 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/joined_strings.py @@ -0,0 +1,1051 @@ +x = ('R0lGODlhigJnAef/AAABAAEEAAkCAAMGAg0GBAYJBQoMCBMODQ4QDRITEBkS' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' 
+ +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7') \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python2/data/lmfp/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/lmfp/__init__.py new file mode 100644 index 00000000..74b26b82 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/lmfp/__init__.py @@ -0,0 +1,2 @@ 
+# force a "direct" python import +from . import foo diff --git a/pymode/libs/astroid/tests/testdata/python2/data/lmfp/foo.py b/pymode/libs/astroid/tests/testdata/python2/data/lmfp/foo.py new file mode 100644 index 00000000..8f7de1e8 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/lmfp/foo.py @@ -0,0 +1,6 @@ +import sys +if not getattr(sys, 'bar', None): + sys.just_once = [] +# there used to be two numbers here because +# of a load_module_from_path bug +sys.just_once.append(42) diff --git a/pymode/libs/astroid/tests/testdata/python2/data/module.py b/pymode/libs/astroid/tests/testdata/python2/data/module.py new file mode 100644 index 00000000..6a67b9b6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/module.py @@ -0,0 +1,89 @@ +"""test module for astroid +""" + +__revision__ = '$Id: module.py,v 1.2 2005-11-02 11:56:54 syt Exp $' +from astroid.node_classes import Name as NameNode +from astroid import modutils +from astroid.utils import * +import os.path +MY_DICT = {} + +def global_access(key, val): + """function test""" + local = 1 + MY_DICT[key] = val + for i in val: + if i: + del MY_DICT[i] + continue + else: + break + else: + return local + + +class YO: + """hehe""" + a = 1 + + def __init__(self): + try: + self.yo = 1 + except ValueError, ex: + pass + except (NameError, TypeError): + raise XXXError() + except: + raise + + + +class YOUPI(YO): + class_attr = None + + def __init__(self): + self.member = None + + def method(self): + """method test""" + global MY_DICT + try: + MY_DICT = {} + local = None + autre = [a for (a, b) in MY_DICT if b] + if b in autre: + return b + else: + if a in autre: + return a + global_access(local, val=autre) + finally: + return local + + def static_method(): + """static method test""" + assert MY_DICT, '???' 
+ static_method = staticmethod(static_method) + + def class_method(cls): + """class method test""" + exec a in b + class_method = classmethod(class_method) + + +def four_args(a, b, c, d): + """four arguments (was nested_args)""" + pass + while 1: + if a: + break + a += +1 + else: + b += -2 + if c: + d = ((a) and (b)) or (c) + else: + c = ((a) and (b)) or (d) + map(lambda x, y: (y, x), a) +redirect = four_args + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/module1abs/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/module1abs/__init__.py new file mode 100644 index 00000000..42949a44 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/module1abs/__init__.py @@ -0,0 +1,4 @@ +from __future__ import absolute_import, print_function +from . import core +from .core import * +print(sys.version) diff --git a/pymode/libs/astroid/tests/testdata/python2/data/module1abs/core.py b/pymode/libs/astroid/tests/testdata/python2/data/module1abs/core.py new file mode 100644 index 00000000..de101117 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/module1abs/core.py @@ -0,0 +1 @@ +import sys diff --git a/pymode/libs/astroid/tests/testdata/python2/data/module2.py b/pymode/libs/astroid/tests/testdata/python2/data/module2.py new file mode 100644 index 00000000..0a1bd1ad --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/module2.py @@ -0,0 +1,143 @@ +from data.module import YO, YOUPI +import data + + +class Specialization(YOUPI, YO): + pass + + + +class Metaclass(type): + pass + + + +class Interface: + pass + + + +class MyIFace(Interface): + pass + + + +class AnotherIFace(Interface): + pass + + + +class MyException(Exception): + pass + + + +class MyError(MyException): + pass + + + +class AbstractClass(object): + + def to_override(self, whatever): + raise NotImplementedError() + + def return_something(self, param): + if param: + return 'toto' + return + + + +class Concrete0: + __implements__ = MyIFace + + + 
+class Concrete1: + __implements__ = (MyIFace, AnotherIFace) + + + +class Concrete2: + __implements__ = (MyIFace, AnotherIFace) + + + +class Concrete23(Concrete1): + pass + +del YO.member +del YO +[SYN1, SYN2] = (Concrete0, Concrete1) +assert '1' +b = (1) | (((2) & (3)) ^ (8)) +bb = ((1) | (two)) | (6) +ccc = ((one) & (two)) & (three) +dddd = ((x) ^ (o)) ^ (r) +exec 'c = 3' +exec 'c = 3' in {}, {} + +def raise_string(a=2, *args, **kwargs): + raise Exception, 'yo' + yield 'coucou' + yield +a = (b) + (2) +c = (b) * (2) +c = (b) / (2) +c = (b) // (2) +c = (b) - (2) +c = (b) % (2) +c = (b) ** (2) +c = (b) << (2) +c = (b) >> (2) +c = ~b +c = not b +d = [c] +e = d[:] +e = d[a:b:c] +raise_string(*args, **kwargs) +print >> stream, 'bonjour' +print >> stream, 'salut', + +def make_class(any, base=data.module.YO, *args, **kwargs): + """check base is correctly resolved to Concrete0""" + + + class Aaaa(base): + """dynamic class""" + + + return Aaaa +from os.path import abspath +import os as myos + + +class A: + pass + + + +class A(A): + pass + + +def generator(): + """A generator.""" + yield + +def not_a_generator(): + """A function that contains generator, but is not one.""" + + def generator(): + yield + genl = lambda : (yield) + +def with_metaclass(meta, *bases): + return meta('NewBase', bases, {}) + + +class NotMetaclass(with_metaclass(Metaclass)): + pass + + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py new file mode 100644 index 00000000..e1d6e4a1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/noendingnewline.py @@ -0,0 +1,36 @@ +import unittest + + +class TestCase(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + + + def tearDown(self): + unittest.TestCase.tearDown(self) + + def testIt(self): + self.a = 10 + self.xxx() + + + def xxx(self): + if False: + pass + print 'a' + + if False: + pass + pass + + if False: + pass + 
print 'rara' + + +if __name__ == '__main__': + print 'test2' + unittest.main() + + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py b/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py new file mode 100644 index 00000000..813469fe --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/nonregr.py @@ -0,0 +1,57 @@ +from __future__ import generators, print_function + +try: + enumerate = enumerate +except NameError: + + def enumerate(iterable): + """emulates the python2.3 enumerate() function""" + i = 0 + for val in iterable: + yield i, val + i += 1 + +def toto(value): + for k, v in value: + print(v.get('yo')) + + +import imp +fp, mpath, desc = imp.find_module('optparse',a) +s_opt = imp.load_module('std_optparse', fp, mpath, desc) + +class OptionParser(s_opt.OptionParser): + + def parse_args(self, args=None, values=None, real_optparse=False): + if real_optparse: + pass +## return super(OptionParser, self).parse_args() + else: + import optcomp + optcomp.completion(self) + + +class Aaa(object): + """docstring""" + def __init__(self): + self.__setattr__('a','b') + pass + + def one_public(self): + """docstring""" + pass + + def another_public(self): + """docstring""" + pass + +class Ccc(Aaa): + """docstring""" + + class Ddd(Aaa): + """docstring""" + pass + + class Eee(Ddd): + """docstring""" + pass diff --git a/pymode/libs/astroid/tests/testdata/python2/data/notall.py b/pymode/libs/astroid/tests/testdata/python2/data/notall.py new file mode 100644 index 00000000..7be27b18 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/notall.py @@ -0,0 +1,7 @@ +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print('yo') + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py new file mode 100644 index 00000000..575d18b1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/__init__.py @@ 
-0,0 +1,4 @@ +"""package's __init__ file""" + + +from . import subpackage diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py b/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py new file mode 100644 index 00000000..33ed117c --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/absimport.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import, print_function +import import_package_subpackage_module # fail +print(import_package_subpackage_module) + +from . import hello as hola + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py b/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py new file mode 100644 index 00000000..b154c844 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/hello.py @@ -0,0 +1,2 @@ +"""hello module""" + diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py b/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py new file mode 100644 index 00000000..ad442c16 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py @@ -0,0 +1,49 @@ +# pylint: disable-msg=I0011,C0301,W0611 +"""I found some of my scripts trigger off an AttributeError in pylint +0.8.1 (with common 0.12.0 and astroid 0.13.1). + +Traceback (most recent call last): + File "/usr/bin/pylint", line 4, in ? 
+ lint.Run(sys.argv[1:]) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 729, in __init__ + linter.check(args) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 412, in check + self.check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 426, in check_file + astroid = self._check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 450, in _check_file + self.check_astroid_module(astroid, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 494, in check_astroid_module + self.astroid_events(astroid, [checker for checker in checkers + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 508, in astroid_events + checker.visit(astroid) + File "/usr/lib/python2.4/site-packages/logilab/astroid/utils.py", line 84, in visit + method(node) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 295, in visit_import + self._check_module_attrs(node, module, name_parts[1:]) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 357, in _check_module_attrs + self.add_message('E0611', args=(name, module.name), +AttributeError: Import instance has no attribute 'name' + + +You can reproduce it by: +(1) create package structure like the following: + +package/ + __init__.py + subpackage/ + __init__.py + module.py + +(2) in package/__init__.py write: + +import subpackage + +(3) run pylint with a script importing package.subpackage.module. 
+""" +__revision__ = '$Id: import_package_subpackage_module.py,v 1.1 2005-11-10 15:59:32 syt Exp $' +import package.subpackage.module diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py new file mode 100644 index 00000000..dc4782e6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/__init__.py @@ -0,0 +1 @@ +"""package.subpackage""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py new file mode 100644 index 00000000..4b7244ba --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/package/subpackage/module.py @@ -0,0 +1 @@ +"""package.subpackage.module""" diff --git a/pymode/libs/astroid/tests/testdata/python2/data/recursion.py b/pymode/libs/astroid/tests/testdata/python2/data/recursion.py new file mode 100644 index 00000000..85f65134 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/recursion.py @@ -0,0 +1,3 @@ +""" For issue #25 """ +class Base(object): + pass \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py b/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py new file mode 100644 index 00000000..ddacb477 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python2/data/suppliermodule_test.py @@ -0,0 +1,13 @@ +""" file suppliermodule.py """ + +class NotImplemented(Exception): + pass + +class Interface: + def get_value(self): + raise NotImplemented() + + def set_value(self, value): + raise NotImplemented() + +class DoNothing : pass diff --git a/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py new file mode 100644 index 00000000..713e5591 --- /dev/null +++ 
b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/__init__.py @@ -0,0 +1 @@ +x = "șțîâ" \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py b/pymode/libs/astroid/tests/testdata/python2/data/unicode_package/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg b/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg new file mode 100644 index 00000000..f62599c7 Binary files /dev/null and b/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg differ diff --git a/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip b/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip new file mode 100644 index 00000000..f62599c7 Binary files /dev/null and b/pymode/libs/astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip differ diff --git a/pymode/libs/astroid/tests/testdata/python3/data/SSL1/Connection1.py b/pymode/libs/astroid/tests/testdata/python3/data/SSL1/Connection1.py new file mode 100644 index 00000000..7373271d --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/SSL1/Connection1.py @@ -0,0 +1,14 @@ +"""M2Crypto.SSL.Connection + +Copyright (c) 1999-2004 Ng Pheng Siong. 
All rights reserved.""" + +RCS_id='$Id: Connection1.py,v 1.1 2005-06-13 20:55:22 syt Exp $' + +#Some code deleted here + +class Connection: + + """An SSL connection.""" + + def __init__(self, ctx, sock=None): + print('init Connection') diff --git a/pymode/libs/astroid/tests/testdata/python3/data/SSL1/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/SSL1/__init__.py new file mode 100644 index 00000000..c83ededc --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/SSL1/__init__.py @@ -0,0 +1 @@ +from .Connection1 import Connection diff --git a/pymode/libs/astroid/tests/testdata/python3/data/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/__init__.py new file mode 100644 index 00000000..332e2e72 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/__init__.py @@ -0,0 +1 @@ +__revision__="$Id: __init__.py,v 1.1 2005-06-13 20:55:20 syt Exp $" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/absimp/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/absimp/__init__.py new file mode 100644 index 00000000..b98444df --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/absimp/__init__.py @@ -0,0 +1,5 @@ +"""a package with absolute import activated +""" + +from __future__ import absolute_import + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py new file mode 100644 index 00000000..239499a6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py @@ -0,0 +1,3 @@ +"""a side package with nothing in it +""" + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/absimp/string.py b/pymode/libs/astroid/tests/testdata/python3/data/absimp/string.py new file mode 100644 index 00000000..e68e7496 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/absimp/string.py @@ -0,0 +1,3 @@ +from __future__ import 
absolute_import, print_function +import string +print(string) diff --git a/pymode/libs/astroid/tests/testdata/python3/data/absimport.py b/pymode/libs/astroid/tests/testdata/python3/data/absimport.py new file mode 100644 index 00000000..88f9d955 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/absimport.py @@ -0,0 +1,3 @@ + +import email +from email import message diff --git a/pymode/libs/astroid/tests/testdata/python3/data/all.py b/pymode/libs/astroid/tests/testdata/python3/data/all.py new file mode 100644 index 00000000..587765b5 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/all.py @@ -0,0 +1,9 @@ + +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print('yo') + +__all__ = 'Aaa', '_bla', 'name' diff --git a/pymode/libs/astroid/tests/testdata/python3/data/appl/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/appl/__init__.py new file mode 100644 index 00000000..d652ffd9 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/appl/__init__.py @@ -0,0 +1,3 @@ +""" +Init +""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/appl/myConnection.py b/pymode/libs/astroid/tests/testdata/python3/data/appl/myConnection.py new file mode 100644 index 00000000..49269534 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/appl/myConnection.py @@ -0,0 +1,11 @@ +from data import SSL1 +class MyConnection(SSL1.Connection): + + """An SSL connection.""" + + def __init__(self, dummy): + print('MyConnection init') + +if __name__ == '__main__': + myConnection = MyConnection(' ') + input('Press Enter to continue...') diff --git a/pymode/libs/astroid/tests/testdata/python3/data/clientmodule_test.py b/pymode/libs/astroid/tests/testdata/python3/data/clientmodule_test.py new file mode 100644 index 00000000..a178ca6d --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/clientmodule_test.py @@ -0,0 +1,32 @@ +""" docstring for file clientmodule.py """ +from 
data.suppliermodule_test import Interface as IFace, DoNothing + +class Toto: pass + +class Ancestor: + """ Ancestor method """ + __implements__ = (IFace,) + + def __init__(self, value): + local_variable = 0 + self.attr = 'this method shouldn\'t have a docstring' + self.__value = value + + def get_value(self): + """ nice docstring ;-) """ + return self.__value + + def set_value(self, value): + self.__value = value + return 'this method shouldn\'t have a docstring' + +class Specialization(Ancestor): + TYPE = 'final class' + top = 'class' + + def __init__(self, value, _id): + Ancestor.__init__(self, value) + self._id = _id + self.relation = DoNothing() + self.toto = Toto() + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/descriptor_crash.py b/pymode/libs/astroid/tests/testdata/python3/data/descriptor_crash.py new file mode 100644 index 00000000..11fbb4a2 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/descriptor_crash.py @@ -0,0 +1,11 @@ + +import urllib + +class Page(object): + _urlOpen = staticmethod(urllib.urlopen) + + def getPage(self, url): + handle = self._urlOpen(url) + data = handle.read() + handle.close() + return data diff --git a/pymode/libs/astroid/tests/testdata/python3/data/email.py b/pymode/libs/astroid/tests/testdata/python3/data/email.py new file mode 100644 index 00000000..dc593564 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/email.py @@ -0,0 +1 @@ +"""fake email module to test absolute import doesn't grab this one""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/find_test/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/find_test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/find_test/module.py b/pymode/libs/astroid/tests/testdata/python3/data/find_test/module.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/find_test/module2.py 
b/pymode/libs/astroid/tests/testdata/python3/data/find_test/module2.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/find_test/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python3/data/find_test/noendingnewline.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/find_test/nonregr.py b/pymode/libs/astroid/tests/testdata/python3/data/find_test/nonregr.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/testdata/python3/data/format.py b/pymode/libs/astroid/tests/testdata/python3/data/format.py new file mode 100644 index 00000000..73797061 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/format.py @@ -0,0 +1,34 @@ +"""A multiline string +""" + +function('aeozrijz\ +earzer', hop) +# XXX write test +x = [i for i in range(5) + if i % 4] + +fonction(1, + 2, + 3, + 4) + +def definition(a, + b, + c): + return a + b + c + +class debile(dict, + object): + pass + +if aaaa: pass +else: + aaaa,bbbb = 1,2 + aaaa,bbbb = bbbb,aaaa +# XXX write test +hop = \ + aaaa + + +__revision__.lower(); + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/joined_strings.py b/pymode/libs/astroid/tests/testdata/python3/data/joined_strings.py new file mode 100644 index 00000000..302e7cd7 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/joined_strings.py @@ -0,0 +1,1051 @@ +x = ('R0lGODlhigJnAef/AAABAAEEAAkCAAMGAg0GBAYJBQoMCBMODQ4QDRITEBkS' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + 
+'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7' + +'CxsSEhkWDhYYFQ0aJhkaGBweGyccGh8hHiIkIiMmGTEiHhQoPSYoJSkqKDcp' + +'Ii0uLDAxLzI0Mh44U0gxMDI5JkM0JjU3NDY6Kjc5Njo7OUE8Ozw+Oz89QTxA' + +'F1akOFFiRIgPHTZksKBAgMCLGTdGNIAAQgKfDAcgZbj0odOnUA8GBAA7') \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python3/data/lmfp/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/lmfp/__init__.py new file mode 100644 index 00000000..74b26b82 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/lmfp/__init__.py @@ -0,0 +1,2 @@ +# force a "direct" python import +from . 
import foo diff --git a/pymode/libs/astroid/tests/testdata/python3/data/lmfp/foo.py b/pymode/libs/astroid/tests/testdata/python3/data/lmfp/foo.py new file mode 100644 index 00000000..8f7de1e8 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/lmfp/foo.py @@ -0,0 +1,6 @@ +import sys +if not getattr(sys, 'bar', None): + sys.just_once = [] +# there used to be two numbers here because +# of a load_module_from_path bug +sys.just_once.append(42) diff --git a/pymode/libs/astroid/tests/testdata/python3/data/module.py b/pymode/libs/astroid/tests/testdata/python3/data/module.py new file mode 100644 index 00000000..2a5fb58c --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/module.py @@ -0,0 +1,88 @@ +"""test module for astroid +""" + +__revision__ = '$Id: module.py,v 1.2 2005-11-02 11:56:54 syt Exp $' +from astroid.node_classes import Name as NameNode +from astroid import modutils +from astroid.utils import * +import os.path +MY_DICT = {} + +def global_access(key, val): + """function test""" + local = 1 + MY_DICT[key] = val + for i in val: + if i: + del MY_DICT[i] + continue + else: + break + else: + return + + +class YO: + """hehe""" + a = 1 + + def __init__(self): + try: + self.yo = 1 + except ValueError as ex: + pass + except (NameError, TypeError): + raise XXXError() + except: + raise + + + +class YOUPI(YO): + class_attr = None + + def __init__(self): + self.member = None + + def method(self): + """method test""" + global MY_DICT + try: + MY_DICT = {} + local = None + autre = [a for (a, b) in MY_DICT if b] + if b in autre: + return + else: + if a in autre: + return 'hehe' + global_access(local, val=autre) + finally: + return local + + def static_method(): + """static method test""" + assert MY_DICT, '???' 
+ static_method = staticmethod(static_method) + + def class_method(cls): + """class method test""" + exec(a, b) + class_method = classmethod(class_method) + + +def four_args(a, b, c, d): + """four arguments (was nested_args)""" + while 1: + if a: + break + a += +1 + else: + b += -2 + if c: + d = ((a) and (b)) or (c) + else: + c = ((a) and (b)) or (d) + list(map(lambda x, y: (y, x), a)) +redirect = four_args + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/module1abs/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/module1abs/__init__.py new file mode 100644 index 00000000..f9d5b686 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/module1abs/__init__.py @@ -0,0 +1,4 @@ + +from . import core +from .core import * +print(sys.version) diff --git a/pymode/libs/astroid/tests/testdata/python3/data/module1abs/core.py b/pymode/libs/astroid/tests/testdata/python3/data/module1abs/core.py new file mode 100644 index 00000000..de101117 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/module1abs/core.py @@ -0,0 +1 @@ +import sys diff --git a/pymode/libs/astroid/tests/testdata/python3/data/module2.py b/pymode/libs/astroid/tests/testdata/python3/data/module2.py new file mode 100644 index 00000000..1171bdfe --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/module2.py @@ -0,0 +1,143 @@ +from data.module import YO, YOUPI +import data + + +class Specialization(YOUPI, YO): + pass + + + +class Metaclass(type): + pass + + + +class Interface: + pass + + + +class MyIFace(Interface): + pass + + + +class AnotherIFace(Interface): + pass + + + +class MyException(Exception): + pass + + + +class MyError(MyException): + pass + + + +class AbstractClass(object): + + def to_override(self, whatever): + raise NotImplementedError() + + def return_something(self, param): + if param: + return 'toto' + return + + + +class Concrete0: + __implements__ = MyIFace + + + +class Concrete1: + __implements__ = (MyIFace, AnotherIFace) 
+ + + +class Concrete2: + __implements__ = (MyIFace, AnotherIFace) + + + +class Concrete23(Concrete1): + pass + +del YO.member +del YO +[SYN1, SYN2] = (Concrete0, Concrete1) +assert repr(1) +b = (1) | (((2) & (3)) ^ (8)) +bb = ((1) | (two)) | (6) +ccc = ((one) & (two)) & (three) +dddd = ((x) ^ (o)) ^ (r) +exec('c = 3') +exec('c = 3', {}, {}) + +def raise_string(a=2, *args, **kwargs): + raise Exception('yo') + yield 'coucou' + yield +a = (b) + (2) +c = (b) * (2) +c = (b) / (2) +c = (b) // (2) +c = (b) - (2) +c = (b) % (2) +c = (b) ** (2) +c = (b) << (2) +c = (b) >> (2) +c = ~b +c = not b +d = [c] +e = d[:] +e = d[a:b:c] +raise_string(*args, **kwargs) +print('bonjour', file=stream) +print('salut', end=' ', file=stream) + +def make_class(any, base=data.module.YO, *args, **kwargs): + """check base is correctly resolved to Concrete0""" + + + class Aaaa(base): + """dynamic class""" + + + return Aaaa +from os.path import abspath +import os as myos + + +class A: + pass + + + +class A(A): + pass + + +def generator(): + """A generator.""" + yield + +def not_a_generator(): + """A function that contains generator, but is not one.""" + + def generator(): + yield + genl = lambda : (yield) + +def with_metaclass(meta, *bases): + return meta('NewBase', bases, {}) + + +class NotMetaclass(with_metaclass(Metaclass)): + pass + + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py b/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py new file mode 100644 index 00000000..e17b92cc --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/noendingnewline.py @@ -0,0 +1,36 @@ +import unittest + + +class TestCase(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + + + def tearDown(self): + unittest.TestCase.tearDown(self) + + def testIt(self): + self.a = 10 + self.xxx() + + + def xxx(self): + if False: + pass + print('a') + + if False: + pass + pass + + if False: + pass + print('rara') + + +if __name__ == '__main__': 
+ print('test2') + unittest.main() + + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py b/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py new file mode 100644 index 00000000..78765c85 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/nonregr.py @@ -0,0 +1,57 @@ + + +try: + enumerate = enumerate +except NameError: + + def enumerate(iterable): + """emulates the python2.3 enumerate() function""" + i = 0 + for val in iterable: + yield i, val + i += 1 + +def toto(value): + for k, v in value: + print(v.get('yo')) + + +import imp +fp, mpath, desc = imp.find_module('optparse',a) +s_opt = imp.load_module('std_optparse', fp, mpath, desc) + +class OptionParser(s_opt.OptionParser): + + def parse_args(self, args=None, values=None, real_optparse=False): + if real_optparse: + pass +## return super(OptionParser, self).parse_args() + else: + import optcomp + optcomp.completion(self) + + +class Aaa(object): + """docstring""" + def __init__(self): + self.__setattr__('a','b') + pass + + def one_public(self): + """docstring""" + pass + + def another_public(self): + """docstring""" + pass + +class Ccc(Aaa): + """docstring""" + + class Ddd(Aaa): + """docstring""" + pass + + class Eee(Ddd): + """docstring""" + pass diff --git a/pymode/libs/astroid/tests/testdata/python3/data/notall.py b/pymode/libs/astroid/tests/testdata/python3/data/notall.py new file mode 100644 index 00000000..9d35aa3a --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/notall.py @@ -0,0 +1,8 @@ + +name = 'a' +_bla = 2 +other = 'o' +class Aaa: pass + +def func(): print('yo') + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py new file mode 100644 index 00000000..575d18b1 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/__init__.py @@ -0,0 +1,4 @@ +"""package's __init__ file""" + + +from . 
import subpackage diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py b/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py new file mode 100644 index 00000000..33ed117c --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/absimport.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import, print_function +import import_package_subpackage_module # fail +print(import_package_subpackage_module) + +from . import hello as hola + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py b/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py new file mode 100644 index 00000000..b154c844 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/hello.py @@ -0,0 +1,2 @@ +"""hello module""" + diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py b/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py new file mode 100644 index 00000000..ad442c16 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py @@ -0,0 +1,49 @@ +# pylint: disable-msg=I0011,C0301,W0611 +"""I found some of my scripts trigger off an AttributeError in pylint +0.8.1 (with common 0.12.0 and astroid 0.13.1). + +Traceback (most recent call last): + File "/usr/bin/pylint", line 4, in ? 
+ lint.Run(sys.argv[1:]) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 729, in __init__ + linter.check(args) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 412, in check + self.check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 426, in check_file + astroid = self._check_file(filepath, modname, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 450, in _check_file + self.check_astroid_module(astroid, checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 494, in check_astroid_module + self.astroid_events(astroid, [checker for checker in checkers + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 511, in astroid_events + self.astroid_events(child, checkers, _reversed_checkers) + File "/usr/lib/python2.4/site-packages/pylint/lint.py", line 508, in astroid_events + checker.visit(astroid) + File "/usr/lib/python2.4/site-packages/logilab/astroid/utils.py", line 84, in visit + method(node) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 295, in visit_import + self._check_module_attrs(node, module, name_parts[1:]) + File "/usr/lib/python2.4/site-packages/pylint/checkers/variables.py", line 357, in _check_module_attrs + self.add_message('E0611', args=(name, module.name), +AttributeError: Import instance has no attribute 'name' + + +You can reproduce it by: +(1) create package structure like the following: + +package/ + __init__.py + subpackage/ + __init__.py + module.py + +(2) in package/__init__.py write: + +import subpackage + +(3) run pylint with a script importing package.subpackage.module. 
+""" +__revision__ = '$Id: import_package_subpackage_module.py,v 1.1 2005-11-10 15:59:32 syt Exp $' +import package.subpackage.module diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py new file mode 100644 index 00000000..dc4782e6 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/__init__.py @@ -0,0 +1 @@ +"""package.subpackage""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py new file mode 100644 index 00000000..4b7244ba --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/package/subpackage/module.py @@ -0,0 +1 @@ +"""package.subpackage.module""" diff --git a/pymode/libs/astroid/tests/testdata/python3/data/recursion.py b/pymode/libs/astroid/tests/testdata/python3/data/recursion.py new file mode 100644 index 00000000..85f65134 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/recursion.py @@ -0,0 +1,3 @@ +""" For issue #25 """ +class Base(object): + pass \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py b/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py new file mode 100644 index 00000000..ddacb477 --- /dev/null +++ b/pymode/libs/astroid/tests/testdata/python3/data/suppliermodule_test.py @@ -0,0 +1,13 @@ +""" file suppliermodule.py """ + +class NotImplemented(Exception): + pass + +class Interface: + def get_value(self): + raise NotImplemented() + + def set_value(self, value): + raise NotImplemented() + +class DoNothing : pass diff --git a/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py new file mode 100644 index 00000000..713e5591 --- /dev/null +++ 
b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/__init__.py @@ -0,0 +1 @@ +x = "șțîâ" \ No newline at end of file diff --git a/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py b/pymode/libs/astroid/tests/testdata/python3/data/unicode_package/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pymode/libs/astroid/tests/unittest_brain.py b/pymode/libs/astroid/tests/unittest_brain.py new file mode 100644 index 00000000..9dbbe1d0 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_brain.py @@ -0,0 +1,506 @@ +# Copyright 2013 Google Inc. All Rights Reserved. +# +# This file is part of astroid. +# +# logilab-astng is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# logilab-astng is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with logilab-astng. If not, see . 
+"""Tests for basic functionality in astroid.brain.""" +import sys +import unittest + +import six + +from astroid import MANAGER +from astroid import bases +from astroid import builder +from astroid import nodes +from astroid import test_utils +from astroid import util +import astroid + + +try: + import nose # pylint: disable=unused-import + HAS_NOSE = True +except ImportError: + HAS_NOSE = False + +try: + import multiprocessing # pylint: disable=unused-import + HAS_MULTIPROCESSING = True +except ImportError: + HAS_MULTIPROCESSING = False + +try: + import enum # pylint: disable=unused-import + HAS_ENUM = True +except ImportError: + HAS_ENUM = False + +try: + import dateutil # pylint: disable=unused-import + HAS_DATEUTIL = True +except ImportError: + HAS_DATEUTIL = False + +try: + import numpy # pylint: disable=unused-import + HAS_NUMPY = True +except ImportError: + HAS_NUMPY = False + +try: + import pytest # pylint: disable=unused-import + HAS_PYTEST = True +except ImportError: + HAS_PYTEST = False + + +class HashlibTest(unittest.TestCase): + def test_hashlib(self): + """Tests that brain extensions for hashlib work.""" + hashlib_module = MANAGER.ast_from_module_name('hashlib') + for class_name in ['md5', 'sha1']: + class_obj = hashlib_module[class_name] + self.assertIn('update', class_obj) + self.assertIn('digest', class_obj) + self.assertIn('hexdigest', class_obj) + self.assertIn('block_size', class_obj) + self.assertIn('digest_size', class_obj) + self.assertEqual(len(class_obj['__init__'].args.args), 2) + self.assertEqual(len(class_obj['__init__'].args.defaults), 1) + self.assertEqual(len(class_obj['update'].args.args), 2) + self.assertEqual(len(class_obj['digest'].args.args), 1) + self.assertEqual(len(class_obj['hexdigest'].args.args), 1) + + +class NamedTupleTest(unittest.TestCase): + + def test_namedtuple_base(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + class X(namedtuple("X", ["a", "b", "c"])): + pass + """) + 
self.assertEqual( + [anc.name for anc in klass.ancestors()], + ['X', 'tuple', 'object']) + for anc in klass.ancestors(): + self.assertFalse(anc.parent is None) + + def test_namedtuple_inference(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + name = "X" + fields = ["a", "b", "c"] + class X(namedtuple(name, fields)): + pass + """) + for base in klass.ancestors(): + if base.name == 'X': + break + self.assertSetEqual({"a", "b", "c"}, set(base._instance_attrs)) + + def test_namedtuple_inference_failure(self): + klass = test_utils.extract_node(""" + from collections import namedtuple + + def foo(fields): + return __(namedtuple("foo", fields)) + """) + self.assertIs(util.YES, next(klass.infer())) + + @unittest.skipIf(sys.version_info[0] > 2, + 'namedtuple inference is broken on Python 3') + def test_namedtuple_advanced_inference(self): + # urlparse return an object of class ParseResult, which has a + # namedtuple call and a mixin as base classes + result = test_utils.extract_node(""" + import urlparse + + result = __(urlparse.urlparse('gopher://')) + """) + instance = next(result.infer()) + self.assertEqual(len(instance.getattr('scheme')), 1) + self.assertEqual(len(instance.getattr('port')), 1) + with self.assertRaises(astroid.NotFoundError): + instance.getattr('foo') + self.assertEqual(len(instance.getattr('geturl')), 1) + self.assertEqual(instance.name, 'ParseResult') + + def test_namedtuple_instance_attrs(self): + result = test_utils.extract_node(''' + from collections import namedtuple + namedtuple('a', 'a b c')(1, 2, 3) #@ + ''') + inferred = next(result.infer()) + for name, attr in inferred._instance_attrs.items(): + self.assertEqual(attr[0].attrname, name) + + def test_namedtuple_uninferable_fields(self): + node = test_utils.extract_node(''' + x = [A] * 2 + from collections import namedtuple + l = namedtuple('a', x) + l(1) + ''') + inferred = next(node.infer()) + self.assertIs(util.YES, inferred) + + +class 
ModuleExtenderTest(unittest.TestCase): + def testExtensionModules(self): + transformer = MANAGER._transform + for extender, _ in transformer.transforms[nodes.Module]: + n = nodes.Module('__main__', None) + extender(n) + + +@unittest.skipUnless(HAS_NOSE, "This test requires nose library.") +class NoseBrainTest(unittest.TestCase): + + def test_nose_tools(self): + methods = test_utils.extract_node(""" + from nose.tools import assert_equal + from nose.tools import assert_equals + from nose.tools import assert_true + assert_equal = assert_equal #@ + assert_true = assert_true #@ + assert_equals = assert_equals #@ + """) + assert_equal = next(methods[0].value.infer()) + assert_true = next(methods[1].value.infer()) + assert_equals = next(methods[2].value.infer()) + + self.assertIsInstance(assert_equal, astroid.BoundMethod) + self.assertIsInstance(assert_true, astroid.BoundMethod) + self.assertIsInstance(assert_equals, astroid.BoundMethod) + self.assertEqual(assert_equal.qname(), + 'unittest.case.TestCase.assertEqual') + self.assertEqual(assert_true.qname(), + 'unittest.case.TestCase.assertTrue') + self.assertEqual(assert_equals.qname(), + 'unittest.case.TestCase.assertEqual') + + +class SixBrainTest(unittest.TestCase): + + def test_attribute_access(self): + ast_nodes = test_utils.extract_node(''' + import six + six.moves.http_client #@ + six.moves.urllib_parse #@ + six.moves.urllib_error #@ + six.moves.urllib.request #@ + ''') + http_client = next(ast_nodes[0].infer()) + self.assertIsInstance(http_client, nodes.Module) + self.assertEqual(http_client.name, + 'http.client' if six.PY3 else 'httplib') + + urllib_parse = next(ast_nodes[1].infer()) + if six.PY3: + self.assertIsInstance(urllib_parse, nodes.Module) + self.assertEqual(urllib_parse.name, 'urllib.parse') + else: + # On Python 2, this is a fake module, the same behaviour + # being mimicked in brain's tip for six.moves. 
+ self.assertIsInstance(urllib_parse, astroid.Instance) + urljoin = next(urllib_parse.igetattr('urljoin')) + urlencode = next(urllib_parse.igetattr('urlencode')) + if six.PY2: + # In reality it's a function, but our implementations + # transforms it into a method. + self.assertIsInstance(urljoin, astroid.BoundMethod) + self.assertEqual(urljoin.qname(), 'urlparse.urljoin') + self.assertIsInstance(urlencode, astroid.BoundMethod) + self.assertEqual(urlencode.qname(), 'urllib.urlencode') + else: + self.assertIsInstance(urljoin, nodes.FunctionDef) + self.assertEqual(urljoin.qname(), 'urllib.parse.urljoin') + self.assertIsInstance(urlencode, nodes.FunctionDef) + self.assertEqual(urlencode.qname(), 'urllib.parse.urlencode') + + urllib_error = next(ast_nodes[2].infer()) + if six.PY3: + self.assertIsInstance(urllib_error, nodes.Module) + self.assertEqual(urllib_error.name, 'urllib.error') + else: + # On Python 2, this is a fake module, the same behaviour + # being mimicked in brain's tip for six.moves. + self.assertIsInstance(urllib_error, astroid.Instance) + urlerror = next(urllib_error.igetattr('URLError')) + self.assertIsInstance(urlerror, nodes.ClassDef) + content_too_short = next(urllib_error.igetattr('ContentTooShortError')) + self.assertIsInstance(content_too_short, nodes.ClassDef) + + urllib_request = next(ast_nodes[3].infer()) + if six.PY3: + self.assertIsInstance(urllib_request, nodes.Module) + self.assertEqual(urllib_request.name, 'urllib.request') + else: + self.assertIsInstance(urllib_request, astroid.Instance) + urlopen = next(urllib_request.igetattr('urlopen')) + urlretrieve = next(urllib_request.igetattr('urlretrieve')) + if six.PY2: + # In reality it's a function, but our implementations + # transforms it into a method. 
+ self.assertIsInstance(urlopen, astroid.BoundMethod) + self.assertEqual(urlopen.qname(), 'urllib2.urlopen') + self.assertIsInstance(urlretrieve, astroid.BoundMethod) + self.assertEqual(urlretrieve.qname(), 'urllib.urlretrieve') + else: + self.assertIsInstance(urlopen, nodes.FunctionDef) + self.assertEqual(urlopen.qname(), 'urllib.request.urlopen') + self.assertIsInstance(urlretrieve, nodes.FunctionDef) + self.assertEqual(urlretrieve.qname(), 'urllib.request.urlretrieve') + + def test_from_imports(self): + ast_node = test_utils.extract_node(''' + from six.moves import http_client + http_client.HTTPSConnection #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + if six.PY3: + qname = 'http.client.HTTPSConnection' + else: + qname = 'httplib.HTTPSConnection' + self.assertEqual(inferred.qname(), qname) + + +@unittest.skipUnless(HAS_MULTIPROCESSING, + 'multiprocesing is required for this test, but ' + 'on some platforms it is missing ' + '(Jython for instance)') +class MultiprocessingBrainTest(unittest.TestCase): + + def test_multiprocessing_module_attributes(self): + # Test that module attributes are working, + # especially on Python 3.4+, where they are obtained + # from a context. 
+ module = test_utils.extract_node(""" + import multiprocessing + """) + module = module.do_import_module('multiprocessing') + cpu_count = next(module.igetattr('cpu_count')) + if sys.version_info < (3, 4): + self.assertIsInstance(cpu_count, nodes.FunctionDef) + else: + self.assertIsInstance(cpu_count, astroid.BoundMethod) + + def test_module_name(self): + module = test_utils.extract_node(""" + import multiprocessing + multiprocessing.SyncManager() + """) + inferred_sync_mgr = next(module.infer()) + module = inferred_sync_mgr.root() + self.assertEqual(module.name, 'multiprocessing.managers') + + def test_multiprocessing_manager(self): + # Test that we have the proper attributes + # for a multiprocessing.managers.SyncManager + module = builder.parse(""" + import multiprocessing + manager = multiprocessing.Manager() + queue = manager.Queue() + joinable_queue = manager.JoinableQueue() + event = manager.Event() + rlock = manager.RLock() + bounded_semaphore = manager.BoundedSemaphore() + condition = manager.Condition() + barrier = manager.Barrier() + pool = manager.Pool() + list = manager.list() + dict = manager.dict() + value = manager.Value() + array = manager.Array() + namespace = manager.Namespace() + """) + queue = next(module['queue'].infer()) + self.assertEqual(queue.qname(), + "{}.Queue".format(six.moves.queue.__name__)) + + joinable_queue = next(module['joinable_queue'].infer()) + self.assertEqual(joinable_queue.qname(), + "{}.Queue".format(six.moves.queue.__name__)) + + event = next(module['event'].infer()) + event_name = "threading.{}".format("Event" if six.PY3 else "_Event") + self.assertEqual(event.qname(), event_name) + + rlock = next(module['rlock'].infer()) + rlock_name = "threading._RLock" + self.assertEqual(rlock.qname(), rlock_name) + + bounded_semaphore = next(module['bounded_semaphore'].infer()) + semaphore_name = "threading.{}".format( + "BoundedSemaphore" if six.PY3 else "_BoundedSemaphore") + self.assertEqual(bounded_semaphore.qname(), 
semaphore_name) + + pool = next(module['pool'].infer()) + pool_name = "multiprocessing.pool.Pool" + self.assertEqual(pool.qname(), pool_name) + + for attr in ('list', 'dict'): + obj = next(module[attr].infer()) + self.assertEqual(obj.qname(), + "{}.{}".format(bases.BUILTINS, attr)) + + array = next(module['array'].infer()) + self.assertEqual(array.qname(), "array.array") + + manager = next(module['manager'].infer()) + # Verify that we have these attributes + self.assertTrue(manager.getattr('start')) + self.assertTrue(manager.getattr('shutdown')) + + +@unittest.skipUnless(HAS_ENUM, + 'The enum module was only added in Python 3.4. Support for ' + 'older Python versions may be available through the enum34 ' + 'compatibility module.') +class EnumBrainTest(unittest.TestCase): + + def test_simple_enum(self): + module = builder.parse(""" + import enum + + class MyEnum(enum.Enum): + one = "one" + two = "two" + + def mymethod(self, x): + return 5 + + """) + + enum = next(module['MyEnum'].infer()) + one = enum['one'] + self.assertEqual(one.pytype(), '.MyEnum.one') + + property_type = '{}.property'.format(bases.BUILTINS) + for propname in ('name', 'value'): + prop = next(iter(one.getattr(propname))) + self.assertIn(property_type, prop.decoratornames()) + + meth = one.getattr('mymethod')[0] + self.assertIsInstance(meth, astroid.FunctionDef) + + def test_looks_like_enum_false_positive(self): + # Test that a class named Enumeration is not considered a builtin enum. 
+ module = builder.parse(''' + class Enumeration(object): + def __init__(self, name, enum_list): + pass + test = 42 + ''') + enum = module['Enumeration'] + test = next(enum.igetattr('test')) + self.assertEqual(test.value, 42) + + def test_enum_multiple_base_classes(self): + module = builder.parse(""" + import enum + + class Mixin: + pass + + class MyEnum(Mixin, enum.Enum): + one = 1 + """) + enum = next(module['MyEnum'].infer()) + one = enum['one'] + + clazz = one.getattr('__class__')[0] + self.assertTrue(clazz.is_subtype_of('.Mixin'), + 'Enum instance should share base classes with generating class') + + def test_int_enum(self): + module = builder.parse(""" + import enum + + class MyEnum(enum.IntEnum): + one = 1 + """) + + enum = next(module['MyEnum'].infer()) + one = enum['one'] + + clazz = one.getattr('__class__')[0] + int_type = '{}.{}'.format(bases.BUILTINS, 'int') + self.assertTrue(clazz.is_subtype_of(int_type), + 'IntEnum based enums should be a subtype of int') + + def test_enum_func_form_is_class_not_instance(self): + cls, instance = test_utils.extract_node(''' + from enum import Enum + f = Enum('Audience', ['a', 'b', 'c']) + f #@ + f(1) #@ + ''') + inferred_cls = next(cls.infer()) + self.assertIsInstance(inferred_cls, bases.Instance) + inferred_instance = next(instance.infer()) + self.assertIsInstance(inferred_instance, bases.Instance) + self.assertIsInstance(next(inferred_instance.igetattr('name')), nodes.Const) + self.assertIsInstance(next(inferred_instance.igetattr('value')), nodes.Const) + + +@unittest.skipUnless(HAS_DATEUTIL, "This test requires the dateutil library.") +class DateutilBrainTest(unittest.TestCase): + def test_parser(self): + module = builder.parse(""" + from dateutil.parser import parse + d = parse('2000-01-01') + """) + d_type = next(module['d'].infer()) + self.assertEqual(d_type.qname(), "datetime.datetime") + + +@unittest.skipUnless(HAS_NUMPY, "This test requires the numpy library.") +class NumpyBrainTest(unittest.TestCase): + + def 
test_numpy(self): + node = test_utils.extract_node(''' + import numpy + numpy.ones #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.FunctionDef) + + +@unittest.skipUnless(HAS_PYTEST, "This test requires the pytest library.") +class PytestBrainTest(unittest.TestCase): + + def test_pytest(self): + ast_node = test_utils.extract_node(''' + import pytest + pytest #@ + ''') + module = next(ast_node.infer()) + attrs = ['deprecated_call', 'warns', 'exit', 'fail', 'skip', + 'importorskip', 'xfail', 'mark', 'raises', 'freeze_includes', + 'set_trace', 'fixture', 'yield_fixture'] + if pytest.__version__.split('.')[0] == '3': + attrs += ['approx', 'register_assert_rewrite'] + + for attr in attrs: + self.assertIn(attr, module) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_builder.py b/pymode/libs/astroid/tests/unittest_builder.py new file mode 100644 index 00000000..920f36e8 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_builder.py @@ -0,0 +1,774 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""tests for the astroid builder and rebuilder module""" + +import os +import sys +import unittest + +import six + +from astroid import builder +from astroid import exceptions +from astroid import manager +from astroid import nodes +from astroid import test_utils +from astroid import util +from astroid.tests import resources + +MANAGER = manager.AstroidManager() +BUILTINS = six.moves.builtins.__name__ + + +class FromToLineNoTest(unittest.TestCase): + + def setUp(self): + self.astroid = resources.build_file('data/format.py') + + def test_callfunc_lineno(self): + stmts = self.astroid.body + # on line 4: + # function('aeozrijz\ + # earzer', hop) + discard = stmts[0] + self.assertIsInstance(discard, nodes.Expr) + self.assertEqual(discard.fromlineno, 4) + self.assertEqual(discard.tolineno, 5) + callfunc = discard.value + self.assertIsInstance(callfunc, nodes.Call) + self.assertEqual(callfunc.fromlineno, 4) + self.assertEqual(callfunc.tolineno, 5) + name = callfunc.func + self.assertIsInstance(name, nodes.Name) + self.assertEqual(name.fromlineno, 4) + self.assertEqual(name.tolineno, 4) + strarg = callfunc.args[0] + self.assertIsInstance(strarg, nodes.Const) + if hasattr(sys, 'pypy_version_info'): + lineno = 4 + else: + lineno = 5 # no way for this one in CPython (is 4 actually) + self.assertEqual(strarg.fromlineno, lineno) + self.assertEqual(strarg.tolineno, lineno) + namearg = callfunc.args[1] + self.assertIsInstance(namearg, nodes.Name) + self.assertEqual(namearg.fromlineno, 5) + self.assertEqual(namearg.tolineno, 5) + # on line 10: + # fonction(1, + # 2, + # 3, + # 4) + discard = stmts[2] + self.assertIsInstance(discard, nodes.Expr) + self.assertEqual(discard.fromlineno, 10) + self.assertEqual(discard.tolineno, 13) + callfunc = discard.value + self.assertIsInstance(callfunc, nodes.Call) + self.assertEqual(callfunc.fromlineno, 10) + self.assertEqual(callfunc.tolineno, 13) + name = callfunc.func + self.assertIsInstance(name, nodes.Name) + 
self.assertEqual(name.fromlineno, 10) + self.assertEqual(name.tolineno, 10) + for i, arg in enumerate(callfunc.args): + self.assertIsInstance(arg, nodes.Const) + self.assertEqual(arg.fromlineno, 10+i) + self.assertEqual(arg.tolineno, 10+i) + + def test_function_lineno(self): + stmts = self.astroid.body + # on line 15: + # def definition(a, + # b, + # c): + # return a + b + c + function = stmts[3] + self.assertIsInstance(function, nodes.FunctionDef) + self.assertEqual(function.fromlineno, 15) + self.assertEqual(function.tolineno, 18) + return_ = function.body[0] + self.assertIsInstance(return_, nodes.Return) + self.assertEqual(return_.fromlineno, 18) + self.assertEqual(return_.tolineno, 18) + if sys.version_info < (3, 0): + self.assertEqual(function.blockstart_tolineno, 17) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + def test_decorated_function_lineno(self): + astroid = builder.parse(''' + @decorator + def function( + arg): + print (arg) + ''', __name__) + function = astroid['function'] + self.assertEqual(function.fromlineno, 3) # XXX discussable, but that's what is expected by pylint right now + self.assertEqual(function.tolineno, 5) + self.assertEqual(function.decorators.fromlineno, 2) + self.assertEqual(function.decorators.tolineno, 2) + if sys.version_info < (3, 0): + self.assertEqual(function.blockstart_tolineno, 4) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + + def test_class_lineno(self): + stmts = self.astroid.body + # on line 20: + # class debile(dict, + # object): + # pass + class_ = stmts[4] + self.assertIsInstance(class_, nodes.ClassDef) + self.assertEqual(class_.fromlineno, 20) + self.assertEqual(class_.tolineno, 22) + self.assertEqual(class_.blockstart_tolineno, 21) + pass_ = class_.body[0] + self.assertIsInstance(pass_, nodes.Pass) + self.assertEqual(pass_.fromlineno, 22) + self.assertEqual(pass_.tolineno, 22) + + def 
test_if_lineno(self): + stmts = self.astroid.body + # on line 20: + # if aaaa: pass + # else: + # aaaa,bbbb = 1,2 + # aaaa,bbbb = bbbb,aaaa + if_ = stmts[5] + self.assertIsInstance(if_, nodes.If) + self.assertEqual(if_.fromlineno, 24) + self.assertEqual(if_.tolineno, 27) + self.assertEqual(if_.blockstart_tolineno, 24) + self.assertEqual(if_.orelse[0].fromlineno, 26) + self.assertEqual(if_.orelse[1].tolineno, 27) + + def test_for_while_lineno(self): + for code in (''' + for a in range(4): + print (a) + break + else: + print ("bouh") + ''', ''' + while a: + print (a) + break + else: + print ("bouh") + '''): + astroid = builder.parse(code, __name__) + stmt = astroid.body[0] + self.assertEqual(stmt.fromlineno, 2) + self.assertEqual(stmt.tolineno, 6) + self.assertEqual(stmt.blockstart_tolineno, 2) + self.assertEqual(stmt.orelse[0].fromlineno, 6) # XXX + self.assertEqual(stmt.orelse[0].tolineno, 6) + + def test_try_except_lineno(self): + astroid = builder.parse(''' + try: + print (a) + except: + pass + else: + print ("bouh") + ''', __name__) + try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 7) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.orelse[0].fromlineno, 7) # XXX + self.assertEqual(try_.orelse[0].tolineno, 7) + hdlr = try_.handlers[0] + self.assertEqual(hdlr.fromlineno, 4) + self.assertEqual(hdlr.tolineno, 5) + self.assertEqual(hdlr.blockstart_tolineno, 4) + + + def test_try_finally_lineno(self): + astroid = builder.parse(''' + try: + print (a) + finally: + print ("bouh") + ''', __name__) + try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 5) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.finalbody[0].fromlineno, 5) # XXX + self.assertEqual(try_.finalbody[0].tolineno, 5) + + + def test_try_finally_25_lineno(self): + astroid = builder.parse(''' + try: + print (a) + except: + pass + finally: + print ("bouh") + ''', __name__) 
+ try_ = astroid.body[0] + self.assertEqual(try_.fromlineno, 2) + self.assertEqual(try_.tolineno, 7) + self.assertEqual(try_.blockstart_tolineno, 2) + self.assertEqual(try_.finalbody[0].fromlineno, 7) # XXX + self.assertEqual(try_.finalbody[0].tolineno, 7) + + + def test_with_lineno(self): + astroid = builder.parse(''' + from __future__ import with_statement + with file("/tmp/pouet") as f: + print (f) + ''', __name__) + with_ = astroid.body[1] + self.assertEqual(with_.fromlineno, 3) + self.assertEqual(with_.tolineno, 4) + self.assertEqual(with_.blockstart_tolineno, 3) + + +class BuilderTest(unittest.TestCase): + + def setUp(self): + self.builder = builder.AstroidBuilder() + + def test_data_build_null_bytes(self): + with self.assertRaises(exceptions.AstroidBuildingException): + self.builder.string_build('\x00') + + def test_data_build_invalid_x_escape(self): + with self.assertRaises(exceptions.AstroidBuildingException): + self.builder.string_build('"\\x1"') + + def test_missing_newline(self): + """check that a file with no trailing new line is parseable""" + resources.build_file('data/noendingnewline.py') + + def test_missing_file(self): + with self.assertRaises(exceptions.AstroidBuildingException): + resources.build_file('data/inexistant.py') + + def test_inspect_build0(self): + """test astroid tree build from a living object""" + builtin_ast = MANAGER.ast_from_module_name(BUILTINS) + if six.PY2: + fclass = builtin_ast['file'] + self.assertIn('name', fclass) + self.assertIn('mode', fclass) + self.assertIn('read', fclass) + self.assertTrue(fclass.newstyle) + self.assertTrue(fclass.pytype(), '%s.type' % BUILTINS) + self.assertIsInstance(fclass['read'], nodes.FunctionDef) + # check builtin function has args.args == None + dclass = builtin_ast['dict'] + self.assertIsNone(dclass['has_key'].args.args) + # just check type and object are there + builtin_ast.getattr('type') + objectastroid = builtin_ast.getattr('object')[0] + 
self.assertIsInstance(objectastroid.getattr('__new__')[0], nodes.FunctionDef) + # check open file alias + builtin_ast.getattr('open') + # check 'help' is there (defined dynamically by site.py) + builtin_ast.getattr('help') + # check property has __init__ + pclass = builtin_ast['property'] + self.assertIn('__init__', pclass) + self.assertIsInstance(builtin_ast['None'], nodes.Const) + self.assertIsInstance(builtin_ast['True'], nodes.Const) + self.assertIsInstance(builtin_ast['False'], nodes.Const) + if six.PY3: + self.assertIsInstance(builtin_ast['Exception'], nodes.ClassDef) + self.assertIsInstance(builtin_ast['NotImplementedError'], nodes.ClassDef) + else: + self.assertIsInstance(builtin_ast['Exception'], nodes.ImportFrom) + self.assertIsInstance(builtin_ast['NotImplementedError'], nodes.ImportFrom) + + def test_inspect_build1(self): + time_ast = MANAGER.ast_from_module_name('time') + self.assertTrue(time_ast) + self.assertEqual(time_ast['time'].args.defaults, []) + + if os.name == 'java': + test_inspect_build1 = unittest.expectedFailure(test_inspect_build1) + + def test_inspect_build2(self): + """test astroid tree build from a living object""" + try: + from mx import DateTime + except ImportError: + self.skipTest('test skipped: mxDateTime is not available') + else: + dt_ast = self.builder.inspect_build(DateTime) + dt_ast.getattr('DateTime') + # this one is failing since DateTimeType.__module__ = 'builtins' ! 
+ #dt_ast.getattr('DateTimeType') + + def test_inspect_build3(self): + self.builder.inspect_build(unittest) + + @test_utils.require_version(maxver='3.0') + def test_inspect_build_instance(self): + """test astroid tree build from a living object""" + import exceptions + builtin_ast = self.builder.inspect_build(exceptions) + fclass = builtin_ast['OSError'] + # things like OSError.strerror are now (2.5) data descriptors on the + # class instead of entries in the __dict__ of an instance + container = fclass + self.assertIn('errno', container) + self.assertIn('strerror', container) + self.assertIn('filename', container) + + def test_inspect_build_type_object(self): + builtin_ast = MANAGER.ast_from_module_name(BUILTINS) + + inferred = list(builtin_ast.igetattr('object')) + self.assertEqual(len(inferred), 1) + inferred = inferred[0] + self.assertEqual(inferred.name, 'object') + inferred.as_string() # no crash test + + inferred = list(builtin_ast.igetattr('type')) + self.assertEqual(len(inferred), 1) + inferred = inferred[0] + self.assertEqual(inferred.name, 'type') + inferred.as_string() # no crash test + + def test_inspect_transform_module(self): + # ensure no cached version of the time module + MANAGER._mod_file_cache.pop(('time', None), None) + MANAGER.astroid_cache.pop('time', None) + def transform_time(node): + if node.name == 'time': + node.transformed = True + MANAGER.register_transform(nodes.Module, transform_time) + try: + time_ast = MANAGER.ast_from_module_name('time') + self.assertTrue(getattr(time_ast, 'transformed', False)) + finally: + MANAGER.unregister_transform(nodes.Module, transform_time) + + def test_package_name(self): + """test base properties and method of a astroid module""" + datap = resources.build_file('data/__init__.py', 'data') + self.assertEqual(datap.name, 'data') + self.assertEqual(datap.package, 1) + datap = resources.build_file('data/__init__.py', 'data.__init__') + self.assertEqual(datap.name, 'data') + self.assertEqual(datap.package, 1) 
+ + def test_yield_parent(self): + """check if we added discard nodes as yield parent (w/ compiler)""" + code = """ + def yiell(): #@ + yield 0 + if noe: + yield more + """ + func = test_utils.extract_node(code) + self.assertIsInstance(func, nodes.FunctionDef) + stmt = func.body[0] + self.assertIsInstance(stmt, nodes.Expr) + self.assertIsInstance(stmt.value, nodes.Yield) + self.assertIsInstance(func.body[1].body[0], nodes.Expr) + self.assertIsInstance(func.body[1].body[0].value, nodes.Yield) + + def test_object(self): + obj_ast = self.builder.inspect_build(object) + self.assertIn('__setattr__', obj_ast) + + def test_newstyle_detection(self): + data = ''' + class A: + "old style" + + class B(A): + "old style" + + class C(object): + "new style" + + class D(C): + "new style" + + __metaclass__ = type + + class E(A): + "old style" + + class F: + "new style" + ''' + mod_ast = builder.parse(data, __name__) + if six.PY3: + self.assertTrue(mod_ast['A'].newstyle) + self.assertTrue(mod_ast['B'].newstyle) + self.assertTrue(mod_ast['E'].newstyle) + else: + self.assertFalse(mod_ast['A'].newstyle) + self.assertFalse(mod_ast['B'].newstyle) + self.assertFalse(mod_ast['E'].newstyle) + self.assertTrue(mod_ast['C'].newstyle) + self.assertTrue(mod_ast['D'].newstyle) + self.assertTrue(mod_ast['F'].newstyle) + + def test_globals(self): + data = ''' + CSTE = 1 + + def update_global(): + global CSTE + CSTE += 1 + + def global_no_effect(): + global CSTE2 + print (CSTE) + ''' + astroid = builder.parse(data, __name__) + self.assertEqual(len(astroid.getattr('CSTE')), 2) + self.assertIsInstance(astroid.getattr('CSTE')[0], nodes.AssignName) + self.assertEqual(astroid.getattr('CSTE')[0].fromlineno, 2) + self.assertEqual(astroid.getattr('CSTE')[1].fromlineno, 6) + with self.assertRaises(exceptions.NotFoundError): + astroid.getattr('CSTE2') + with self.assertRaises(exceptions.InferenceError): + next(astroid['global_no_effect'].ilookup('CSTE2')) + + @unittest.skipIf(os.name == 'java', + 'This test 
is skipped on Jython, because the ' + 'socket object is patched later on with the ' + 'methods we are looking for. Since we do not ' + 'understand setattr in for loops yet, we skip this') + def test_socket_build(self): + import socket + astroid = self.builder.module_build(socket) + # XXX just check the first one. Actually 3 objects are inferred (look at + # the socket module) but the last one as those attributes dynamically + # set and astroid is missing this. + for fclass in astroid.igetattr('socket'): + self.assertIn('connect', fclass) + self.assertIn('send', fclass) + self.assertIn('close', fclass) + break + + def test_gen_expr_var_scope(self): + data = 'l = list(n for n in range(10))\n' + astroid = builder.parse(data, __name__) + # n unavailable outside gen expr scope + self.assertNotIn('n', astroid) + # test n is inferable anyway + n = test_utils.get_name_node(astroid, 'n') + self.assertIsNot(n.scope(), astroid) + self.assertEqual([i.__class__ for i in n.infer()], + [util.YES.__class__]) + + def test_no_future_imports(self): + mod = builder.parse("import sys") + self.assertEqual(set(), mod._future_imports) + + def test_future_imports(self): + mod = builder.parse("from __future__ import print_function") + self.assertEqual(set(['print_function']), mod._future_imports) + + def test_two_future_imports(self): + mod = builder.parse(""" + from __future__ import print_function + from __future__ import absolute_import + """) + self.assertEqual(set(['print_function', 'absolute_import']), mod._future_imports) + + def test_inferred_build(self): + code = ''' + class A: pass + A.type = "class" + + def A_assign_type(self): + print (self) + A.assign_type = A_assign_type + ''' + astroid = builder.parse(code) + lclass = list(astroid.igetattr('A')) + self.assertEqual(len(lclass), 1) + lclass = lclass[0] + self.assertIn('assign_type', lclass._locals) + self.assertIn('type', lclass._locals) + + def test_augassign_attr(self): + builder.parse(""" + class Counter: + v = 0 + def 
inc(self): + self.v += 1 + """, __name__) + # TODO: Check self.v += 1 generate AugAssign(AssAttr(...)), + # not AugAssign(GetAttr(AssName...)) + + def test_inferred_dont_pollute(self): + code = ''' + def func(a=None): + a.custom_attr = 0 + def func2(a={}): + a.custom_attr = 0 + ''' + builder.parse(code) + nonetype = nodes.const_factory(None) + self.assertNotIn('custom_attr', nonetype._locals) + self.assertNotIn('custom_attr', nonetype._instance_attrs) + nonetype = nodes.const_factory({}) + self.assertNotIn('custom_attr', nonetype._locals) + self.assertNotIn('custom_attr', nonetype._instance_attrs) + + def test_asstuple(self): + code = 'a, b = range(2)' + astroid = builder.parse(code) + self.assertIn('b', astroid._locals) + code = ''' + def visit_if(self, node): + node.test, body = node.tests[0] + ''' + astroid = builder.parse(code) + self.assertIn('body', astroid['visit_if']._locals) + + def test_build_constants(self): + '''test expected values of constants after rebuilding''' + code = ''' + def func(): + return None + return + return 'None' + ''' + astroid = builder.parse(code) + none, nothing, chain = [ret.value for ret in astroid.body[0].body] + self.assertIsInstance(none, nodes.Const) + self.assertIsNone(none.value) + self.assertIsNone(nothing) + self.assertIsInstance(chain, nodes.Const) + self.assertEqual(chain.value, 'None') + + def test_not_implemented(self): + node = test_utils.extract_node(''' + NotImplemented #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, NotImplemented) + + +class FileBuildTest(unittest.TestCase): + def setUp(self): + self.module = resources.build_file('data/module.py', 'data.module') + + def test_module_base_props(self): + """test base properties and method of a astroid module""" + module = self.module + self.assertEqual(module.name, 'data.module') + self.assertEqual(module.doc, "test module for astroid\n") + self.assertEqual(module.fromlineno, 0) + 
self.assertIsNone(module.parent) + self.assertEqual(module.frame(), module) + self.assertEqual(module.root(), module) + self.assertEqual(module.source_file, os.path.abspath(resources.find('data/module.py'))) + self.assertEqual(module.pure_python, 1) + self.assertEqual(module.package, 0) + self.assertFalse(module.is_statement) + self.assertEqual(module.statement(), module) + self.assertEqual(module.statement(), module) + + def test_module_locals(self): + """test the 'locals' dictionary of a astroid module""" + module = self.module + _locals = module._locals + self.assertIs(_locals, module._globals) + keys = sorted(_locals.keys()) + should = ['MY_DICT', 'NameNode', 'YO', 'YOUPI', + '__revision__', 'global_access', 'modutils', 'four_args', + 'os', 'redirect'] + should.sort() + self.assertEqual(keys, sorted(should)) + + def test_function_base_props(self): + """test base properties and method of a astroid function""" + module = self.module + function = module['global_access'] + self.assertEqual(function.name, 'global_access') + self.assertEqual(function.doc, 'function test') + self.assertEqual(function.fromlineno, 11) + self.assertTrue(function.parent) + self.assertEqual(function.frame(), function) + self.assertEqual(function.parent.frame(), module) + self.assertEqual(function.root(), module) + self.assertEqual([n.name for n in function.args.args], ['key', 'val']) + self.assertEqual(function.type, 'function') + + def test_function_locals(self): + """test the 'locals' dictionary of a astroid function""" + _locals = self.module['global_access']._locals + self.assertEqual(len(_locals), 4) + keys = sorted(_locals.keys()) + self.assertEqual(keys, ['i', 'key', 'local', 'val']) + + def test_class_base_props(self): + """test base properties and method of a astroid class""" + module = self.module + klass = module['YO'] + self.assertEqual(klass.name, 'YO') + self.assertEqual(klass.doc, 'hehe') + self.assertEqual(klass.fromlineno, 25) + self.assertTrue(klass.parent) + 
self.assertEqual(klass.frame(), klass) + self.assertEqual(klass.parent.frame(), module) + self.assertEqual(klass.root(), module) + self.assertEqual(klass.basenames, []) + if six.PY3: + self.assertTrue(klass.newstyle) + else: + self.assertFalse(klass.newstyle) + + def test_class_locals(self): + """test the 'locals' dictionary of a astroid class""" + module = self.module + klass1 = module['YO'] + locals1 = klass1._locals + keys = sorted(locals1.keys()) + self.assertEqual(keys, ['__init__', 'a']) + klass2 = module['YOUPI'] + locals2 = klass2._locals + keys = locals2.keys() + self.assertEqual(sorted(keys), + ['__init__', 'class_attr', 'class_method', + 'method', 'static_method']) + + def test_class_instance_attrs(self): + module = self.module + klass1 = module['YO'] + klass2 = module['YOUPI'] + self.assertEqual(list(klass1._instance_attrs.keys()), ['yo']) + self.assertEqual(list(klass2._instance_attrs.keys()), ['member']) + + def test_class_basenames(self): + module = self.module + klass1 = module['YO'] + klass2 = module['YOUPI'] + self.assertEqual(klass1.basenames, []) + self.assertEqual(klass2.basenames, ['YO']) + + def test_method_base_props(self): + """test base properties and method of a astroid method""" + klass2 = self.module['YOUPI'] + # "normal" method + method = klass2['method'] + self.assertEqual(method.name, 'method') + self.assertEqual([n.name for n in method.args.args], ['self']) + self.assertEqual(method.doc, 'method test') + self.assertEqual(method.fromlineno, 47) + self.assertEqual(method.type, 'method') + # class method + method = klass2['class_method'] + self.assertEqual([n.name for n in method.args.args], ['cls']) + self.assertEqual(method.type, 'classmethod') + # static method + method = klass2['static_method'] + self.assertEqual(method.args.args, []) + self.assertEqual(method.type, 'staticmethod') + + def test_method_locals(self): + """test the 'locals' dictionary of a astroid method""" + method = self.module['YOUPI']['method'] + _locals = 
method._locals + keys = sorted(_locals) + if sys.version_info < (3, 0): + self.assertEqual(len(_locals), 5) + self.assertEqual(keys, ['a', 'autre', 'b', 'local', 'self']) + else:# ListComp variables are no more accessible outside + self.assertEqual(len(_locals), 3) + self.assertEqual(keys, ['autre', 'local', 'self']) + + +class ModuleBuildTest(resources.SysPathSetup, FileBuildTest): + + def setUp(self): + super(ModuleBuildTest, self).setUp() + abuilder = builder.AstroidBuilder() + try: + import data.module + except ImportError: + # Make pylint happy. + self.skipTest('Unable to load data.module') + else: + self.module = abuilder.module_build(data.module, 'data.module') + +@unittest.skipIf(six.PY3, "guess_encoding not used on Python 3") +class TestGuessEncoding(unittest.TestCase): + def setUp(self): + self.guess_encoding = builder._guess_encoding + + def testEmacs(self): + e = self.guess_encoding('# -*- coding: UTF-8 -*-') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding('# -*- coding:UTF-8 -*-') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(''' + ### -*- coding: ISO-8859-1 -*- + ''') + self.assertEqual(e, 'ISO-8859-1') + e = self.guess_encoding(''' + + ### -*- coding: ISO-8859-1 -*- + ''') + self.assertIsNone(e) + + def testVim(self): + e = self.guess_encoding('# vim:fileencoding=UTF-8') + self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(''' + ### vim:fileencoding=ISO-8859-1 + ''') + self.assertEqual(e, 'ISO-8859-1') + e = self.guess_encoding(''' + + ### vim:fileencoding= ISO-8859-1 + ''') + self.assertIsNone(e) + + def test_wrong_coding(self): + # setting "coding" varaible + e = self.guess_encoding("coding = UTF-8") + self.assertIsNone(e) + # setting a dictionnary entry + e = self.guess_encoding("coding:UTF-8") + self.assertIsNone(e) + # setting an arguement + e = self.guess_encoding("def do_something(a_word_with_coding=None):") + self.assertIsNone(e) + + def testUTF8(self): + e = self.guess_encoding('\xef\xbb\xbf any UTF-8 data') + 
self.assertEqual(e, 'UTF-8') + e = self.guess_encoding(' any UTF-8 data \xef\xbb\xbf') + self.assertIsNone(e) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_inference.py b/pymode/libs/astroid/tests/unittest_inference.py new file mode 100644 index 00000000..86497727 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_inference.py @@ -0,0 +1,2130 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+"""tests for the astroid inference capabilities +""" +import sys +from functools import partial +import unittest +import warnings + +import six + +from astroid import InferenceError, builder, nodes +from astroid.builder import parse +from astroid.inference import infer_end as inference_infer_end +from astroid.bases import Instance, BoundMethod, UnboundMethod,\ + path_wrapper, BUILTINS +from astroid import arguments +from astroid import objects +from astroid import test_utils +from astroid import util +from astroid.tests import resources + + +def get_node_of_class(start_from, klass): + return next(start_from.nodes_of_class(klass)) + +builder = builder.AstroidBuilder() + +if sys.version_info < (3, 0): + EXC_MODULE = 'exceptions' +else: + EXC_MODULE = BUILTINS + + +class InferenceUtilsTest(unittest.TestCase): + + def test_path_wrapper(self): + def infer_default(self, *args): + raise InferenceError + infer_default = path_wrapper(infer_default) + infer_end = path_wrapper(inference_infer_end) + with self.assertRaises(InferenceError): + next(infer_default(1)) + self.assertEqual(next(infer_end(1)), 1) + + +def _assertInferElts(node_type, self, node, elts): + inferred = next(node.infer()) + self.assertIsInstance(inferred, node_type) + self.assertEqual(sorted(elt.value for elt in inferred.elts), + elts) + +def partialmethod(func, arg): + """similar to functools.partial but return a lambda instead of a class so returned value may be + turned into a method. 
+ """ + return lambda *args, **kwargs: func(arg, *args, **kwargs) + +class InferenceTest(resources.SysPathSetup, unittest.TestCase): + + # additional assertInfer* method for builtin types + + def assertInferConst(self, node, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected) + + def assertInferDict(self, node, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + + elts = set([(key.value, value.value) + for (key, value) in inferred.items]) + self.assertEqual(sorted(elts), sorted(expected.items())) + + assertInferTuple = partialmethod(_assertInferElts, nodes.Tuple) + assertInferList = partialmethod(_assertInferElts, nodes.List) + assertInferSet = partialmethod(_assertInferElts, nodes.Set) + assertInferFrozenSet = partialmethod(_assertInferElts, objects.FrozenSet) + + CODE = ''' + class C(object): + "new style" + attr = 4 + + def meth1(self, arg1, optarg=0): + var = object() + print ("yo", arg1, optarg) + self.iattr = "hop" + return var + + def meth2(self): + self.meth1(*self.meth3) + + def meth3(self, d=attr): + b = self.attr + c = self.iattr + return b, c + + ex = Exception("msg") + v = C().meth1(1) + m_unbound = C.meth1 + m_bound = C().meth1 + a, b, c = ex, 1, "bonjour" + [d, e, f] = [ex, 1.0, ("bonjour", v)] + g, h = f + i, (j, k) = "glup", f + + a, b= b, a # Gasp ! 
+ ''' + + ast = parse(CODE, __name__) + + def test_infer_abstract_property_return_values(self): + module = parse(''' + import abc + + class A(object): + @abc.abstractproperty + def test(self): + return 42 + + a = A() + x = a.test + ''') + inferred = next(module['x'].infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_module_inference(self): + inferred = self.ast.infer() + obj = next(inferred) + self.assertEqual(obj.name, __name__) + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_class_inference(self): + inferred = self.ast['C'].infer() + obj = next(inferred) + self.assertEqual(obj.name, 'C') + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_function_inference(self): + inferred = self.ast['C']['meth1'].infer() + obj = next(inferred) + self.assertEqual(obj.name, 'meth1') + self.assertEqual(obj.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_builtin_name_inference(self): + inferred = self.ast['C']['meth1']['var'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_tupleassign_name_inference(self): + inferred = self.ast['a'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['b'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 1) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['c'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "bonjour") + 
self.assertRaises(StopIteration, partial(next, inferred)) + + def test_listassign_name_inference(self): + inferred = self.ast['d'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['e'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 1.0) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['f'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Tuple) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_advanced_tupleassign_name_inference1(self): + inferred = self.ast['g'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "bonjour") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['h'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_advanced_tupleassign_name_inference2(self): + inferred = self.ast['i'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, u"glup") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['j'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "bonjour") + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast['k'].infer() + var = next(inferred) + self.assertEqual(var.name, 'object') + self.assertEqual(var.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_swap_assign_inference(self): + inferred = self.ast._locals['a'][1].infer() + const = next(inferred) + self.assertIsInstance(const, 
nodes.Const) + self.assertEqual(const.value, 1) + self.assertRaises(StopIteration, partial(next, inferred)) + inferred = self.ast._locals['b'][1].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference1(self): + inferred = self.ast['ex'].infer() + exc = next(inferred) + self.assertIsInstance(exc, Instance) + self.assertEqual(exc.name, 'Exception') + self.assertEqual(exc.root().name, EXC_MODULE) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference2(self): + inferred = get_node_of_class(self.ast['C']['meth2'], nodes.Attribute).infer() + meth1 = next(inferred) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.root().name, __name__) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference3(self): + inferred = self.ast['C']['meth3']['b'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, 4) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_getattr_inference4(self): + inferred = self.ast['C']['meth3']['c'].infer() + const = next(inferred) + self.assertIsInstance(const, nodes.Const) + self.assertEqual(const.value, "hop") + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_callfunc_inference(self): + inferred = self.ast['v'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, Instance) + self.assertEqual(meth1.name, 'object') + self.assertEqual(meth1.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_unbound_method_inference(self): + inferred = self.ast['m_unbound'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, UnboundMethod) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.parent.frame().name, 'C') + 
self.assertRaises(StopIteration, partial(next, inferred)) + + def test_bound_method_inference(self): + inferred = self.ast['m_bound'].infer() + meth1 = next(inferred) + self.assertIsInstance(meth1, BoundMethod) + self.assertEqual(meth1.name, 'meth1') + self.assertEqual(meth1.parent.frame().name, 'C') + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_args_default_inference1(self): + optarg = test_utils.get_name_node(self.ast['C']['meth1'], 'optarg') + inferred = optarg.infer() + obj1 = next(inferred) + self.assertIsInstance(obj1, nodes.Const) + self.assertEqual(obj1.value, 0) + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_args_default_inference2(self): + inferred = self.ast['C']['meth3'].ilookup('d') + obj1 = next(inferred) + self.assertIsInstance(obj1, nodes.Const) + self.assertEqual(obj1.value, 4) + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_inference_restrictions(self): + inferred = test_utils.get_name_node(self.ast['C']['meth1'], 'arg1').infer() + obj1 = next(inferred) + self.assertIs(obj1, util.YES, obj1) + self.assertRaises(StopIteration, partial(next, inferred)) + + def test_ancestors_inference(self): + code = ''' + class A(object): #@ + pass + + class A(A): #@ + pass + ''' + a1, a2 = test_utils.extract_node(code, __name__) + a2_ancestors = list(a2.ancestors()) + self.assertEqual(len(a2_ancestors), 2) + self.assertIs(a2_ancestors[0], a1) + + def test_ancestors_inference2(self): + code = ''' + class A(object): #@ + pass + + class B(A): #@ + pass + + class A(B): #@ + pass + ''' + a1, b, a2 = test_utils.extract_node(code, __name__) + a2_ancestors = list(a2.ancestors()) + self.assertEqual(len(a2_ancestors), 3) + self.assertIs(a2_ancestors[0], b) + self.assertIs(a2_ancestors[1], a1) + + def test_f_arg_f(self): + code = ''' + def f(f=1): + return f + + a = f() + ''' + 
ast = parse(code, __name__) + a = ast['a'] + a_inferred = a.inferred() + self.assertEqual(a_inferred[0].value, 1) + self.assertEqual(len(a_inferred), 1) + + def test_infered_warning(self): + code = ''' + def f(f=1): + return f + + a = f() + ''' + ast = parse(code, __name__) + a = ast['a'] + + warnings.simplefilter('always') + with warnings.catch_warnings(record=True) as w: + a.infered() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + + def test_exc_ancestors(self): + code = ''' + def f(): + raise __(NotImplementedError) + ''' + error = test_utils.extract_node(code, __name__) + nie = error.inferred()[0] + self.assertIsInstance(nie, nodes.ClassDef) + nie_ancestors = [c.name for c in nie.ancestors()] + if sys.version_info < (3, 0): + self.assertEqual(nie_ancestors, ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object']) + else: + self.assertEqual(nie_ancestors, ['RuntimeError', 'Exception', 'BaseException', 'object']) + + def test_except_inference(self): + code = ''' + try: + print (hop) + except NameError as ex: + ex1 = ex + except Exception as ex: + ex2 = ex + raise + ''' + ast = parse(code, __name__) + ex1 = ast['ex1'] + ex1_infer = ex1.infer() + ex1 = next(ex1_infer) + self.assertIsInstance(ex1, Instance) + self.assertEqual(ex1.name, 'NameError') + self.assertRaises(StopIteration, partial(next, ex1_infer)) + ex2 = ast['ex2'] + ex2_infer = ex2.infer() + ex2 = next(ex2_infer) + self.assertIsInstance(ex2, Instance) + self.assertEqual(ex2.name, 'Exception') + self.assertRaises(StopIteration, partial(next, ex2_infer)) + + def test_del1(self): + code = ''' + del undefined_attr + ''' + delete = test_utils.extract_node(code, __name__) + self.assertRaises(InferenceError, delete.infer) + + def test_del2(self): + code = ''' + a = 1 + b = a + del a + c = a + a = 2 + d = a + ''' + ast = parse(code, __name__) + n = ast['b'] + n_infer = n.infer() + inferred = next(n_infer) + self.assertIsInstance(inferred, nodes.Const) + 
self.assertEqual(inferred.value, 1) + self.assertRaises(StopIteration, partial(next, n_infer)) + n = ast['c'] + n_infer = n.infer() + self.assertRaises(InferenceError, partial(next, n_infer)) + n = ast['d'] + n_infer = n.infer() + inferred = next(n_infer) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 2) + self.assertRaises(StopIteration, partial(next, n_infer)) + + def test_builtin_types(self): + code = ''' + l = [1] + t = (2,) + d = {} + s = '' + s2 = '_' + ''' + ast = parse(code, __name__) + n = ast['l'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.getitem(0).value, 1) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'list') + self.assertIn('append', inferred._proxied._locals) + n = ast['t'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.getitem(0).value, 2) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'tuple') + n = ast['d'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Dict) + self.assertIsInstance(inferred, Instance) + self.assertIsInstance(inferred._proxied, nodes.ClassDef) + self.assertEqual(inferred._proxied.name, 'dict') + self.assertIn('get', inferred._proxied._locals) + n = ast['s'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'str') + self.assertIn('lower', inferred._proxied._locals) + n = ast['s2'] + inferred = next(n.infer()) + self.assertEqual(inferred.getitem(0).value, '_') + + code = 's = {1}' + ast = parse(code, __name__) + n = ast['s'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Set) + self.assertIsInstance(inferred, Instance) + 
self.assertEqual(inferred.name, 'set') + self.assertIn('remove', inferred._proxied._locals) + + @test_utils.require_version(maxver='3.0') + def test_unicode_type(self): + code = '''u = u""''' + ast = parse(code, __name__) + n = ast['u'] + inferred = next(n.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'unicode') + self.assertIn('lower', inferred._proxied._locals) + + @unittest.expectedFailure + def test_descriptor_are_callable(self): + code = ''' + class A: + statm = staticmethod(open) + clsm = classmethod('whatever') + ''' + ast = parse(code, __name__) + statm = next(ast['A'].igetattr('statm')) + self.assertTrue(statm.callable()) + clsm = next(ast['A'].igetattr('clsm')) + self.assertFalse(clsm.callable()) + + def test_bt_ancestor_crash(self): + code = ''' + class Warning(Warning): + pass + ''' + ast = parse(code, __name__) + w = ast['Warning'] + ancestors = w.ancestors() + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'Warning') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'Exception') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'BaseException') + self.assertEqual(ancestor.root().name, EXC_MODULE) + ancestor = next(ancestors) + self.assertEqual(ancestor.name, 'object') + self.assertEqual(ancestor.root().name, BUILTINS) + self.assertRaises(StopIteration, partial(next, ancestors)) + + def test_qqch(self): + code = ''' + from astroid.modutils import load_module_from_name + xxx = load_module_from_name('__pkginfo__') + ''' + ast = parse(code, __name__) + xxx = ast['xxx'] + self.assertSetEqual({n.__class__ for n in xxx.inferred()}, + {nodes.Const, util.YES.__class__}) + + def test_method_argument(self): + code = ''' + class ErudiEntitySchema: + """a entity has a type, a set of subject and or object relations""" + def 
__init__(self, e_type, **kwargs): + kwargs['e_type'] = e_type.capitalize().encode() + + def meth(self, e_type, *args, **kwargs): + kwargs['e_type'] = e_type.capitalize().encode() + print(args) + ''' + ast = parse(code, __name__) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'e_type') + self.assertEqual([n.__class__ for n in arg.infer()], + [util.YES.__class__]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'kwargs') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Dict]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'e_type') + self.assertEqual([n.__class__ for n in arg.infer()], + [util.YES.__class__]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'args') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Tuple]) + arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'kwargs') + self.assertEqual([n.__class__ for n in arg.infer()], + [nodes.Dict]) + + def test_tuple_then_list(self): + code = ''' + def test_view(rql, vid, tags=()): + tags = list(tags) + __(tags).append(vid) + ''' + name = test_utils.extract_node(code, __name__) + it = name.infer() + tags = next(it) + self.assertIsInstance(tags, nodes.List) + self.assertEqual(tags.elts, []) + with self.assertRaises(StopIteration): + next(it) + + def test_mulassign_inference(self): + code = ''' + def first_word(line): + """Return the first word of a line""" + + return line.split()[0] + + def last_word(line): + """Return last word of a line""" + + return line.split()[-1] + + def process_line(word_pos): + """Silly function: returns (ok, callable) based on argument. + + For test purpose only. 
+ """ + + if word_pos > 0: + return (True, first_word) + elif word_pos < 0: + return (True, last_word) + else: + return (False, None) + + if __name__ == '__main__': + + line_number = 0 + for a_line in file('test_callable.py'): + tupletest = process_line(line_number) + (ok, fct) = process_line(line_number) + if ok: + fct(a_line) + ''' + ast = parse(code, __name__) + self.assertEqual(len(list(ast['process_line'].infer_call_result(None))), 3) + self.assertEqual(len(list(ast['tupletest'].infer())), 3) + values = ['FunctionDef(first_word)', 'FunctionDef(last_word)', 'Const(NoneType)'] + self.assertEqual([str(inferred) + for inferred in ast['fct'].infer()], values) + + def test_float_complex_ambiguity(self): + code = ''' + def no_conjugate_member(magic_flag): #@ + """should not raise E1101 on something.conjugate""" + if magic_flag: + something = 1.0 + else: + something = 1.0j + if isinstance(something, float): + return something + return __(something).conjugate() + ''' + func, retval = test_utils.extract_node(code, __name__) + self.assertEqual( + [i.value for i in func.ilookup('something')], + [1.0, 1.0j]) + self.assertEqual( + [i.value for i in retval.infer()], + [1.0, 1.0j]) + + def test_lookup_cond_branches(self): + code = ''' + def no_conjugate_member(magic_flag): + """should not raise E1101 on something.conjugate""" + something = 1.0 + if magic_flag: + something = 1.0j + return something.conjugate() + ''' + ast = parse(code, __name__) + values = [i.value for i in test_utils.get_name_node(ast, 'something', -1).infer()] + self.assertEqual(values, [1.0, 1.0j]) + + + def test_simple_subscript(self): + code = ''' + class A(object): + def __getitem__(self, index): + return index + 42 + [1, 2, 3][0] #@ + (1, 2, 3)[1] #@ + (1, 2, 3)[-1] #@ + [1, 2, 3][0] + (2, )[0] + (3, )[-1] #@ + e = {'key': 'value'} + e['key'] #@ + "first"[0] #@ + list([1, 2, 3])[-1] #@ + tuple((4, 5, 6))[2] #@ + A()[0] #@ + A()[-1] #@ + ''' + ast_nodes = test_utils.extract_node(code, __name__) + 
expected = [1, 2, 3, 6, 'value', 'f', 3, 6, 42, 41] + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_invalid_subscripts(self): + ast_nodes = test_utils.extract_node(''' + class NoGetitem(object): + pass + class InvalidGetitem(object): + def __getitem__(self): pass + class InvalidGetitem2(object): + __getitem__ = 42 + NoGetitem()[4] #@ + InvalidGetitem()[5] #@ + InvalidGetitem2()[10] #@ + [1, 2, 3][None] #@ + 'lala'['bala'] #@ + ''') + for node in ast_nodes[:3]: + self.assertRaises(InferenceError, next, node.infer()) + for node in ast_nodes[3:]: + self.assertEqual(next(node.infer()), util.YES) + + def test_bytes_subscript(self): + node = test_utils.extract_node('''b'a'[0]''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + if six.PY2: + self.assertEqual(inferred.value, 'a') + else: + self.assertEqual(inferred.value, 97) + + #def test_simple_tuple(self): + #"""test case for a simple tuple value""" + ## XXX tuple inference is not implemented ... 
+ #code = """ +#a = (1,) +#b = (22,) +#some = a + b +#""" + #ast = builder.string_build(code, __name__, __file__) + #self.assertEqual(ast['some'].infer.next().as_string(), "(1, 22)") + + def test_simple_for(self): + code = ''' + for a in [1, 2, 3]: + print (a) + for b,c in [(1,2), (3,4)]: + print (b) + print (c) + + print ([(d,e) for e,d in ([1,2], [3,4])]) + ''' + ast = parse(code, __name__) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'a', -1).infer()], [1, 2, 3]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'b', -1).infer()], [1, 3]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'c', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'd', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'e', -1).infer()], [1, 3]) + + def test_simple_for_genexpr(self): + code = ''' + print ((d,e) for e,d in ([1,2], [3,4])) + ''' + ast = parse(code, __name__) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'd', -1).infer()], [2, 4]) + self.assertEqual([i.value for i in + test_utils.get_name_node(ast, 'e', -1).infer()], [1, 3]) + + + def test_builtin_help(self): + code = ''' + help() + ''' + # XXX failing since __builtin__.help assignment has + # been moved into a function... 
+ node = test_utils.extract_node(code, __name__) + inferred = list(node.func.infer()) + self.assertEqual(len(inferred), 1, inferred) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(inferred[0].name, "_Helper") + + def test_builtin_open(self): + code = ''' + open("toto.txt") + ''' + node = test_utils.extract_node(code, __name__).func + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + if hasattr(sys, 'pypy_version_info'): + self.assertIsInstance(inferred[0], nodes.ClassDef) + self.assertEqual(inferred[0].name, 'file') + else: + self.assertIsInstance(inferred[0], nodes.FunctionDef) + self.assertEqual(inferred[0].name, 'open') + + def test_callfunc_context_func(self): + code = ''' + def mirror(arg=None): + return arg + + un = mirror(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('un')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 1) + + def test_callfunc_context_lambda(self): + code = ''' + mirror = lambda x=None: x + + un = mirror(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('mirror')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Lambda) + inferred = list(ast.igetattr('un')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 1) + + def test_factory_method(self): + code = ''' + class Super(object): + @classmethod + def instance(cls): + return cls() + + class Sub(Super): + def method(self): + print ('method called') + + sub = Sub.instance() + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('sub')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(inferred[0]._proxied.name, 'Sub') + + + def test_import_as(self): + code = ''' + import os.path as osp + print (osp.dirname(__file__)) + + from os.path import exists as e + assert 
e(__file__) + + from new import code as make_code + print (make_code) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('osp')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Module) + self.assertEqual(inferred[0].name, 'os.path') + inferred = list(ast.igetattr('e')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + self.assertEqual(inferred[0].name, 'exists') + if sys.version_info >= (3, 0): + self.skipTest(' module has been removed') + inferred = list(ast.igetattr('make_code')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], Instance) + self.assertEqual(str(inferred[0]), + 'Instance of %s.type' % BUILTINS) + + def _test_const_inferred(self, node, value): + inferred = list(node.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, value) + + def test_unary_not(self): + for code in ('a = not (1,); b = not ()', + 'a = not {1:2}; b = not {}'): + ast = builder.string_build(code, __name__, __file__) + self._test_const_inferred(ast['a'], False) + self._test_const_inferred(ast['b'], True) + + @test_utils.require_version(minver='3.5') + def test_matmul(self): + node = test_utils.extract_node(''' + class Array: + def __matmul__(self, other): + return 42 + Array() @ Array() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_binary_op_int_add(self): + ast = builder.string_build('a = 1 + 2', __name__, __file__) + self._test_const_inferred(ast['a'], 3) + + def test_binary_op_int_sub(self): + ast = builder.string_build('a = 1 - 2', __name__, __file__) + self._test_const_inferred(ast['a'], -1) + + def test_binary_op_float_div(self): + ast = builder.string_build('a = 1 / 2.', __name__, __file__) + self._test_const_inferred(ast['a'], 1 / 2.) 
+ + def test_binary_op_str_mul(self): + ast = builder.string_build('a = "*" * 40', __name__, __file__) + self._test_const_inferred(ast['a'], "*" * 40) + + def test_binary_op_bitand(self): + ast = builder.string_build('a = 23&20', __name__, __file__) + self._test_const_inferred(ast['a'], 23&20) + + def test_binary_op_bitor(self): + ast = builder.string_build('a = 23|8', __name__, __file__) + self._test_const_inferred(ast['a'], 23|8) + + def test_binary_op_bitxor(self): + ast = builder.string_build('a = 23^9', __name__, __file__) + self._test_const_inferred(ast['a'], 23^9) + + def test_binary_op_shiftright(self): + ast = builder.string_build('a = 23 >>1', __name__, __file__) + self._test_const_inferred(ast['a'], 23>>1) + + def test_binary_op_shiftleft(self): + ast = builder.string_build('a = 23 <<1', __name__, __file__) + self._test_const_inferred(ast['a'], 23<<1) + + + def test_binary_op_list_mul(self): + for code in ('a = [[]] * 2', 'a = 2 * [[]]'): + ast = builder.string_build(code, __name__, __file__) + inferred = list(ast['a'].infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.List) + self.assertEqual(len(inferred[0].elts), 2) + self.assertIsInstance(inferred[0].elts[0], nodes.List) + self.assertIsInstance(inferred[0].elts[1], nodes.List) + + def test_binary_op_list_mul_none(self): + 'test correct handling on list multiplied by None' + ast = builder.string_build('a = [1] * None\nb = [1] * "r"') + inferred = ast['a'].inferred() + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + inferred = ast['b'].inferred() + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + + def test_binary_op_list_mul_int(self): + 'test correct handling on list multiplied by int when there are more than one' + code = ''' + from ctypes import c_int + seq = [c_int()] * 4 + ''' + ast = parse(code, __name__) + inferred = ast['seq'].inferred() + self.assertEqual(len(inferred), 1) + listval = 
inferred[0] + self.assertIsInstance(listval, nodes.List) + self.assertEqual(len(listval.itered()), 4) + + def test_binary_op_tuple_add(self): + ast = builder.string_build('a = (1,) + (2,)', __name__, __file__) + inferred = list(ast['a'].infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Tuple) + self.assertEqual(len(inferred[0].elts), 2) + self.assertEqual(inferred[0].elts[0].value, 1) + self.assertEqual(inferred[0].elts[1].value, 2) + + def test_binary_op_custom_class(self): + code = ''' + class myarray: + def __init__(self, array): + self.array = array + def __mul__(self, x): + return myarray([2,4,6]) + def astype(self): + return "ASTYPE" + + def randint(maximum): + if maximum is not None: + return myarray([1,2,3]) * 2 + else: + return int(5) + + x = randint(1) + ''' + ast = parse(code, __name__) + inferred = list(ast.igetattr('x')) + self.assertEqual(len(inferred), 2) + value = [str(v) for v in inferred] + # The __name__ trick here makes it work when invoked directly + # (__name__ == '__main__') and through pytest (__name__ == + # 'unittest_inference') + self.assertEqual(value, ['Instance of %s.myarray' % __name__, + 'Instance of %s.int' % BUILTINS]) + + def test_nonregr_lambda_arg(self): + code = ''' + def f(g = lambda: None): + __(g()).x +''' + callfuncnode = test_utils.extract_node(code) + inferred = list(callfuncnode.infer()) + self.assertEqual(len(inferred), 2, inferred) + inferred.remove(util.YES) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertIsNone(inferred[0].value) + + def test_nonregr_getitem_empty_tuple(self): + code = ''' + def f(x): + a = ()[x] + ''' + ast = parse(code, __name__) + inferred = list(ast['f'].ilookup('a')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0], util.YES) + + def test_nonregr_instance_attrs(self): + """non regression for instance_attrs infinite loop : pylint / #4""" + + code = """ + class Foo(object): + + def set_42(self): + self.attr = 42 + + class 
Bar(Foo): + + def __init__(self): + self.attr = 41 + """ + ast = parse(code, __name__) + foo_class = ast['Foo'] + bar_class = ast['Bar'] + bar_self = ast['Bar']['__init__']['self'] + assattr = bar_class._instance_attrs['attr'][0] + self.assertEqual(len(foo_class._instance_attrs['attr']), 1) + self.assertEqual(len(bar_class._instance_attrs['attr']), 1) + self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) + # call 'instance_attr' via 'Instance.getattr' to trigger the bug: + instance = bar_self.inferred()[0] + instance.getattr('attr') + self.assertEqual(len(bar_class._instance_attrs['attr']), 1) + self.assertEqual(len(foo_class._instance_attrs['attr']), 1) + self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) + + def test_python25_generator_exit(self): + # pylint: disable=redefined-variable-type + buffer = six.StringIO() + sys.stderr = buffer + try: + data = "b = {}[str(0)+''].a" + ast = builder.string_build(data, __name__, __file__) + list(ast['b'].infer()) + output = buffer.getvalue() + finally: + sys.stderr = sys.__stderr__ + # I have no idea how to test for this in another way... 
+ msg = ("Exception exceptions.RuntimeError: " + "'generator ignored GeneratorExit' in " + "ignored") + self.assertNotIn("RuntimeError", output, msg) + + def test_python25_no_relative_import(self): + ast = resources.build_file('data/package/absimport.py') + self.assertTrue(ast.absolute_import_activated(), True) + inferred = next(test_utils.get_name_node(ast, 'import_package_subpackage_module').infer()) + # failed to import since absolute_import is activated + self.assertIs(inferred, util.YES) + + def test_nonregr_absolute_import(self): + ast = resources.build_file('data/absimp/string.py', 'data.absimp.string') + self.assertTrue(ast.absolute_import_activated(), True) + inferred = next(test_utils.get_name_node(ast, 'string').infer()) + self.assertIsInstance(inferred, nodes.Module) + self.assertEqual(inferred.name, 'string') + self.assertIn('ascii_letters', inferred._locals) + + def test_mechanize_open(self): + try: + import mechanize # pylint: disable=unused-variable + except ImportError: + self.skipTest('require mechanize installed') + data = ''' + from mechanize import Browser + print(Browser) + b = Browser() + ''' + ast = parse(data, __name__) + browser = next(test_utils.get_name_node(ast, 'Browser').infer()) + self.assertIsInstance(browser, nodes.ClassDef) + bopen = list(browser.igetattr('open')) + self.skipTest('the commit said: "huum, see that later"') + self.assertEqual(len(bopen), 1) + self.assertIsInstance(bopen[0], nodes.FunctionDef) + self.assertTrue(bopen[0].callable()) + b = next(test_utils.get_name_node(ast, 'b').infer()) + self.assertIsInstance(b, Instance) + bopen = list(b.igetattr('open')) + self.assertEqual(len(bopen), 1) + self.assertIsInstance(bopen[0], BoundMethod) + self.assertTrue(bopen[0].callable()) + + def test_property(self): + code = ''' + from smtplib import SMTP + class SendMailController(object): + + @property + def smtp(self): + return SMTP(mailhost, port) + + @property + def me(self): + return self + + my_smtp = 
SendMailController().smtp + my_me = SendMailController().me + ''' + decorators = set(['%s.property' % BUILTINS]) + ast = parse(code, __name__) + self.assertEqual(ast['SendMailController']['smtp'].decoratornames(), + decorators) + propinferred = list(ast.body[2].value.infer()) + self.assertEqual(len(propinferred), 1) + propinferred = propinferred[0] + self.assertIsInstance(propinferred, Instance) + self.assertEqual(propinferred.name, 'SMTP') + self.assertEqual(propinferred.root().name, 'smtplib') + self.assertEqual(ast['SendMailController']['me'].decoratornames(), + decorators) + propinferred = list(ast.body[3].value.infer()) + self.assertEqual(len(propinferred), 1) + propinferred = propinferred[0] + self.assertIsInstance(propinferred, Instance) + self.assertEqual(propinferred.name, 'SendMailController') + self.assertEqual(propinferred.root().name, __name__) + + def test_im_func_unwrap(self): + code = ''' + class EnvBasedTC: + def pactions(self): + pass + pactions = EnvBasedTC.pactions.im_func + print (pactions) + + class EnvBasedTC2: + pactions = EnvBasedTC.pactions.im_func + print (pactions) + ''' + ast = parse(code, __name__) + pactions = test_utils.get_name_node(ast, 'pactions') + inferred = list(pactions.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + pactions = test_utils.get_name_node(ast['EnvBasedTC2'], 'pactions') + inferred = list(pactions.infer()) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + + def test_augassign(self): + code = ''' + a = 1 + a += 2 + print (a) + ''' + ast = parse(code, __name__) + inferred = list(test_utils.get_name_node(ast, 'a').infer()) + + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.Const) + self.assertEqual(inferred[0].value, 3) + + def test_nonregr_func_arg(self): + code = ''' + def foo(self, bar): + def baz(): + pass + def qux(): + return baz + spam = bar(None, qux) + print (spam) + ''' + ast = 
parse(code, __name__) + inferred = list(test_utils.get_name_node(ast['foo'], 'spam').infer()) + self.assertEqual(len(inferred), 1) + self.assertIs(inferred[0], util.YES) + + def test_nonregr_func_global(self): + code = ''' + active_application = None + + def get_active_application(): + global active_application + return active_application + + class Application(object): + def __init__(self): + global active_application + active_application = self + + class DataManager(object): + def __init__(self, app=None): + self.app = get_active_application() + def test(self): + p = self.app + print (p) + ''' + ast = parse(code, __name__) + inferred = list(Instance(ast['DataManager']).igetattr('app')) + self.assertEqual(len(inferred), 2, inferred) # None / Instance(Application) + inferred = list(test_utils.get_name_node(ast['DataManager']['test'], 'p').infer()) + self.assertEqual(len(inferred), 2, inferred) + for node in inferred: + if isinstance(node, Instance) and node.name == 'Application': + break + else: + self.fail('expected to find an instance of Application in %s' % inferred) + + def test_list_inference(self): + """#20464""" + code = ''' + from unknown import Unknown + A = [] + B = [] + + def test(): + xyz = [ + Unknown + ] + A + B + return xyz + + Z = test() + ''' + ast = parse(code, __name__) + inferred = next(ast['Z'].infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertEqual(len(inferred.elts), 1) + self.assertIs(inferred.elts[0], util.YES) + + def test__new__(self): + code = ''' + class NewTest(object): + "doc" + def __new__(cls, arg): + self = object.__new__(cls) + self.arg = arg + return self + + n = NewTest() + ''' + ast = parse(code, __name__) + self.assertRaises(InferenceError, list, ast['NewTest'].igetattr('arg')) + n = next(ast['n'].infer()) + inferred = list(n.igetattr('arg')) + self.assertEqual(len(inferred), 1, inferred) + + def test__new__bound_methods(self): + node = test_utils.extract_node(''' + class cls(object): pass + cls().__new__(cls) 
#@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred._proxied, node.root()['cls']) + + def test_two_parents_from_same_module(self): + code = ''' + from data import nonregr + class Xxx(nonregr.Aaa, nonregr.Ccc): + "doc" + ''' + ast = parse(code, __name__) + parents = list(ast['Xxx'].ancestors()) + self.assertEqual(len(parents), 3, parents) # Aaa, Ccc, object + + def test_pluggable_inference(self): + code = ''' + from collections import namedtuple + A = namedtuple('A', ['a', 'b']) + B = namedtuple('B', 'a b') + ''' + ast = parse(code, __name__) + aclass = ast['A'].inferred()[0] + self.assertIsInstance(aclass, nodes.ClassDef) + self.assertIn('a', aclass._instance_attrs) + self.assertIn('b', aclass._instance_attrs) + bclass = ast['B'].inferred()[0] + self.assertIsInstance(bclass, nodes.ClassDef) + self.assertIn('a', bclass._instance_attrs) + self.assertIn('b', bclass._instance_attrs) + + def test_infer_arguments(self): + code = ''' + class A(object): + def first(self, arg1, arg2): + return arg1 + @classmethod + def method(cls, arg1, arg2): + return arg2 + @classmethod + def empty(cls): + return 2 + @staticmethod + def static(arg1, arg2): + return arg1 + def empty_method(self): + return [] + x = A().first(1, []) + y = A.method(1, []) + z = A.static(1, []) + empty = A.empty() + empty_list = A().empty_method() + ''' + ast = parse(code, __name__) + int_node = ast['x'].inferred()[0] + self.assertIsInstance(int_node, nodes.Const) + self.assertEqual(int_node.value, 1) + list_node = ast['y'].inferred()[0] + self.assertIsInstance(list_node, nodes.List) + int_node = ast['z'].inferred()[0] + self.assertIsInstance(int_node, nodes.Const) + self.assertEqual(int_node.value, 1) + empty = ast['empty'].inferred()[0] + self.assertIsInstance(empty, nodes.Const) + self.assertEqual(empty.value, 2) + empty_list = ast['empty_list'].inferred()[0] + self.assertIsInstance(empty_list, nodes.List) + + def 
test_infer_variable_arguments(self): + code = ''' + def test(*args, **kwargs): + vararg = args + kwarg = kwargs + ''' + ast = parse(code, __name__) + func = ast['test'] + vararg = func.body[0].value + kwarg = func.body[1].value + + kwarg_inferred = kwarg.inferred()[0] + self.assertIsInstance(kwarg_inferred, nodes.Dict) + self.assertIs(kwarg_inferred.parent, func.args) + + vararg_inferred = vararg.inferred()[0] + self.assertIsInstance(vararg_inferred, nodes.Tuple) + self.assertIs(vararg_inferred.parent, func.args) + + def test_infer_nested(self): + code = """ + def nested(): + from threading import Thread + + class NestedThread(Thread): + def __init__(self): + Thread.__init__(self) + """ + # Test that inferring Thread.__init__ looks up in + # the nested scope. + ast = parse(code, __name__) + callfunc = next(ast.nodes_of_class(nodes.Call)) + func = callfunc.func + inferred = func.inferred()[0] + self.assertIsInstance(inferred, UnboundMethod) + + def test_instance_binary_operations(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + a = A() + b = A() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util.YES) + self.assertIsInstance(mul, nodes.Const) + self.assertEqual(mul.value, 42) + + def test_instance_binary_operations_parent(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + class B(A): + pass + a = B() + b = B() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util. 
YES) + self.assertIsInstance(mul, nodes.Const) + self.assertEqual(mul.value, 42) + + def test_instance_binary_operations_multiple_methods(self): + code = """ + class A(object): + def __mul__(self, other): + return 42 + class B(A): + def __mul__(self, other): + return [42] + a = B() + b = B() + sub = a - b + mul = a * b + """ + ast = parse(code, __name__) + sub = ast['sub'].inferred()[0] + mul = ast['mul'].inferred()[0] + self.assertIs(sub, util.YES) + self.assertIsInstance(mul, nodes.List) + self.assertIsInstance(mul.elts[0], nodes.Const) + self.assertEqual(mul.elts[0].value, 42) + + def test_infer_call_result_crash(self): + code = """ + class A(object): + def __mul__(self, other): + return type.__new__() + + a = A() + b = A() + c = a * b + """ + ast = parse(code, __name__) + node = ast['c'] + self.assertEqual(node.inferred(), [util.YES]) + + def test_infer_empty_nodes(self): + # Should not crash when trying to infer EmptyNodes. + node = nodes.EmptyNode() + self.assertEqual(node.inferred(), [util.YES]) + + def test_infinite_loop_for_decorators(self): + # Issue https://bitbucket.org/logilab/astroid/issue/50 + # A decorator that returns itself leads to an infinite loop. 
+ code = """ + def decorator(): + def wrapper(): + return decorator() + return wrapper + + @decorator() + def do_a_thing(): + pass + """ + ast = parse(code, __name__) + node = ast['do_a_thing'] + self.assertEqual(node.type, 'function') + + def test_no_infinite_ancestor_loop(self): + klass = test_utils.extract_node(""" + import datetime + + def method(self): + datetime.datetime = something() + + class something(datetime.datetime): #@ + pass + """) + self.assertIn( + 'object', + [base.name for base in klass.ancestors()]) + + def test_stop_iteration_leak(self): + code = """ + class Test: + def __init__(self): + self.config = {0: self.config[0]} + self.config[0].test() #@ + """ + ast = test_utils.extract_node(code, __name__) + expr = ast.func.expr + self.assertRaises(InferenceError, next, expr.infer()) + + def test_tuple_builtin_inference(self): + code = """ + var = (1, 2) + tuple() #@ + tuple([1]) #@ + tuple({2}) #@ + tuple("abc") #@ + tuple({1: 2}) #@ + tuple(var) #@ + tuple(tuple([1])) #@ + + tuple(None) #@ + tuple(1) #@ + tuple(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferTuple(ast[0], []) + self.assertInferTuple(ast[1], [1]) + self.assertInferTuple(ast[2], [2]) + self.assertInferTuple(ast[3], ["a", "b", "c"]) + self.assertInferTuple(ast[4], [1]) + self.assertInferTuple(ast[5], [1, 2]) + self.assertInferTuple(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.tuple".format(BUILTINS)) + + def test_frozenset_builtin_inference(self): + code = """ + var = (1, 2) + frozenset() #@ + frozenset([1, 2, 1]) #@ + frozenset({2, 3, 1}) #@ + frozenset("abcab") #@ + frozenset({1: 2}) #@ + frozenset(var) #@ + frozenset(tuple([1])) #@ + + frozenset(set(tuple([4, 5, set([2])]))) #@ + frozenset(None) #@ + frozenset(1) #@ + frozenset(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferFrozenSet(ast[0], []) + 
self.assertInferFrozenSet(ast[1], [1, 2]) + self.assertInferFrozenSet(ast[2], [1, 2, 3]) + self.assertInferFrozenSet(ast[3], ["a", "b", "c"]) + self.assertInferFrozenSet(ast[4], [1]) + self.assertInferFrozenSet(ast[5], [1, 2]) + self.assertInferFrozenSet(ast[6], [1]) + + for node in ast[7:]: + infered = next(node.infer()) + self.assertIsInstance(infered, Instance) + self.assertEqual(infered.qname(), "{}.frozenset".format(BUILTINS)) + + def test_set_builtin_inference(self): + code = """ + var = (1, 2) + set() #@ + set([1, 2, 1]) #@ + set({2, 3, 1}) #@ + set("abcab") #@ + set({1: 2}) #@ + set(var) #@ + set(tuple([1])) #@ + + set(set(tuple([4, 5, set([2])]))) #@ + set(None) #@ + set(1) #@ + set(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + + self.assertInferSet(ast[0], []) + self.assertInferSet(ast[1], [1, 2]) + self.assertInferSet(ast[2], [1, 2, 3]) + self.assertInferSet(ast[3], ["a", "b", "c"]) + self.assertInferSet(ast[4], [1]) + self.assertInferSet(ast[5], [1, 2]) + self.assertInferSet(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.set".format(BUILTINS)) + + def test_list_builtin_inference(self): + code = """ + var = (1, 2) + list() #@ + list([1, 2, 1]) #@ + list({2, 3, 1}) #@ + list("abcab") #@ + list({1: 2}) #@ + list(var) #@ + list(tuple([1])) #@ + + list(list(tuple([4, 5, list([2])]))) #@ + list(None) #@ + list(1) #@ + list(1, 2) #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferList(ast[0], []) + self.assertInferList(ast[1], [1, 1, 2]) + self.assertInferList(ast[2], [1, 2, 3]) + self.assertInferList(ast[3], ["a", "a", "b", "b", "c"]) + self.assertInferList(ast[4], [1]) + self.assertInferList(ast[5], [1, 2]) + self.assertInferList(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.list".format(BUILTINS)) + + 
@test_utils.require_version('3.0') + def test_builtin_inference_py3k(self): + code = """ + list(b"abc") #@ + tuple(b"abc") #@ + set(b"abc") #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferList(ast[0], [97, 98, 99]) + self.assertInferTuple(ast[1], [97, 98, 99]) + self.assertInferSet(ast[2], [97, 98, 99]) + + def test_dict_inference(self): + code = """ + dict() #@ + dict(a=1, b=2, c=3) #@ + dict([(1, 2), (2, 3)]) #@ + dict([[1, 2], [2, 3]]) #@ + dict([(1, 2), [2, 3]]) #@ + dict([('a', 2)], b=2, c=3) #@ + dict({1: 2}) #@ + dict({'c': 2}, a=4, b=5) #@ + def func(): + return dict(a=1, b=2) + func() #@ + var = {'x': 2, 'y': 3} + dict(var, a=1, b=2) #@ + + dict([1, 2, 3]) #@ + dict([(1, 2), (1, 2, 3)]) #@ + dict({1: 2}, {1: 2}) #@ + dict({1: 2}, (1, 2)) #@ + dict({1: 2}, (1, 2), a=4) #@ + dict([(1, 2), ([4, 5], 2)]) #@ + dict([None, None]) #@ + + def using_unknown_kwargs(**kwargs): + return dict(**kwargs) + using_unknown_kwargs(a=1, b=2) #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferDict(ast[0], {}) + self.assertInferDict(ast[1], {'a': 1, 'b': 2, 'c': 3}) + for i in range(2, 5): + self.assertInferDict(ast[i], {1: 2, 2: 3}) + self.assertInferDict(ast[5], {'a': 2, 'b': 2, 'c': 3}) + self.assertInferDict(ast[6], {1: 2}) + self.assertInferDict(ast[7], {'c': 2, 'a': 4, 'b': 5}) + self.assertInferDict(ast[8], {'a': 1, 'b': 2}) + self.assertInferDict(ast[9], {'x': 2, 'y': 3, 'a': 1, 'b': 2}) + + for node in ast[10:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) + + def test_dict_inference_kwargs(self): + ast_node = test_utils.extract_node('''dict(a=1, b=2, **{'c': 3})''') + self.assertInferDict(ast_node, {'a': 1, 'b': 2, 'c': 3}) + + @test_utils.require_version('3.5') + def test_dict_inference_for_multiple_starred(self): + pairs = [ + ('dict(a=1, **{"b": 2}, **{"c":3})', {'a':1, 'b':2, 'c':3}), + ('dict(a=1, **{"b": 2}, d=4, 
**{"c":3})', {'a':1, 'b':2, 'c':3, 'd':4}), + ('dict({"a":1}, b=2, **{"c":3})', {'a':1, 'b':2, 'c':3}), + ] + for code, expected_value in pairs: + node = test_utils.extract_node(code) + self.assertInferDict(node, expected_value) + + def test_dict_invalid_args(self): + invalid_values = [ + 'dict(*1)', + 'dict(**lala)', + 'dict(**[])', + ] + for invalid in invalid_values: + ast_node = test_utils.extract_node(invalid) + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) + + def test_str_methods(self): + code = """ + ' '.decode() #@ + + ' '.encode() #@ + ' '.join('abcd') #@ + ' '.replace('a', 'b') #@ + ' '.format('a') #@ + ' '.capitalize() #@ + ' '.title() #@ + ' '.lower() #@ + ' '.upper() #@ + ' '.swapcase() #@ + ' '.strip() #@ + ' '.rstrip() #@ + ' '.lstrip() #@ + ' '.rjust() #@ + ' '.ljust() #@ + ' '.center() #@ + + ' '.index() #@ + ' '.find() #@ + ' '.count() #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferConst(ast[0], u'') + for i in range(1, 16): + self.assertInferConst(ast[i], '') + for i in range(16, 19): + self.assertInferConst(ast[i], 0) + + def test_unicode_methods(self): + code = """ + u' '.encode() #@ + + u' '.decode() #@ + u' '.join('abcd') #@ + u' '.replace('a', 'b') #@ + u' '.format('a') #@ + u' '.capitalize() #@ + u' '.title() #@ + u' '.lower() #@ + u' '.upper() #@ + u' '.swapcase() #@ + u' '.strip() #@ + u' '.rstrip() #@ + u' '.lstrip() #@ + u' '.rjust() #@ + u' '.ljust() #@ + u' '.center() #@ + + u' '.index() #@ + u' '.find() #@ + u' '.count() #@ + """ + ast = test_utils.extract_node(code, __name__) + self.assertInferConst(ast[0], '') + for i in range(1, 16): + self.assertInferConst(ast[i], u'') + for i in range(16, 19): + self.assertInferConst(ast[i], 0) + + def test_scope_lookup_same_attributes(self): + code = ''' + import collections + class Second(collections.Counter): + def collections(self): + return "second" + + ''' + ast = 
parse(code, __name__) + bases = ast['Second'].bases[0] + inferred = next(bases.infer()) + self.assertTrue(inferred) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.qname(), 'collections.Counter') + + +class ArgumentsTest(unittest.TestCase): + + @staticmethod + def _get_dict_value(inferred): + items = inferred.items + return sorted((key.value, value.value) for key, value in items) + + @staticmethod + def _get_tuple_value(inferred): + elts = inferred.elts + return tuple(elt.value for elt in elts) + + def test_args(self): + expected_values = [(), (1, ), (2, 3), (4, 5), + (3, ), (), (3, 4, 5), + (), (), (4, ), (4, 5), + (), (3, ), (), (), (3, ), (42, )] + ast_nodes = test_utils.extract_node(''' + def func(*args): + return args + func() #@ + func(1) #@ + func(2, 3) #@ + func(*(4, 5)) #@ + def func(a, b, *args): + return args + func(1, 2, 3) #@ + func(1, 2) #@ + func(1, 2, 3, 4, 5) #@ + def func(a, b, c=42, *args): + return args + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, 3, 4) #@ + func(1, 2, 3, 4, 5) #@ + func = lambda a, b, *args: args + func(1, 2) #@ + func(1, 2, 3) #@ + func = lambda a, b=42, *args: args + func(1) #@ + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, *(42, )) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + @test_utils.require_version('3.5') + def test_multiple_starred_args(self): + expected_values = [ + (1, 2, 3), + (1, 4, 2, 3, 5, 6, 7), + ] + ast_nodes = test_utils.extract_node(''' + def func(a, b, *args): + return args + func(1, 2, *(1, ), *(2, 3)) #@ + func(1, 2, *(1, ), 4, *(2, 3), 5, *(6, 7)) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + def test_defaults(self): + 
expected_values = [42, 3, 41, 42] + ast_nodes = test_utils.extract_node(''' + def func(a, b, c=42, *args): + return c + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, c=41) #@ + func(1, 2, 42, 41) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + @test_utils.require_version('3.0') + def test_kwonly_args(self): + expected_values = [24, 24, 42, 23, 24, 24, 54] + ast_nodes = test_utils.extract_node(''' + def test(*, f, b): return f + test(f=24, b=33) #@ + def test(a, *, f): return f + test(1, f=24) #@ + def test(a, *, f=42): return f + test(1) #@ + test(1, f=23) #@ + def test(a, b, c=42, *args, f=24): + return f + test(1, 2, 3) #@ + test(1, 2, 3, 4) #@ + test(1, 2, 3, 4, 5, f=54) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_kwargs(self): + expected = [ + [('a', 1), ('b', 2), ('c', 3)], + [('a', 1)], + [('a', 'b')], + ] + ast_nodes = test_utils.extract_node(''' + def test(**kwargs): + return kwargs + test(a=1, b=2, c=3) #@ + test(a=1) #@ + test(**{'a': 'b'}) #@ + ''') + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_and_other_named_parameters(self): + ast_nodes = test_utils.extract_node(''' + def test(a=42, b=24, **kwargs): + return kwargs + test(42, 24, c=3, d=4) #@ + test(49, b=24, d=4) #@ + test(a=42, b=33, c=3, d=42) #@ + test(a=42, **{'c':42}) #@ + ''') + expected_values = [ + [('c', 3), ('d', 4)], + [('d', 4)], + [('c', 3), ('d', 42)], + [('c', 42)], + ] + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + 
self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_access_by_name(self): + expected_values = [42, 42, 42, 24] + ast_nodes = test_utils.extract_node(''' + def test(**kwargs): + return kwargs['f'] + test(f=42) #@ + test(**{'f': 42}) #@ + test(**dict(f=42)) #@ + def test(f=42, **kwargs): + return kwargs['l'] + test(l=24) #@ + ''') + for ast_node, value in zip(ast_nodes, expected_values): + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, value) + + def test_infer_call_result_invalid_dunder_call_on_instance(self): + ast_nodes = test_utils.extract_node(''' + class A: + __call__ = 42 + class B: + __call__ = A() + class C: + __call = None + A() #@ + B() #@ + C() #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertRaises(InferenceError, next, inferred.infer_call_result(node)) + + + def test_subscript_inference_error(self): + # Used to raise StopIteration + ast_node = test_utils.extract_node(''' + class AttributeDict(dict): + def __getitem__(self, name): + return self + flow = AttributeDict() + flow['app'] = AttributeDict() + flow['app']['config'] = AttributeDict() + flow['app']['config']['doffing'] = AttributeDict() #@ + ''') + self.assertIsNone(util.safe_infer(ast_node.targets[0])) + + def test_classmethod_inferred_by_context(self): + ast_node = test_utils.extract_node(''' + class Super(object): + def instance(cls): + return cls() + instance = classmethod(instance) + + class Sub(Super): + def method(self): + return self + + # should see the Sub.instance() is returning a Sub + # instance, not a Super instance + Sub.instance().method() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'Sub') + + @test_utils.require_version('3.5') + def test_multiple_kwargs(self): + expected_value = [ + ('a', 1), + ('b', 2), + 
('c', 3), + ('d', 4), + ('f', 42), + ] + ast_node = test_utils.extract_node(''' + def test(**kwargs): + return kwargs + test(a=1, b=2, **{'c': 3}, **{'d': 4}, f=42) #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Dict) + value = self._get_dict_value(inferred) + self.assertEqual(value, expected_value) + + def test_kwargs_are_overriden(self): + ast_nodes = test_utils.extract_node(''' + def test(f): + return f + test(f=23, **{'f': 34}) #@ + def test(f=None): + return f + test(f=23, **{'f':23}) #@ + ''') + for ast_node in ast_nodes: + inferred = next(ast_node.infer()) + self.assertEqual(inferred, util.YES) + + def test_fail_to_infer_args(self): + ast_nodes = test_utils.extract_node(''' + def test(a, **kwargs): return a + test(*missing) #@ + test(*object) #@ + test(*1) #@ + + + def test(**kwargs): return kwargs + test(**miss) #@ + test(**(1, 2)) #@ + test(**1) #@ + test(**{misss:1}) #@ + test(**{object:1}) #@ + test(**{1:1}) #@ + test(**{'a':1, 'a':1}) #@ + + def test(a): return a + test() #@ + test(1, 2, 3) #@ + + from unknown import unknown + test(*unknown) #@ + def test(*args): return args + test(*unknown) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.YES) + +class CallSiteTest(unittest.TestCase): + + @staticmethod + def _call_site_from_call(call): + return arguments.CallSite.from_call(call) + + def _test_call_site_pair(self, code, expected_args, expected_keywords): + ast_node = test_utils.extract_node(code) + call_site = self._call_site_from_call(ast_node) + self.assertEqual(len(call_site.positional_arguments), len(expected_args)) + self.assertEqual([arg.value for arg in call_site.positional_arguments], + expected_args) + self.assertEqual(len(call_site.keyword_arguments), len(expected_keywords)) + for keyword, value in expected_keywords.items(): + self.assertIn(keyword, call_site.keyword_arguments) + self.assertEqual(call_site.keyword_arguments[keyword].value, value) + + def 
_test_call_site(self, pairs): + for pair in pairs: + self._test_call_site_pair(*pair) + + @test_utils.require_version('3.5') + def test_call_site_starred_args(self): + pairs = [ + ( + "f(*(1, 2), *(2, 3), *(3, 4), **{'a':1}, **{'b': 2})", + [1, 2, 2, 3, 3, 4], + {'a': 1, 'b': 2} + ), + ( + "f(1, 2, *(3, 4), 5, *(6, 7), f=24, **{'c':3})", + [1, 2, 3, 4, 5, 6, 7], + {'f':24, 'c': 3}, + ), + # Too many fs passed into. + ( + "f(f=24, **{'f':24})", [], {}, + ), + ] + self._test_call_site(pairs) + + def test_call_site(self): + pairs = [ + ( + "f(1, 2)", [1, 2], {} + ), + ( + "f(1, 2, *(1, 2))", [1, 2, 1, 2], {} + ), + ( + "f(a=1, b=2, c=3)", [], {'a':1, 'b':2, 'c':3} + ) + ] + self._test_call_site(pairs) + + def _test_call_site_valid_arguments(self, values, invalid): + for value in values: + ast_node = test_utils.extract_node(value) + call_site = self._call_site_from_call(ast_node) + self.assertEqual(call_site.has_invalid_arguments(), invalid) + + def test_call_site_valid_arguments(self): + values = [ + "f(*lala)", "f(*1)", "f(*object)", + ] + self._test_call_site_valid_arguments(values, invalid=True) + values = [ + "f()", "f(*(1, ))", "f(1, 2, *(2, 3))", + ] + self._test_call_site_valid_arguments(values, invalid=False) + + def test_duplicated_keyword_arguments(self): + ast_node = test_utils.extract_node('f(f=24, **{"f": 25})') + site = self._call_site_from_call(ast_node) + self.assertIn('f', site.duplicated_keywords) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_lookup.py b/pymode/libs/astroid/tests/unittest_lookup.py new file mode 100644 index 00000000..bd1786d5 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_lookup.py @@ -0,0 +1,352 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for the astroid variable lookup capabilities +""" +import functools +import sys +import unittest + +from astroid import builder +from astroid import exceptions +from astroid import nodes +from astroid import scoped_nodes +from astroid import test_utils +from astroid import util +from astroid.tests import resources + + +class LookupTest(resources.SysPathSetup, unittest.TestCase): + + def setUp(self): + super(LookupTest, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') + + def test_limit(self): + code = ''' + l = [a + for a,b in list] + + a = 1 + b = a + a = None + + def func(): + c = 1 + ''' + astroid = builder.parse(code, __name__) + # a & b + a = next(astroid.nodes_of_class(nodes.Name)) + self.assertEqual(a.lineno, 2) + if sys.version_info < (3, 0): + self.assertEqual(len(astroid.lookup('b')[1]), 1) + self.assertEqual(len(astroid.lookup('a')[1]), 1) + b = astroid._locals['b'][1] + else: + self.assertEqual(len(astroid.lookup('b')[1]), 1) + self.assertEqual(len(astroid.lookup('a')[1]), 1) + b = astroid._locals['b'][0] + + stmts = a.lookup('a')[1] + self.assertEqual(len(stmts), 1) + self.assertEqual(b.lineno, 6) + b_infer = b.infer() + b_value = next(b_infer) + 
self.assertEqual(b_value.value, 1) + # c + self.assertRaises(StopIteration, functools.partial(next, b_infer)) + func = astroid._locals['func'][0] + self.assertEqual(len(func.lookup('c')[1]), 1) + + def test_module(self): + astroid = builder.parse('pass', __name__) + # built-in objects + none = next(astroid.ilookup('None')) + self.assertIsNone(none.value) + obj = next(astroid.ilookup('object')) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertEqual(obj.name, 'object') + self.assertRaises(exceptions.InferenceError, + functools.partial(next, astroid.ilookup('YOAA'))) + + # XXX + self.assertEqual(len(list(self.nonregr.ilookup('enumerate'))), 2) + + def test_class_ancestor_name(self): + code = ''' + class A: + pass + + class A(A): + pass + ''' + astroid = builder.parse(code, __name__) + cls1 = astroid._locals['A'][0] + cls2 = astroid._locals['A'][1] + name = next(cls2.nodes_of_class(nodes.Name)) + self.assertEqual(next(name.infer()), cls1) + + ### backport those test to inline code + def test_method(self): + method = self.module['YOUPI']['method'] + my_dict = next(method.ilookup('MY_DICT')) + self.assertTrue(isinstance(my_dict, nodes.Dict), my_dict) + none = next(method.ilookup('None')) + self.assertIsNone(none.value) + self.assertRaises(exceptions.InferenceError, + functools.partial(next, method.ilookup('YOAA'))) + + def test_function_argument_with_default(self): + make_class = self.module2['make_class'] + base = next(make_class.ilookup('base')) + self.assertTrue(isinstance(base, nodes.ClassDef), base.__class__) + self.assertEqual(base.name, 'YO') + self.assertEqual(base.root().name, 'data.module') + + def test_class(self): + klass = self.module['YOUPI'] + my_dict = next(klass.ilookup('MY_DICT')) + self.assertIsInstance(my_dict, nodes.Dict) + none = next(klass.ilookup('None')) + self.assertIsNone(none.value) + obj = next(klass.ilookup('object')) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertEqual(obj.name, 'object') + 
self.assertRaises(exceptions.InferenceError, + functools.partial(next, klass.ilookup('YOAA'))) + + def test_inner_classes(self): + ddd = list(self.nonregr['Ccc'].ilookup('Ddd')) + self.assertEqual(ddd[0].name, 'Ddd') + + def test_loopvar_hiding(self): + astroid = builder.parse(""" + x = 10 + for x in range(5): + print (x) + + if x > 0: + print ('#' * x) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'x'] + # inside the loop, only one possible assignment + self.assertEqual(len(xnames[0].lookup('x')[1]), 1) + # outside the loop, two possible assignments + self.assertEqual(len(xnames[1].lookup('x')[1]), 2) + self.assertEqual(len(xnames[2].lookup('x')[1]), 2) + + def test_list_comps(self): + astroid = builder.parse(""" + print ([ i for i in range(10) ]) + print ([ i for i in range(10) ]) + print ( list( i for i in range(10) ) ) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + self.assertEqual(len(xnames[2].lookup('i')[1]), 1) + self.assertEqual(xnames[2].lookup('i')[1][0].lineno, 4) + + def test_list_comp_target(self): + """test the list comprehension target""" + astroid = builder.parse(""" + ten = [ var for var in range(10) ] + var + """) + var = astroid.body[1].value + if sys.version_info < (3, 0): + self.assertEqual(var.inferred(), [util.YES]) + else: + self.assertRaises(exceptions.UnresolvableName, var.inferred) + + def test_dict_comps(self): + astroid = builder.parse(""" + print ({ i: j for i in range(10) for j in range(10) }) + print ({ i: j for i in range(10) for j in range(10) }) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + 
self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'j'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + def test_set_comps(self): + astroid = builder.parse(""" + print ({ i for i in range(10) }) + print ({ i for i in range(10) }) + """, __name__) + xnames = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == 'i'] + self.assertEqual(len(xnames[0].lookup('i')[1]), 1) + self.assertEqual(xnames[0].lookup('i')[1][0].lineno, 2) + self.assertEqual(len(xnames[1].lookup('i')[1]), 1) + self.assertEqual(xnames[1].lookup('i')[1][0].lineno, 3) + + def test_set_comp_closure(self): + astroid = builder.parse(""" + ten = { var for var in range(10) } + var + """) + var = astroid.body[1].value + self.assertRaises(exceptions.UnresolvableName, var.inferred) + + def test_generator_attributes(self): + tree = builder.parse(""" + def count(): + "test" + yield 0 + + iterer = count() + num = iterer.next() + """) + next_node = tree.body[2].value.func + gener = next_node.expr.inferred()[0] + if sys.version_info < (3, 0): + self.assertIsInstance(gener.getattr('next')[0], nodes.FunctionDef) + else: + self.assertIsInstance(gener.getattr('__next__')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('send')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('throw')[0], nodes.FunctionDef) + self.assertIsInstance(gener.getattr('close')[0], nodes.FunctionDef) + + def test_explicit___name__(self): + code = ''' + class Pouet: + __name__ = "pouet" + p1 = Pouet() + + class PouetPouet(Pouet): pass + p2 = Pouet() + + class NoName: pass + p3 = NoName() + ''' + astroid = builder.parse(code, __name__) + p1 = 
next(astroid['p1'].infer()) + self.assertTrue(p1.getattr('__name__')) + p2 = next(astroid['p2'].infer()) + self.assertTrue(p2.getattr('__name__')) + self.assertTrue(astroid['NoName'].getattr('__name__')) + p3 = next(astroid['p3'].infer()) + self.assertRaises(exceptions.NotFoundError, p3.getattr, '__name__') + + def test_function_module_special(self): + astroid = builder.parse(''' + def initialize(linter): + """initialize linter with checkers in this package """ + package_load(linter, __path__[0]) + ''', 'data.__init__') + path = [n for n in astroid.nodes_of_class(nodes.Name) if n.name == '__path__'][0] + self.assertEqual(len(path.lookup('__path__')[1]), 1) + + def test_builtin_lookup(self): + self.assertEqual(scoped_nodes.builtin_lookup('__dict__')[1], ()) + intstmts = scoped_nodes.builtin_lookup('int')[1] + self.assertEqual(len(intstmts), 1) + self.assertIsInstance(intstmts[0], nodes.ClassDef) + self.assertEqual(intstmts[0].name, 'int') + self.assertIs(intstmts[0], nodes.const_factory(1)._proxied) + + def test_decorator_arguments_lookup(self): + code = ''' + def decorator(value): + def wrapper(function): + return function + return wrapper + + class foo: + member = 10 #@ + + @decorator(member) #This will cause pylint to complain + def test(self): + pass + ''' + member = test_utils.extract_node(code, __name__).targets[0] + it = member.infer() + obj = next(it) + self.assertIsInstance(obj, nodes.Const) + self.assertEqual(obj.value, 10) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_inner_decorator_member_lookup(self): + code = ''' + class FileA: + def decorator(bla): + return bla + + @__(decorator) + def funcA(): + return 4 + ''' + decname = test_utils.extract_node(code, __name__) + it = decname.infer() + obj = next(it) + self.assertIsInstance(obj, nodes.FunctionDef) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_static_method_lookup(self): + code = ''' + class FileA: + @staticmethod + def funcA(): + return 4 
+ + + class Test: + FileA = [1,2,3] + + def __init__(self): + print (FileA.funcA()) + ''' + astroid = builder.parse(code, __name__) + it = astroid['Test']['__init__'].ilookup('FileA') + obj = next(it) + self.assertIsInstance(obj, nodes.ClassDef) + self.assertRaises(StopIteration, functools.partial(next, it)) + + def test_global_delete(self): + code = ''' + def run2(): + f = Frobble() + + class Frobble: + pass + Frobble.mumble = True + + del Frobble + + def run1(): + f = Frobble() + ''' + astroid = builder.parse(code, __name__) + stmts = astroid['run2'].lookup('Frobbel')[1] + self.assertEqual(len(stmts), 0) + stmts = astroid['run1'].lookup('Frobbel')[1] + self.assertEqual(len(stmts), 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_manager.py b/pymode/libs/astroid/tests/unittest_manager.py new file mode 100644 index 00000000..452b759e --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_manager.py @@ -0,0 +1,216 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+import os +import platform +import sys +import unittest + +import six + +from astroid import exceptions +from astroid import manager +from astroid.tests import resources + + +BUILTINS = six.moves.builtins.__name__ + + +def _get_file_from_object(obj): + if platform.python_implementation() == 'Jython': + return obj.__file__.split("$py.class")[0] + ".py" + if sys.version_info > (3, 0): + return obj.__file__ + if not obj.__file__.endswith(".py"): + return obj.__file__[:-1] + return obj.__file__ + + +class AstroidManagerTest(resources.SysPathSetup, + resources.AstroidCacheSetupMixin, + unittest.TestCase): + + def setUp(self): + super(AstroidManagerTest, self).setUp() + self.manager = manager.AstroidManager() + self.manager.clear_cache(self._builtins) # take care of borg + + def test_ast_from_file(self): + filepath = unittest.__file__ + astroid = self.manager.ast_from_file(filepath) + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_cache(self): + filepath = unittest.__file__ + self.manager.ast_from_file(filepath) + astroid = self.manager.ast_from_file('unhandledName', 'unittest') + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_astro_builder(self): + filepath = unittest.__file__ + astroid = self.manager.ast_from_file(filepath, None, True, True) + self.assertEqual(astroid.name, 'unittest') + self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_file_name_astro_builder_exception(self): + self.assertRaises(exceptions.AstroidBuildingException, + self.manager.ast_from_file, 'unhandledName') + + def test_do_not_expose_main(self): + obj = self.manager.ast_from_module_name('__main__') + self.assertEqual(obj.name, '__main__') + self.assertEqual(obj.items(), []) + + def test_ast_from_module_name(self): + astroid = self.manager.ast_from_module_name('unittest') + self.assertEqual(astroid.name, 'unittest') 
+ self.assertIn('unittest', self.manager.astroid_cache) + + def test_ast_from_module_name_not_python_source(self): + astroid = self.manager.ast_from_module_name('time') + self.assertEqual(astroid.name, 'time') + self.assertIn('time', self.manager.astroid_cache) + self.assertEqual(astroid.pure_python, False) + + def test_ast_from_module_name_astro_builder_exception(self): + self.assertRaises(exceptions.AstroidBuildingException, + self.manager.ast_from_module_name, + 'unhandledModule') + + def _test_ast_from_zip(self, archive): + origpath = sys.path[:] + sys.modules.pop('mypypa', None) + archive_path = resources.find(archive) + sys.path.insert(0, archive_path) + try: + module = self.manager.ast_from_module_name('mypypa') + self.assertEqual(module.name, 'mypypa') + end = os.path.join(archive, 'mypypa') + self.assertTrue(module.source_file.endswith(end), + "%s doesn't endswith %s" % (module.source_file, end)) + finally: + # remove the module, else after importing egg, we don't get the zip + if 'mypypa' in self.manager.astroid_cache: + del self.manager.astroid_cache['mypypa'] + del self.manager._mod_file_cache[('mypypa', None)] + if archive_path in sys.path_importer_cache: + del sys.path_importer_cache[archive_path] + sys.path = origpath + + def test_ast_from_module_name_egg(self): + self._test_ast_from_zip( + os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.egg')]) + ) + + def test_ast_from_module_name_zip(self): + self._test_ast_from_zip( + os.path.sep.join(['data', os.path.normcase('MyPyPa-0.1.0-py2.5.zip')]) + ) + + def test_zip_import_data(self): + """check if zip_import_data works""" + filepath = resources.find('data/MyPyPa-0.1.0-py2.5.zip/mypypa') + astroid = self.manager.zip_import_data(filepath) + self.assertEqual(astroid.name, 'mypypa') + + def test_zip_import_data_without_zipimport(self): + """check if zip_import_data return None without zipimport""" + self.assertEqual(self.manager.zip_import_data('path'), None) + + def 
test_file_from_module(self):
+        """check if the unittest filepath is equal to the result of the method"""
+        self.assertEqual(
+            _get_file_from_object(unittest),
+            self.manager.file_from_module_name('unittest', None)[0])
+
+    def test_file_from_module_name_astro_building_exception(self):
+        """check that the method raises an exception with a wrong module name"""
+        self.assertRaises(exceptions.AstroidBuildingException,
+                          self.manager.file_from_module_name, 'unhandledModule', None)
+
+    def test_ast_from_module(self):
+        astroid = self.manager.ast_from_module(unittest)
+        self.assertEqual(astroid.pure_python, True)
+        import time
+        astroid = self.manager.ast_from_module(time)
+        self.assertEqual(astroid.pure_python, False)
+
+    def test_ast_from_module_cache(self):
+        """check if the module is in the cache manager"""
+        astroid = self.manager.ast_from_module(unittest)
+        self.assertEqual(astroid.name, 'unittest')
+        self.assertIn('unittest', self.manager.astroid_cache)
+
+    def test_ast_from_class(self):
+        astroid = self.manager.ast_from_class(int)
+        self.assertEqual(astroid.name, 'int')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+
+        astroid = self.manager.ast_from_class(object)
+        self.assertEqual(astroid.name, 'object')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+        self.assertIn('__setattr__', astroid)
+
+    def test_ast_from_class_with_module(self):
+        """check if the method works with the module name"""
+        astroid = self.manager.ast_from_class(int, int.__module__)
+        self.assertEqual(astroid.name, 'int')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+
+        astroid = self.manager.ast_from_class(object, object.__module__)
+        self.assertEqual(astroid.name, 'object')
+        self.assertEqual(astroid.parent.frame().name, BUILTINS)
+        self.assertIn('__setattr__', astroid)
+
+    def test_ast_from_class_attr_error(self):
+        """pass a wrong class to the ast_from_class method"""
+        self.assertRaises(exceptions.AstroidBuildingException,
+                          self.manager.ast_from_class, 
None) + + def testFailedImportHooks(self): + def hook(modname): + if modname == 'foo.bar': + return unittest + else: + raise exceptions.AstroidBuildingException() + + with self.assertRaises(exceptions.AstroidBuildingException): + self.manager.ast_from_module_name('foo.bar') + self.manager.register_failed_import_hook(hook) + self.assertEqual(unittest, self.manager.ast_from_module_name('foo.bar')) + with self.assertRaises(exceptions.AstroidBuildingException): + self.manager.ast_from_module_name('foo.bar.baz') + del self.manager._failed_import_hooks[0] + + +class BorgAstroidManagerTC(unittest.TestCase): + + def test_borg(self): + """test that the AstroidManager is really a borg, i.e. that two different + instances has same cache""" + first_manager = manager.AstroidManager() + built = first_manager.ast_from_module_name(BUILTINS) + + second_manager = manager.AstroidManager() + second_built = second_manager.ast_from_module_name(BUILTINS) + self.assertIs(built, second_built) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_modutils.py b/pymode/libs/astroid/tests/unittest_modutils.py new file mode 100644 index 00000000..dffc3b8d --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_modutils.py @@ -0,0 +1,269 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) any +# later version. +# +# astroid is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +""" +unit tests for module modutils (module manipulation utilities) +""" +import os +import sys +import unittest + +from astroid import modutils +from astroid.tests import resources + + +def _get_file_from_object(obj): + return modutils._path_from_filename(obj.__file__) + + +class ModuleFileTest(unittest.TestCase): + package = "mypypa" + + def tearDown(self): + for k in list(sys.path_importer_cache.keys()): + if 'MyPyPa' in k: + del sys.path_importer_cache[k] + + def test_find_zipped_module(self): + mtype, mfile = modutils._module_file( + [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.zip')]) + self.assertEqual(mtype, modutils.PY_ZIPMODULE) + self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.zip", self.package]) + + def test_find_egg_module(self): + mtype, mfile = modutils._module_file( + [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.egg')]) + self.assertEqual(mtype, modutils.PY_ZIPMODULE) + self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.egg", self.package]) + + +class LoadModuleFromNameTest(unittest.TestCase): + """ load a python module from it's name """ + + def test_knownValues_load_module_from_name_1(self): + self.assertEqual(modutils.load_module_from_name('sys'), sys) + + def test_knownValues_load_module_from_name_2(self): + self.assertEqual(modutils.load_module_from_name('os.path'), os.path) + + def test_raise_load_module_from_name_1(self): + self.assertRaises(ImportError, + modutils.load_module_from_name, 'os.path', use_sys=0) + + +class GetModulePartTest(unittest.TestCase): + """given a dotted name return the module part of the name""" + + def test_knownValues_get_module_part_1(self): + self.assertEqual(modutils.get_module_part('astroid.modutils'), + 'astroid.modutils') + + def test_knownValues_get_module_part_2(self): + 
self.assertEqual(modutils.get_module_part('astroid.modutils.get_module_part'), + 'astroid.modutils') + + def test_knownValues_get_module_part_3(self): + """relative import from given file""" + self.assertEqual(modutils.get_module_part('node_classes.AssName', + modutils.__file__), 'node_classes') + + def test_knownValues_get_compiled_module_part(self): + self.assertEqual(modutils.get_module_part('math.log10'), 'math') + self.assertEqual(modutils.get_module_part('math.log10', __file__), 'math') + + def test_knownValues_get_builtin_module_part(self): + self.assertEqual(modutils.get_module_part('sys.path'), 'sys') + self.assertEqual(modutils.get_module_part('sys.path', '__file__'), 'sys') + + def test_get_module_part_exception(self): + self.assertRaises(ImportError, modutils.get_module_part, 'unknown.module', + modutils.__file__) + + +class ModPathFromFileTest(unittest.TestCase): + """ given an absolute file path return the python module's path as a list """ + + def test_knownValues_modpath_from_file_1(self): + from xml.etree import ElementTree + self.assertEqual(modutils.modpath_from_file(ElementTree.__file__), + ['xml', 'etree', 'ElementTree']) + + def test_knownValues_modpath_from_file_2(self): + self.assertEqual(modutils.modpath_from_file('unittest_modutils.py', + {os.getcwd(): 'arbitrary.pkg'}), + ['arbitrary', 'pkg', 'unittest_modutils']) + + def test_raise_modpath_from_file_Exception(self): + self.assertRaises(Exception, modutils.modpath_from_file, '/turlututu') + + +class LoadModuleFromPathTest(resources.SysPathSetup, unittest.TestCase): + + def test_do_not_load_twice(self): + modutils.load_module_from_modpath(['data', 'lmfp', 'foo']) + modutils.load_module_from_modpath(['data', 'lmfp']) + self.assertEqual(len(sys.just_once), 1) + del sys.just_once + + +class FileFromModPathTest(resources.SysPathSetup, unittest.TestCase): + """given a mod path (i.e. 
split module / package name), return the
+    corresponding file, giving priority to source file over precompiled file
+    if it exists"""
+
+    def test_site_packages(self):
+        filename = _get_file_from_object(modutils)
+        result = modutils.file_from_modpath(['astroid', 'modutils'])
+        self.assertEqual(os.path.realpath(result), os.path.realpath(filename))
+
+    def test_std_lib(self):
+        from os import path
+        self.assertEqual(os.path.realpath(modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py')),
+                         os.path.realpath(path.__file__.replace('.pyc', '.py')))
+
+    def test_xmlplus(self):
+        try:
+            # don't fail if pyxml isn't installed
+            from xml.dom import ext
+        except ImportError:
+            pass
+        else:
+            self.assertEqual(os.path.realpath(modutils.file_from_modpath(['xml', 'dom', 'ext']).replace('.pyc', '.py')),
+                             os.path.realpath(ext.__file__.replace('.pyc', '.py')))
+
+    def test_builtin(self):
+        self.assertEqual(modutils.file_from_modpath(['sys']),
+                         None)
+
+
+    def test_unexisting(self):
+        self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu'])
+
+    def test_unicode_in_package_init(self):
+        # file_from_modpath should not crash when reading an __init__
+        # file with unicode characters.
+        modutils.file_from_modpath(["data", "unicode_package", "core"])
+
+
+class GetSourceFileTest(unittest.TestCase):
+
+    def test(self):
+        filename = _get_file_from_object(os.path)
+        self.assertEqual(modutils.get_source_file(os.path.__file__),
+                         os.path.normpath(filename))
+
+    def test_raise(self):
+        self.assertRaises(modutils.NoSourceFile, modutils.get_source_file, 'whatever')
+
+
+class StandardLibModuleTest(resources.SysPathSetup, unittest.TestCase):
+    """
+    return true if the module may be considered as a module from the standard
+    library
+    """
+
+    def test_datetime(self):
+        # This is an interesting example, since datetime, on pypy,
+        # is under lib_pypy, rather than the usual Lib directory.
+ self.assertTrue(modutils.is_standard_module('datetime')) + + def test_builtins(self): + if sys.version_info < (3, 0): + self.assertEqual(modutils.is_standard_module('__builtin__'), True) + self.assertEqual(modutils.is_standard_module('builtins'), False) + else: + self.assertEqual(modutils.is_standard_module('__builtin__'), False) + self.assertEqual(modutils.is_standard_module('builtins'), True) + + def test_builtin(self): + self.assertEqual(modutils.is_standard_module('sys'), True) + self.assertEqual(modutils.is_standard_module('marshal'), True) + + def test_nonstandard(self): + self.assertEqual(modutils.is_standard_module('astroid'), False) + + def test_unknown(self): + self.assertEqual(modutils.is_standard_module('unknown'), False) + + def test_4(self): + self.assertEqual(modutils.is_standard_module('hashlib'), True) + self.assertEqual(modutils.is_standard_module('pickle'), True) + self.assertEqual(modutils.is_standard_module('email'), True) + self.assertEqual(modutils.is_standard_module('io'), sys.version_info >= (2, 6)) + self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) + self.assertEqual(modutils.is_standard_module('unicodedata'), True) + + def test_custom_path(self): + datadir = resources.find('') + if datadir.startswith(modutils.EXT_LIB_DIR): + self.skipTest('known breakage of is_standard_module on installed package') + self.assertEqual(modutils.is_standard_module('data.module', (datadir,)), True) + self.assertEqual(modutils.is_standard_module('data.module', (os.path.abspath(datadir),)), True) + + def test_failing_edge_cases(self): + from xml import etree + # using a subpackage/submodule path as std_path argument + self.assertEqual(modutils.is_standard_module('xml.etree', etree.__path__), False) + # using a module + object name as modname argument + self.assertEqual(modutils.is_standard_module('sys.path'), True) + # this is because only the first package/module is considered + 
self.assertEqual(modutils.is_standard_module('sys.whatever'), True) + self.assertEqual(modutils.is_standard_module('xml.whatever', etree.__path__), False) + + +class IsRelativeTest(unittest.TestCase): + + + def test_knownValues_is_relative_1(self): + import email + self.assertEqual(modutils.is_relative('utils', email.__path__[0]), + True) + + def test_knownValues_is_relative_2(self): + from xml.etree import ElementTree + self.assertEqual(modutils.is_relative('ElementPath', ElementTree.__file__), + True) + + def test_knownValues_is_relative_3(self): + import astroid + self.assertEqual(modutils.is_relative('astroid', astroid.__path__[0]), + False) + + +class GetModuleFilesTest(unittest.TestCase): + + def test_get_module_files_1(self): + package = resources.find('data/find_test') + modules = set(modutils.get_module_files(package, [])) + expected = ['__init__.py', 'module.py', 'module2.py', + 'noendingnewline.py', 'nonregr.py'] + self.assertEqual(modules, + {os.path.join(package, x) for x in expected}) + + def test_load_module_set_attribute(self): + import xml.etree.ElementTree + import xml + del xml.etree.ElementTree + del sys.modules['xml.etree.ElementTree'] + m = modutils.load_module_from_modpath(['xml', 'etree', 'ElementTree']) + self.assertTrue(hasattr(xml, 'etree')) + self.assertTrue(hasattr(xml.etree, 'ElementTree')) + self.assertTrue(m is xml.etree.ElementTree) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_nodes.py b/pymode/libs/astroid/tests/unittest_nodes.py new file mode 100644 index 00000000..6fa4b6f3 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_nodes.py @@ -0,0 +1,764 @@ +# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for specific behaviour of astroid nodes +""" +import os +import sys +import textwrap +import unittest +import warnings + +import six + +from astroid import bases +from astroid import builder +from astroid import context as contextmod +from astroid import exceptions +from astroid import node_classes +from astroid import nodes +from astroid import parse +from astroid import util +from astroid import test_utils +from astroid import transforms +from astroid.tests import resources + + +abuilder = builder.AstroidBuilder() +BUILTINS = six.moves.builtins.__name__ + + +class AsStringTest(resources.SysPathSetup, unittest.TestCase): + + def test_tuple_as_string(self): + def build(string): + return abuilder.string_build(string).body[0].value + + self.assertEqual(build('1,').as_string(), '(1, )') + self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') + self.assertEqual(build('(1, )').as_string(), '(1, )') + self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') + + def test_as_string_for_list_containing_uninferable(self): + node = test_utils.extract_node(''' + def foo(arg): + bar = [arg] * 1 + ''') + binop = node.body[0].value + inferred = next(binop.infer()) + self.assertEqual(inferred.as_string(), '[Uninferable]') + self.assertEqual(binop.as_string(), '([arg]) * (1)') + + def test_frozenset_as_string(self): + nodes = 
test_utils.extract_node(''' + frozenset((1, 2, 3)) #@ + frozenset({1, 2, 3}) #@ + frozenset([1, 2, 3,]) #@ + + frozenset(None) #@ + frozenset(1) #@ + ''') + nodes = [next(node.infer()) for node in nodes] + + self.assertEqual(nodes[0].as_string(), 'frozenset((1, 2, 3))') + self.assertEqual(nodes[1].as_string(), 'frozenset({1, 2, 3})') + self.assertEqual(nodes[2].as_string(), 'frozenset([1, 2, 3])') + + self.assertNotEqual(nodes[3].as_string(), 'frozenset(None)') + self.assertNotEqual(nodes[4].as_string(), 'frozenset(1)') + + @test_utils.require_version(minver='3.0') + def test_func_signature_issue_185(self): + code = textwrap.dedent(''' + def test(a, b, c=42, *, x=42, **kwargs): + print(a, b, c, args) + ''') + node = parse(code) + self.assertEqual(node.as_string().strip(), code.strip()) + def test_varargs_kwargs_as_string(self): + ast = abuilder.string_build('raise_string(*args, **kwargs)').body[0] + self.assertEqual(ast.as_string(), 'raise_string(*args, **kwargs)') + + def test_module_as_string(self): + """check as_string on a whole module prepared to be returned identically + """ + module = resources.build_file('data/module.py', 'data.module') + with open(resources.find('data/module.py'), 'r') as fobj: + self.assertMultiLineEqual(module.as_string(), fobj.read()) + + def test_module2_as_string(self): + """check as_string on a whole module prepared to be returned identically + """ + module2 = resources.build_file('data/module2.py', 'data.module2') + with open(resources.find('data/module2.py'), 'r') as fobj: + self.assertMultiLineEqual(module2.as_string(), fobj.read()) + + def test_as_string(self): + """check as_string for python syntax >= 2.7""" + code = '''one_two = {1, 2} +b = {v: k for (k, v) in enumerate('string')} +cdd = {k for k in b}\n\n''' + ast = abuilder.string_build(code) + self.assertMultiLineEqual(ast.as_string(), code) + + @test_utils.require_version('3.0') + def test_3k_as_string(self): + """check as_string for python 3k syntax""" + code = '''print() 
+ +def function(var): + nonlocal counter + try: + hello + except NameError as nexc: + (*hell, o) = b'hello' + raise AttributeError from nexc +\n''' + ast = abuilder.string_build(code) + self.assertEqual(ast.as_string(), code) + + @test_utils.require_version('3.0') + @unittest.expectedFailure + def test_3k_annotations_and_metaclass(self): + code_annotations = textwrap.dedent(''' + def function(var:int): + nonlocal counter + + class Language(metaclass=Natural): + """natural language""" + ''') + + ast = abuilder.string_build(code_annotations) + self.assertEqual(ast.as_string(), code_annotations) + + def test_ellipsis(self): + ast = abuilder.string_build('a[...]').body[0] + self.assertEqual(ast.as_string(), 'a[...]') + + def test_slices(self): + for code in ('a[0]', 'a[1:3]', 'a[:-1:step]', 'a[:,newaxis]', + 'a[newaxis,:]', 'del L[::2]', 'del A[1]', 'del Br[:]'): + ast = abuilder.string_build(code).body[0] + self.assertEqual(ast.as_string(), code) + + def test_slice_and_subscripts(self): + code = """a[:1] = bord[2:] +a[:1] = bord[2:] +del bree[3:d] +bord[2:] +del av[d::f], a[df:] +a[:1] = bord[2:] +del SRC[::1,newaxis,1:] +tous[vals] = 1010 +del thousand[key] +del a[::2], a[:-1:step] +del Fee.form[left:] +aout.vals = miles.of_stuff +del (ccok, (name.thing, foo.attrib.value)), Fee.form[left:] +if all[1] == bord[0:]: + pass\n\n""" + ast = abuilder.string_build(code) + self.assertEqual(ast.as_string(), code) + + +class _NodeTest(unittest.TestCase): + """test transformation of If Node""" + CODE = None + + @property + def astroid(self): + try: + return self.__class__.__dict__['CODE_Astroid'] + except KeyError: + astroid = builder.parse(self.CODE) + self.__class__.CODE_Astroid = astroid + return astroid + + +class IfNodeTest(_NodeTest): + """test transformation of If Node""" + CODE = """ + if 0: + print() + + if True: + print() + else: + pass + + if "": + print() + elif []: + raise + + if 1: + print() + elif True: + print() + elif func(): + pass + else: + raise + """ + + def 
test_if_elif_else_node(self): + """test transformation for If node""" + self.assertEqual(len(self.astroid.body), 4) + for stmt in self.astroid.body: + self.assertIsInstance(stmt, nodes.If) + self.assertFalse(self.astroid.body[0].orelse) # simple If + self.assertIsInstance(self.astroid.body[1].orelse[0], nodes.Pass) # If / else + self.assertIsInstance(self.astroid.body[2].orelse[0], nodes.If) # If / elif + self.assertIsInstance(self.astroid.body[3].orelse[0].orelse[0], nodes.If) + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.block_range(1), (0, 22)) + self.assertEqual(self.astroid.block_range(10), (0, 22)) # XXX (10, 22) ? + self.assertEqual(self.astroid.body[1].block_range(5), (5, 6)) + self.assertEqual(self.astroid.body[1].block_range(6), (6, 6)) + self.assertEqual(self.astroid.body[1].orelse[0].block_range(7), (7, 8)) + self.assertEqual(self.astroid.body[1].orelse[0].block_range(8), (8, 8)) + + +class TryExceptNodeTest(_NodeTest): + CODE = """ + try: + print ('pouet') + except IOError: + pass + except UnicodeError: + print() + else: + print() + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 8)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 8)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) + self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) + self.assertEqual(self.astroid.body[0].block_range(7), (7, 7)) + self.assertEqual(self.astroid.body[0].block_range(8), (8, 8)) + + +class TryFinallyNodeTest(_NodeTest): + CODE = """ + try: + print ('pouet') + finally: + print ('pouet') + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 4)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + 
self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + + +class TryExceptFinallyNodeTest(_NodeTest): + CODE = """ + try: + print('pouet') + except Exception: + print ('oops') + finally: + print ('pouet') + """ + + def test_block_range(self): + # XXX ensure expected values + self.assertEqual(self.astroid.body[0].block_range(1), (1, 6)) + self.assertEqual(self.astroid.body[0].block_range(2), (2, 2)) + self.assertEqual(self.astroid.body[0].block_range(3), (3, 4)) + self.assertEqual(self.astroid.body[0].block_range(4), (4, 4)) + self.assertEqual(self.astroid.body[0].block_range(5), (5, 5)) + self.assertEqual(self.astroid.body[0].block_range(6), (6, 6)) + + +@unittest.skipIf(six.PY3, "Python 2 specific test.") +class TryExcept2xNodeTest(_NodeTest): + CODE = """ + try: + hello + except AttributeError, (retval, desc): + pass + """ + + + def test_tuple_attribute(self): + handler = self.astroid.body[0].handlers[0] + self.assertIsInstance(handler.name, nodes.Tuple) + + +class ImportNodeTest(resources.SysPathSetup, unittest.TestCase): + def setUp(self): + super(ImportNodeTest, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + + def test_import_self_resolve(self): + myos = next(self.module2.igetattr('myos')) + self.assertTrue(isinstance(myos, nodes.Module), myos) + self.assertEqual(myos.name, 'os') + self.assertEqual(myos.qname(), 'os') + self.assertEqual(myos.pytype(), '%s.module' % BUILTINS) + + def test_from_self_resolve(self): + namenode = next(self.module.igetattr('NameNode')) + self.assertTrue(isinstance(namenode, nodes.ClassDef), namenode) + self.assertEqual(namenode.root().name, 'astroid.node_classes') + self.assertEqual(namenode.qname(), 'astroid.node_classes.Name') + self.assertEqual(namenode.pytype(), '%s.type' % BUILTINS) + abspath = next(self.module2.igetattr('abspath')) + 
self.assertTrue(isinstance(abspath, nodes.FunctionDef), abspath) + self.assertEqual(abspath.root().name, 'os.path') + self.assertEqual(abspath.qname(), 'os.path.abspath') + self.assertEqual(abspath.pytype(), '%s.function' % BUILTINS) + + def test_real_name(self): + from_ = self.module['NameNode'] + self.assertEqual(from_.real_name('NameNode'), 'Name') + imp_ = self.module['os'] + self.assertEqual(imp_.real_name('os'), 'os') + self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'os.path') + imp_ = self.module['NameNode'] + self.assertEqual(imp_.real_name('NameNode'), 'Name') + self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'Name') + imp_ = self.module2['YO'] + self.assertEqual(imp_.real_name('YO'), 'YO') + self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'data') + + def test_as_string(self): + ast = self.module['modutils'] + self.assertEqual(ast.as_string(), "from astroid import modutils") + ast = self.module['NameNode'] + self.assertEqual(ast.as_string(), "from astroid.node_classes import Name as NameNode") + ast = self.module['os'] + self.assertEqual(ast.as_string(), "import os.path") + code = """from . import here +from .. import door +from .store import bread +from ..cave import wine\n\n""" + ast = abuilder.string_build(code) + self.assertMultiLineEqual(ast.as_string(), code) + + def test_bad_import_inference(self): + # Explication of bug + '''When we import PickleError from nonexistent, a call to the infer + method of this From node will be made by unpack_infer. + inference.infer_from will try to import this module, which will fail and + raise a InferenceException (by mixins.do_import_module). The infer_name + will catch this exception and yield and YES instead. 
+ ''' + + code = ''' + try: + from pickle import PickleError + except ImportError: + from nonexistent import PickleError + + try: + pass + except PickleError: + pass + ''' + astroid = builder.parse(code) + handler_type = astroid.body[1].handlers[0].type + + excs = list(node_classes.unpack_infer(handler_type)) + # The number of returned object can differ on Python 2 + # and Python 3. In one version, an additional item will + # be returned, from the _pickle module, which is not + # present in the other version. + self.assertIsInstance(excs[0], nodes.ClassDef) + self.assertEqual(excs[0].name, 'PickleError') + self.assertIs(excs[-1], util.YES) + + def test_absolute_import(self): + astroid = resources.build_file('data/absimport.py') + ctx = contextmod.InferenceContext() + # will fail if absolute import failed + ctx.lookupname = 'message' + next(astroid['message'].infer(ctx)) + ctx.lookupname = 'email' + m = next(astroid['email'].infer(ctx)) + self.assertFalse(m.source_file.startswith(os.path.join('data', 'email.py'))) + + def test_more_absolute_import(self): + astroid = resources.build_file('data/module1abs/__init__.py', 'data.module1abs') + self.assertIn('sys', astroid._locals) + + +class CmpNodeTest(unittest.TestCase): + def test_as_string(self): + ast = abuilder.string_build("a == 2").body[0] + self.assertEqual(ast.as_string(), "a == 2") + + +class ConstNodeTest(unittest.TestCase): + + def _test(self, value): + node = nodes.const_factory(value) + self.assertIsInstance(node._proxied, nodes.ClassDef) + self.assertEqual(node._proxied.name, value.__class__.__name__) + self.assertIs(node.value, value) + self.assertTrue(node._proxied.parent) + self.assertEqual(node._proxied.root().name, value.__class__.__module__) + + def test_none(self): + self._test(None) + + def test_bool(self): + self._test(True) + + def test_int(self): + self._test(1) + + def test_float(self): + self._test(1.0) + + def test_complex(self): + self._test(1.0j) + + def test_str(self): + self._test('a') + 
+ def test_unicode(self): + self._test(u'a') + + +class NameNodeTest(unittest.TestCase): + def test_assign_to_True(self): + """test that True and False assignements don't crash""" + code = """ + True = False + def hello(False): + pass + del True + """ + if sys.version_info >= (3, 0): + with self.assertRaises(exceptions.AstroidBuildingException): + builder.parse(code) + else: + ast = builder.parse(code) + assign_true = ast['True'] + self.assertIsInstance(assign_true, nodes.AssignName) + self.assertEqual(assign_true.name, "True") + del_true = ast.body[2].targets[0] + self.assertIsInstance(del_true, nodes.DelName) + self.assertEqual(del_true.name, "True") + + +class ArgumentsNodeTC(unittest.TestCase): + def test_linenumbering(self): + ast = builder.parse(''' + def func(a, + b): pass + x = lambda x: None + ''') + self.assertEqual(ast['func'].args.fromlineno, 2) + self.assertFalse(ast['func'].args.is_statement) + xlambda = next(ast['x'].infer()) + self.assertEqual(xlambda.args.fromlineno, 4) + self.assertEqual(xlambda.args.tolineno, 4) + self.assertFalse(xlambda.args.is_statement) + if sys.version_info < (3, 0): + self.assertEqual(ast['func'].args.tolineno, 3) + else: + self.skipTest('FIXME http://bugs.python.org/issue10445 ' + '(no line number on function args)') + + def test_builtin_fromlineno_missing(self): + cls = test_utils.extract_node(''' + class Foo(Exception): #@ + pass + ''') + new = cls.getattr('__new__')[-1] + self.assertEqual(new.args.fromlineno, 0) + + +class UnboundMethodNodeTest(unittest.TestCase): + + def test_no_super_getattr(self): + # This is a test for issue + # https://bitbucket.org/logilab/astroid/issue/91, which tests + # that UnboundMethod doesn't call super when doing .getattr. 
+ + ast = builder.parse(''' + class A(object): + def test(self): + pass + meth = A.test + ''') + node = next(ast['meth'].infer()) + with self.assertRaises(exceptions.NotFoundError): + node.getattr('__missssing__') + name = node.getattr('__name__')[0] + self.assertIsInstance(name, nodes.Const) + self.assertEqual(name.value, 'test') + + +class BoundMethodNodeTest(unittest.TestCase): + + def test_is_property(self): + ast = builder.parse(''' + import abc + + def cached_property(): + # Not a real decorator, but we don't care + pass + def reify(): + # Same as cached_property + pass + def lazy_property(): + pass + def lazyproperty(): + pass + def lazy(): pass + class A(object): + @property + def builtin_property(self): + return 42 + @abc.abstractproperty + def abc_property(self): + return 42 + @cached_property + def cached_property(self): return 42 + @reify + def reified(self): return 42 + @lazy_property + def lazy_prop(self): return 42 + @lazyproperty + def lazyprop(self): return 42 + def not_prop(self): pass + @lazy + def decorated_with_lazy(self): return 42 + + cls = A() + builtin_property = cls.builtin_property + abc_property = cls.abc_property + cached_p = cls.cached_property + reified = cls.reified + not_prop = cls.not_prop + lazy_prop = cls.lazy_prop + lazyprop = cls.lazyprop + decorated_with_lazy = cls.decorated_with_lazy + ''') + for prop in ('builtin_property', 'abc_property', 'cached_p', 'reified', + 'lazy_prop', 'lazyprop', 'decorated_with_lazy'): + inferred = next(ast[prop].infer()) + self.assertIsInstance(inferred, nodes.Const, prop) + self.assertEqual(inferred.value, 42, prop) + + inferred = next(ast['not_prop'].infer()) + self.assertIsInstance(inferred, bases.BoundMethod) + + +class AliasesTest(unittest.TestCase): + + def setUp(self): + self.transformer = transforms.TransformVisitor() + + def parse_transform(self, code): + module = parse(code, apply_transforms=False) + return self.transformer.visit(module) + + def test_aliases(self): + def test_from(node): 
+ node.names = node.names + [('absolute_import', None)] + return node + + def test_class(node): + node.name = 'Bar' + return node + + def test_function(node): + node.name = 'another_test' + return node + + def test_callfunc(node): + if node.func.name == 'Foo': + node.func.name = 'Bar' + return node + + def test_assname(node): + if node.name == 'foo': + n = nodes.AssignName() + n.name = 'bar' + return n + def test_assattr(node): + if node.attrname == 'a': + node.attrname = 'b' + return node + + def test_getattr(node): + if node.attrname == 'a': + node.attrname = 'b' + return node + + def test_genexpr(node): + if node.elt.value == 1: + node.elt = nodes.Const(2) + return node + + self.transformer.register_transform(nodes.From, test_from) + self.transformer.register_transform(nodes.Class, test_class) + self.transformer.register_transform(nodes.Function, test_function) + self.transformer.register_transform(nodes.CallFunc, test_callfunc) + self.transformer.register_transform(nodes.AssName, test_assname) + self.transformer.register_transform(nodes.AssAttr, test_assattr) + self.transformer.register_transform(nodes.Getattr, test_getattr) + self.transformer.register_transform(nodes.GenExpr, test_genexpr) + + string = ''' + from __future__ import print_function + + class Foo: pass + + def test(a): return a + + foo = Foo() + foo.a = test(42) + foo.a + (1 for _ in range(0, 42)) + ''' + + module = self.parse_transform(string) + + self.assertEqual(len(module.body[0].names), 2) + self.assertIsInstance(module.body[0], nodes.ImportFrom) + self.assertEqual(module.body[1].name, 'Bar') + self.assertIsInstance(module.body[1], nodes.ClassDef) + self.assertEqual(module.body[2].name, 'another_test') + self.assertIsInstance(module.body[2], nodes.FunctionDef) + self.assertEqual(module.body[3].targets[0].name, 'bar') + self.assertIsInstance(module.body[3].targets[0], nodes.AssignName) + self.assertEqual(module.body[3].value.func.name, 'Bar') + self.assertIsInstance(module.body[3].value, 
nodes.Call) + self.assertEqual(module.body[4].targets[0].attrname, 'b') + self.assertIsInstance(module.body[4].targets[0], nodes.AssignAttr) + self.assertIsInstance(module.body[5], nodes.Expr) + self.assertEqual(module.body[5].value.attrname, 'b') + self.assertIsInstance(module.body[5].value, nodes.Attribute) + self.assertEqual(module.body[6].value.elt.value, 2) + self.assertIsInstance(module.body[6].value, nodes.GeneratorExp) + + @unittest.skipIf(six.PY3, "Python 3 doesn't have Repr nodes.") + def test_repr(self): + def test_backquote(node): + node.value.name = 'bar' + return node + + self.transformer.register_transform(nodes.Backquote, test_backquote) + + module = self.parse_transform('`foo`') + + self.assertEqual(module.body[0].value.value.name, 'bar') + self.assertIsInstance(module.body[0].value, nodes.Repr) + + +class DeprecationWarningsTest(unittest.TestCase): + def test_asstype_warnings(self): + string = ''' + class C: pass + c = C() + with warnings.catch_warnings(record=True) as w: + pass + ''' + module = parse(string) + filter_stmts_mixin = module.body[0] + assign_type_mixin = module.body[1].targets[0] + parent_assign_type_mixin = module.body[2] + + warnings.simplefilter('always') + + with warnings.catch_warnings(record=True) as w: + filter_stmts_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with warnings.catch_warnings(record=True) as w: + assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with warnings.catch_warnings(record=True) as w: + parent_assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + + def test_isinstance_warnings(self): + msg_format = ("%r is deprecated and slated for removal in astroid " + "2.0, use %r instead") + for cls in (nodes.Discard, nodes.Backquote, nodes.AssName, + nodes.AssAttr, nodes.Getattr, nodes.CallFunc, nodes.From): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + 
isinstance(42, cls) + self.assertIsInstance(w[0].message, PendingDeprecationWarning) + actual_msg = msg_format % (cls.__class__.__name__, cls.__wrapped__.__name__) + self.assertEqual(str(w[0].message), actual_msg) + + +@test_utils.require_version('3.5') +class Python35AsyncTest(unittest.TestCase): + + def test_async_await_keywords(self): + async_def, async_for, async_with, await_node = test_utils.extract_node(''' + async def func(): #@ + async for i in range(10): #@ + f = __(await i) + async with test(): #@ + pass + ''') + self.assertIsInstance(async_def, nodes.AsyncFunctionDef) + self.assertIsInstance(async_for, nodes.AsyncFor) + self.assertIsInstance(async_with, nodes.AsyncWith) + self.assertIsInstance(await_node, nodes.Await) + self.assertIsInstance(await_node.value, nodes.Name) + + def _test_await_async_as_string(self, code): + ast_node = parse(code) + self.assertEqual(ast_node.as_string().strip(), code.strip()) + + def test_await_as_string(self): + code = textwrap.dedent(''' + async def function(): + await 42 + ''') + self._test_await_async_as_string(code) + + def test_asyncwith_as_string(self): + code = textwrap.dedent(''' + async def function(): + async with (42): + pass + ''') + self._test_await_async_as_string(code) + + def test_asyncfor_as_string(self): + code = textwrap.dedent(''' + async def function(): + async for i in range(10): + await 42 + ''') + self._test_await_async_as_string(code) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_objects.py b/pymode/libs/astroid/tests/unittest_objects.py new file mode 100644 index 00000000..62d3f4ff --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_objects.py @@ -0,0 +1,530 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +import unittest + +from astroid import bases +from astroid import exceptions +from astroid import nodes +from astroid import objects +from astroid import test_utils + + +class ObjectsTest(unittest.TestCase): + + def test_frozenset(self): + node = test_utils.extract_node(""" + frozenset({1: 2, 2: 3}) #@ + """) + infered = next(node.infer()) + self.assertIsInstance(infered, objects.FrozenSet) + + self.assertEqual(infered.pytype(), "%s.frozenset" % bases.BUILTINS) + + itered = infered.itered() + self.assertEqual(len(itered), 2) + self.assertIsInstance(itered[0], nodes.Const) + self.assertEqual([const.value for const in itered], [1, 2]) + + proxied = infered._proxied + self.assertEqual(infered.qname(), "%s.frozenset" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + +class SuperTests(unittest.TestCase): + + def test_inferring_super_outside_methods(self): + ast_nodes = test_utils.extract_node(''' + class Module(object): + pass + class StaticMethod(object): + @staticmethod + def static(): + # valid, but we don't bother with it. + return super(StaticMethod, StaticMethod) #@ + # super outside methods aren't inferred + super(Module, Module) #@ + # no argument super is not recognised outside methods as well. 
+ super() #@ + ''') + in_static = next(ast_nodes[0].value.infer()) + self.assertIsInstance(in_static, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + module_level = next(ast_nodes[1].infer()) + self.assertIsInstance(module_level, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + no_arguments = next(ast_nodes[2].infer()) + self.assertIsInstance(no_arguments, bases.Instance) + self.assertEqual(no_arguments.qname(), "%s.super" % bases.BUILTINS) + + def test_inferring_unbound_super_doesnt_work(self): + node = test_utils.extract_node(''' + class Test(object): + def __init__(self): + super(Test) #@ + ''') + unbounded = next(node.infer()) + self.assertIsInstance(unbounded, bases.Instance) + self.assertEqual(unbounded.qname(), "%s.super" % bases.BUILTINS) + + def test_use_default_inference_on_not_inferring_args(self): + ast_nodes = test_utils.extract_node(''' + class Test(object): + def __init__(self): + super(Lala, self) #@ + super(Test, lala) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + self.assertEqual(first.qname(), "%s.super" % bases.BUILTINS) + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + self.assertEqual(second.qname(), "%s.super" % bases.BUILTINS) + + @test_utils.require_version(maxver='3.0') + def test_super_on_old_style_class(self): + # super doesn't work on old style class, but leave + # that as an error for pylint. We'll infer Super objects, + # but every call will result in a failure at some point. 
+ node = test_utils.extract_node(''' + class OldStyle: + def __init__(self): + super(OldStyle, self) #@ + ''') + old = next(node.infer()) + self.assertIsInstance(old, objects.Super) + self.assertIsInstance(old.mro_pointer, nodes.ClassDef) + self.assertEqual(old.mro_pointer.name, 'OldStyle') + with self.assertRaises(exceptions.SuperError) as cm: + old.super_mro() + self.assertEqual(str(cm.exception), + "Unable to call super on old-style classes.") + + @test_utils.require_version(minver='3.0') + def test_no_arguments_super(self): + ast_nodes = test_utils.extract_node(''' + class First(object): pass + class Second(First): + def test(self): + super() #@ + @classmethod + def test_classmethod(cls): + super() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Second') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, nodes.ClassDef) + self.assertEqual(second.type.name, 'Second') + self.assertIsInstance(second.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + def test_super_simple_cases(self): + ast_nodes = test_utils.extract_node(''' + class First(object): pass + class Second(First): pass + class Third(First): + def test(self): + super(Third, self) #@ + super(Second, self) #@ + + # mro position and the type + super(Third, Third) #@ + super(Third, Second) #@ + super(Fourth, Fourth) #@ + + class Fourth(Third): + pass + ''') + + # .type is the object which provides the mro. + # .mro_pointer is the position in the mro from where + # the lookup should be done. 
+ + # super(Third, self) + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Third') + + # super(Second, self) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, bases.Instance) + self.assertEqual(second.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + # super(Third, Third) + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, objects.Super) + self.assertIsInstance(third.type, nodes.ClassDef) + self.assertEqual(third.type.name, 'Third') + self.assertIsInstance(third.mro_pointer, nodes.ClassDef) + self.assertEqual(third.mro_pointer.name, 'Third') + + # super(Third, second) + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, objects.Super) + self.assertIsInstance(fourth.type, nodes.ClassDef) + self.assertEqual(fourth.type.name, 'Second') + self.assertIsInstance(fourth.mro_pointer, nodes.ClassDef) + self.assertEqual(fourth.mro_pointer.name, 'Third') + + # Super(Fourth, Fourth) + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, objects.Super) + self.assertIsInstance(fifth.type, nodes.ClassDef) + self.assertEqual(fifth.type.name, 'Fourth') + self.assertIsInstance(fifth.mro_pointer, nodes.ClassDef) + self.assertEqual(fifth.mro_pointer.name, 'Fourth') + + def test_super_infer(self): + node = test_utils.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super) + reinferred = next(inferred.infer()) + self.assertIsInstance(reinferred, objects.Super) + self.assertIs(inferred, reinferred) + + def test_inferring_invalid_supers(self): 
+ ast_nodes = test_utils.extract_node(''' + class Super(object): + def __init__(self): + # MRO pointer is not a type + super(1, self) #@ + # MRO type is not a subtype + super(Super, 1) #@ + # self is not a subtype of Bupper + super(Bupper, self) #@ + class Bupper(Super): + pass + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + with self.assertRaises(exceptions.SuperError) as cm: + first.super_mro() + self.assertEqual(str(cm.exception), "The first super argument must be type.") + + for node in ast_nodes[1:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super, node) + with self.assertRaises(exceptions.SuperArgumentTypeError) as cm: + inferred.super_mro() + self.assertEqual(str(cm.exception), + "super(type, obj): obj must be an instance " + "or subtype of type", node) + + def test_proxied(self): + node = test_utils.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + infered = next(node.infer()) + proxied = infered._proxied + self.assertEqual(proxied.qname(), "%s.super" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + def test_super_bound_model(self): + ast_nodes = test_utils.extract_node(''' + class First(object): + def method(self): + pass + @classmethod + def class_method(cls): + pass + class Super_Type_Type(First): + def method(self): + super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + @classmethod + def class_method(cls): + super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + + class Super_Type_Object(First): + def method(self): + super(Super_Type_Object, self).method #@ + super(Super_Type_Object, self).class_method #@ + ''') + # Super(type, type) is the same for both functions and classmethods. 
+ first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.FunctionDef) + self.assertEqual(first.name, 'method') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + self.assertEqual(second.type, 'classmethod') + + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, nodes.FunctionDef) + self.assertEqual(third.name, 'method') + + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, bases.BoundMethod) + self.assertEqual(fourth.bound.name, 'First') + self.assertEqual(fourth.type, 'classmethod') + + # Super(type, obj) can lead to different attribute bindings + # depending on the type of the place where super was called. + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, bases.BoundMethod) + self.assertEqual(fifth.bound.name, 'First') + self.assertEqual(fifth.type, 'method') + + sixth = next(ast_nodes[5].infer()) + self.assertIsInstance(sixth, bases.BoundMethod) + self.assertEqual(sixth.bound.name, 'First') + self.assertEqual(sixth.type, 'classmethod') + + def test_super_getattr_single_inheritance(self): + ast_nodes = test_utils.extract_node(''' + class First(object): + def test(self): pass + class Second(First): + def test2(self): pass + class Third(Second): + test3 = 42 + def __init__(self): + super(Third, self).test2 #@ + super(Third, self).test #@ + # test3 is local, no MRO lookup is done. + super(Third, self).test3 #@ + super(Third, self) #@ + + # Unbounds. 
+ super(Third, Third).test2 #@ + super(Third, Third).test #@ + + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + + with self.assertRaises(exceptions.InferenceError): + next(ast_nodes[2].infer()) + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.NotFoundError): + fourth.getattr('test3') + with self.assertRaises(exceptions.NotFoundError): + next(fourth.igetattr('test3')) + + first_unbound = next(ast_nodes[4].infer()) + self.assertIsInstance(first_unbound, nodes.FunctionDef) + self.assertEqual(first_unbound.name, 'test2') + self.assertEqual(first_unbound.parent.name, 'Second') + + second_unbound = next(ast_nodes[5].infer()) + self.assertIsInstance(second_unbound, nodes.FunctionDef) + self.assertEqual(second_unbound.name, 'test') + self.assertEqual(second_unbound.parent.name, 'First') + + def test_super_invalid_mro(self): + node = test_utils.extract_node(''' + class A(object): + test = 42 + class Super(A, A): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.NotFoundError): + next(inferred.getattr('test')) + + def test_super_complex_mro(self): + ast_nodes = test_utils.extract_node(''' + class A(object): + def spam(self): return "A" + def foo(self): return "A" + @staticmethod + def static(self): pass + class B(A): + def boo(self): return "B" + def spam(self): return "B" + class C(A): + def boo(self): return "C" + class E(C, B): + def __init__(self): + super(E, self).boo #@ + super(C, self).boo #@ + super(E, self).spam #@ + super(E, self).foo #@ + super(E, self).static #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'C') + second = next(ast_nodes[1].infer()) + 
self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'B') + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, bases.BoundMethod) + self.assertEqual(third.bound.name, 'B') + fourth = next(ast_nodes[3].infer()) + self.assertEqual(fourth.bound.name, 'A') + static = next(ast_nodes[4].infer()) + self.assertIsInstance(static, nodes.FunctionDef) + self.assertEqual(static.parent.scope().name, 'A') + + def test_super_data_model(self): + ast_nodes = test_utils.extract_node(''' + class X(object): pass + class A(X): + def __init__(self): + super(A, self) #@ + super(A, A) #@ + super(X, A) #@ + ''') + first = next(ast_nodes[0].infer()) + thisclass = first.getattr('__thisclass__')[0] + self.assertIsInstance(thisclass, nodes.ClassDef) + self.assertEqual(thisclass.name, 'A') + selfclass = first.getattr('__self_class__')[0] + self.assertIsInstance(selfclass, nodes.ClassDef) + self.assertEqual(selfclass.name, 'A') + self_ = first.getattr('__self__')[0] + self.assertIsInstance(self_, bases.Instance) + self.assertEqual(self_.name, 'A') + cls = first.getattr('__class__')[0] + self.assertEqual(cls, first._proxied) + + second = next(ast_nodes[1].infer()) + thisclass = second.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'A') + self_ = second.getattr('__self__')[0] + self.assertIsInstance(self_, nodes.ClassDef) + self.assertEqual(self_.name, 'A') + + third = next(ast_nodes[2].infer()) + thisclass = third.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'X') + selfclass = third.getattr('__self_class__')[0] + self.assertEqual(selfclass.name, 'A') + + def assertEqualMro(self, klass, expected_mro): + self.assertEqual( + [member.name for member in klass.super_mro()], + expected_mro) + + def test_super_mro(self): + ast_nodes = test_utils.extract_node(''' + class A(object): pass + class B(A): pass + class C(A): pass + class E(C, B): + def __init__(self): + super(E, self) #@ + super(C, self) #@ + super(B, self) #@ + + 
super(B, 1) #@ + super(1, B) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertEqualMro(first, ['C', 'B', 'A', 'object']) + second = next(ast_nodes[1].infer()) + self.assertEqualMro(second, ['B', 'A', 'object']) + third = next(ast_nodes[2].infer()) + self.assertEqualMro(third, ['A', 'object']) + + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.SuperError): + fourth.super_mro() + fifth = next(ast_nodes[4].infer()) + with self.assertRaises(exceptions.SuperError): + fifth.super_mro() + + def test_super_yes_objects(self): + ast_nodes = test_utils.extract_node(''' + from collections import Missing + class A(object): + def __init__(self): + super(Missing, self) #@ + super(A, Missing) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + + def test_super_invalid_types(self): + node = test_utils.extract_node(''' + import collections + class A(object): + def __init__(self): + super(A, collections) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.SuperError): + inferred.super_mro() + with self.assertRaises(exceptions.SuperArgumentTypeError): + inferred.super_mro() + + def test_super_pytype_display_type_name(self): + node = test_utils.extract_node(''' + class A(object): + def __init__(self): + super(A, self) #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred.pytype(), "%s.super" % bases.BUILTINS) + self.assertEqual(inferred.display_type(), 'Super of') + self.assertEqual(inferred.name, 'A') + + def test_super_properties(self): + node = test_utils.extract_node(''' + class Foo(object): + @property + def dict(self): + return 42 + + class Bar(Foo): + @property + def dict(self): + return super(Bar, self).dict + + Bar().dict + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + +if __name__ == 
'__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_peephole.py b/pymode/libs/astroid/tests/unittest_peephole.py new file mode 100644 index 00000000..78349898 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_peephole.py @@ -0,0 +1,121 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +"""Tests for the astroid AST peephole optimizer.""" + +import ast +import textwrap +import unittest + +import astroid +from astroid import astpeephole +from astroid import builder +from astroid import manager +from astroid import test_utils +from astroid.tests import resources + + +MANAGER = manager.AstroidManager() + + +class PeepholeOptimizer(unittest.TestCase): + @classmethod + def setUpClass(cls): + MANAGER.optimize_ast = True + + @classmethod + def tearDownClass(cls): + MANAGER.optimize_ast = False + + def setUp(self): + self._optimizer = astpeephole.ASTPeepholeOptimizer() + + @staticmethod + def _get_binops(code): + module = ast.parse(textwrap.dedent(code)) + return [node.value for node in module.body + if isinstance(node, ast.Expr)] + + @test_utils.require_version(maxver='3.0') + def test_optimize_binop_unicode(self): + nodes = self._get_binops(""" + u"a" + u"b" + u"c" + + u"a" + "c" + "b" + u"a" + b"c" + """) + + result = self._optimizer.optimize_binop(nodes[0]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, u"abc") + + self.assertIsNone(self._optimizer.optimize_binop(nodes[1])) + self.assertIsNone(self._optimizer.optimize_binop(nodes[2])) + + def test_optimize_binop(self): + nodes = self._get_binops(""" + "a" + "b" + "c" + "d" + b"a" + b"b" + b"c" + b"d" + "a" + "b" + + "a" + "b" + 1 + object + var = 4 + "a" + "b" + var + "c" + "a" + "b" + "c" - "4" + "a" + "b" + "c" + "d".format() + "a" - "b" + "a" + 1 + 4 + 5 + 6 + """) + + result = self._optimizer.optimize_binop(nodes[0]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, "abcd") + + result = self._optimizer.optimize_binop(nodes[1]) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, b"abcd") + + for node in nodes[2:]: + self.assertIsNone(self._optimizer.optimize_binop(node)) + + def test_big_binop_crash(self): + # Test that we don't fail on a lot of joined strings + # through the addition operator. 
+ module = resources.build_file('data/joined_strings.py') + element = next(module['x'].infer()) + self.assertIsInstance(element, astroid.Const) + self.assertEqual(len(element.value), 61660) + + def test_optimisation_disabled(self): + try: + MANAGER.optimize_ast = False + module = builder.parse(""" + '1' + '2' + '3' + """) + self.assertIsInstance(module.body[0], astroid.Expr) + self.assertIsInstance(module.body[0].value, astroid.BinOp) + self.assertIsInstance(module.body[0].value.left, astroid.BinOp) + self.assertIsInstance(module.body[0].value.left.left, + astroid.Const) + finally: + MANAGER.optimize_ast = True + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_protocols.py b/pymode/libs/astroid/tests/unittest_protocols.py new file mode 100644 index 00000000..16745129 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_protocols.py @@ -0,0 +1,176 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +import contextlib +import unittest + +import astroid +from astroid.test_utils import extract_node, require_version +from astroid import InferenceError +from astroid import nodes +from astroid import util +from astroid.node_classes import AssignName, Const, Name, Starred + + +@contextlib.contextmanager +def _add_transform(manager, node, transform, predicate=None): + manager.register_transform(node, transform, predicate) + try: + yield + finally: + manager.unregister_transform(node, transform, predicate) + + +class ProtocolTests(unittest.TestCase): + + def assertConstNodesEqual(self, nodes_list_expected, nodes_list_got): + self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) + for node in nodes_list_got: + self.assertIsInstance(node, Const) + for node, expected_value in zip(nodes_list_got, nodes_list_expected): + self.assertEqual(expected_value, node.value) + + def assertNameNodesEqual(self, nodes_list_expected, nodes_list_got): + self.assertEqual(len(nodes_list_expected), len(nodes_list_got)) + for node in nodes_list_got: + self.assertIsInstance(node, Name) + for node, expected_name in zip(nodes_list_got, nodes_list_expected): + self.assertEqual(expected_name, node.name) + + def test_assigned_stmts_simple_for(self): + assign_stmts = extract_node(""" + for a in (1, 2, 3): #@ + pass + + for b in range(3): #@ + pass + """) + + for1_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) + assigned = list(for1_assnode.assigned_stmts()) + self.assertConstNodesEqual([1, 2, 3], assigned) + + for2_assnode = next(assign_stmts[1].nodes_of_class(AssignName)) + self.assertRaises(InferenceError, + list, for2_assnode.assigned_stmts()) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_for(self): + assign_stmts = extract_node(""" + for *a, b in ((1, 2, 3), (4, 5, 6, 7)): #@ + pass + """) + + for1_starred = next(assign_stmts.nodes_of_class(Starred)) + assigned = next(for1_starred.assigned_stmts()) + self.assertEqual(assigned, util.YES) + + def 
_get_starred_stmts(self, code): + assign_stmt = extract_node("{} #@".format(code)) + starred = next(assign_stmt.nodes_of_class(Starred)) + return next(starred.assigned_stmts()) + + def _helper_starred_expected_const(self, code, expected): + stmts = self._get_starred_stmts(code) + self.assertIsInstance(stmts, nodes.List) + stmts = stmts.elts + self.assertConstNodesEqual(expected, stmts) + + def _helper_starred_expected(self, code, expected): + stmts = self._get_starred_stmts(code) + self.assertEqual(expected, stmts) + + def _helper_starred_inference_error(self, code): + assign_stmt = extract_node("{} #@".format(code)) + starred = next(assign_stmt.nodes_of_class(Starred)) + self.assertRaises(InferenceError, list, starred.assigned_stmts()) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_assnames(self): + self._helper_starred_expected_const( + "a, *b = (1, 2, 3, 4) #@", [2, 3, 4]) + self._helper_starred_expected_const( + "*a, b = (1, 2, 3) #@", [1, 2]) + self._helper_starred_expected_const( + "a, *b, c = (1, 2, 3, 4, 5) #@", + [2, 3, 4]) + self._helper_starred_expected_const( + "a, *b = (1, 2) #@", [2]) + self._helper_starred_expected_const( + "*b, a = (1, 2) #@", [1]) + self._helper_starred_expected_const( + "[*b] = (1, 2) #@", [1, 2]) + + @require_version(minver='3.0') + def test_assigned_stmts_starred_yes(self): + # Not something iterable and known + self._helper_starred_expected("a, *b = range(3) #@", util.YES) + # Not something inferrable + self._helper_starred_expected("a, *b = balou() #@", util.YES) + # In function, unknown. + self._helper_starred_expected(""" + def test(arg): + head, *tail = arg #@""", util.YES) + # These cases aren't worth supporting. 
+ self._helper_starred_expected( + "a, (*b, c), d = (1, (2, 3, 4), 5) #@", util.YES) + + @require_version(minver='3.0') + def test_assign_stmts_starred_fails(self): + # Too many starred + self._helper_starred_inference_error("a, *b, *c = (1, 2, 3) #@") + # Too many lhs values + self._helper_starred_inference_error("a, *b, c = (1, 2) #@") + # This could be solved properly, but it complicates needlessly the + # code for assigned_stmts, without oferring real benefit. + self._helper_starred_inference_error( + "(*a, b), (c, *d) = (1, 2, 3), (4, 5, 6) #@") + + def test_assigned_stmts_assignments(self): + assign_stmts = extract_node(""" + c = a #@ + + d, e = b, c #@ + """) + + simple_assnode = next(assign_stmts[0].nodes_of_class(AssignName)) + assigned = list(simple_assnode.assigned_stmts()) + self.assertNameNodesEqual(['a'], assigned) + + assnames = assign_stmts[1].nodes_of_class(AssignName) + simple_mul_assnode_1 = next(assnames) + assigned = list(simple_mul_assnode_1.assigned_stmts()) + self.assertNameNodesEqual(['b'], assigned) + simple_mul_assnode_2 = next(assnames) + assigned = list(simple_mul_assnode_2.assigned_stmts()) + self.assertNameNodesEqual(['c'], assigned) + + def test_sequence_assigned_stmts_not_accepting_empty_node(self): + def transform(node): + node.root().locals['__all__'] = [node.value] + + manager = astroid.MANAGER + with _add_transform(manager, astroid.Assign, transform): + module = astroid.parse(''' + __all__ = ['a'] + ''') + module.wildcard_import_names() + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_python3.py b/pymode/libs/astroid/tests/unittest_python3.py new file mode 100644 index 00000000..87010571 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_python3.py @@ -0,0 +1,254 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. 
+# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +from textwrap import dedent +import unittest + +from astroid import nodes +from astroid.node_classes import Assign, Expr, YieldFrom, Name, Const +from astroid.builder import AstroidBuilder +from astroid.scoped_nodes import ClassDef, FunctionDef +from astroid.test_utils import require_version, extract_node + + +class Python3TC(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.builder = AstroidBuilder() + + @require_version('3.0') + def test_starred_notation(self): + astroid = self.builder.string_build("*a, b = [1, 2, 3]", 'test', 'test') + + # Get the star node + node = next(next(next(astroid.get_children()).get_children()).get_children()) + + self.assertTrue(isinstance(node.assign_type(), Assign)) + + @require_version('3.3') + def test_yield_from(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + """) + astroid = self.builder.string_build(body) + func = astroid.body[0] + self.assertIsInstance(func, FunctionDef) + yieldfrom_stmt = func.body[0] + + self.assertIsInstance(yieldfrom_stmt, Expr) + self.assertIsInstance(yieldfrom_stmt.value, YieldFrom) + self.assertEqual(yieldfrom_stmt.as_string(), + 'yield from iter([1, 2])') + + @require_version('3.3') + def test_yield_from_is_generator(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + """) + astroid = self.builder.string_build(body) + func = 
astroid.body[0] + self.assertIsInstance(func, FunctionDef) + self.assertTrue(func.is_generator()) + + @require_version('3.3') + def test_yield_from_as_string(self): + body = dedent(""" + def func(): + yield from iter([1, 2]) + value = yield from other() + """) + astroid = self.builder.string_build(body) + func = astroid.body[0] + self.assertEqual(func.as_string().strip(), body.strip()) + + # metaclass tests + + @require_version('3.0') + def test_simple_metaclass(self): + astroid = self.builder.string_build("class Test(metaclass=type): pass") + klass = astroid.body[0] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'type') + + @require_version('3.0') + def test_metaclass_error(self): + astroid = self.builder.string_build("class Test(metaclass=typ): pass") + klass = astroid.body[0] + self.assertFalse(klass.metaclass()) + + @require_version('3.0') + def test_metaclass_imported(self): + astroid = self.builder.string_build(dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass""")) + klass = astroid.body[1] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @require_version('3.0') + def test_as_string(self): + body = dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass""") + astroid = self.builder.string_build(body) + klass = astroid.body[1] + + self.assertEqual(klass.as_string(), + '\n\nclass Test(metaclass=ABCMeta):\n pass\n') + + @require_version('3.0') + def test_old_syntax_works(self): + astroid = self.builder.string_build(dedent(""" + class Test: + __metaclass__ = type + class SubTest(Test): pass + """)) + klass = astroid['SubTest'] + metaclass = klass.metaclass() + self.assertIsNone(metaclass) + + @require_version('3.0') + def test_metaclass_yes_leak(self): + astroid = self.builder.string_build(dedent(""" + # notice `ab` instead of `abc` + from ab import ABCMeta + + class 
Meta(metaclass=ABCMeta): pass + """)) + klass = astroid['Meta'] + self.assertIsNone(klass.metaclass()) + + @require_version('3.0') + def test_parent_metaclass(self): + astroid = self.builder.string_build(dedent(""" + from abc import ABCMeta + class Test(metaclass=ABCMeta): pass + class SubTest(Test): pass + """)) + klass = astroid['SubTest'] + self.assertTrue(klass.newstyle) + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @require_version('3.0') + def test_metaclass_ancestors(self): + astroid = self.builder.string_build(dedent(""" + from abc import ABCMeta + + class FirstMeta(metaclass=ABCMeta): pass + class SecondMeta(metaclass=type): + pass + + class Simple: + pass + + class FirstImpl(FirstMeta): pass + class SecondImpl(FirstImpl): pass + class ThirdImpl(Simple, SecondMeta): + pass + """)) + classes = { + 'ABCMeta': ('FirstImpl', 'SecondImpl'), + 'type': ('ThirdImpl', ) + } + for metaclass, names in classes.items(): + for name in names: + impl = astroid[name] + meta = impl.metaclass() + self.assertIsInstance(meta, ClassDef) + self.assertEqual(meta.name, metaclass) + + @require_version('3.0') + def test_annotation_support(self): + astroid = self.builder.string_build(dedent(""" + def test(a: int, b: str, c: None, d, e, + *args: float, **kwargs: int)->int: + pass + """)) + func = astroid['test'] + self.assertIsInstance(func.args.varargannotation, Name) + self.assertEqual(func.args.varargannotation.name, 'float') + self.assertIsInstance(func.args.kwargannotation, Name) + self.assertEqual(func.args.kwargannotation.name, 'int') + self.assertIsInstance(func.returns, Name) + self.assertEqual(func.returns.name, 'int') + arguments = func.args + self.assertIsInstance(arguments.annotations[0], Name) + self.assertEqual(arguments.annotations[0].name, 'int') + self.assertIsInstance(arguments.annotations[1], Name) + self.assertEqual(arguments.annotations[1].name, 'str') + 
self.assertIsInstance(arguments.annotations[2], Const) + self.assertIsNone(arguments.annotations[2].value) + self.assertIsNone(arguments.annotations[3]) + self.assertIsNone(arguments.annotations[4]) + + astroid = self.builder.string_build(dedent(""" + def test(a: int=1, b: str=2): + pass + """)) + func = astroid['test'] + self.assertIsInstance(func.args.annotations[0], Name) + self.assertEqual(func.args.annotations[0].name, 'int') + self.assertIsInstance(func.args.annotations[1], Name) + self.assertEqual(func.args.annotations[1].name, 'str') + self.assertIsNone(func.returns) + + @require_version('3.0') + def test_annotation_as_string(self): + code1 = dedent(''' + def test(a, b:int=4, c=2, f:'lala'=4)->2: + pass''') + code2 = dedent(''' + def test(a:typing.Generic[T], c:typing.Any=24)->typing.Iterable: + pass''') + for code in (code1, code2): + func = extract_node(code) + self.assertEqual(func.as_string(), code) + + @require_version('3.5') + def test_unpacking_in_dicts(self): + code = "{'x': 1, **{'y': 2}}" + node = extract_node(code) + self.assertEqual(node.as_string(), code) + keys = [key for (key, _) in node.items] + self.assertIsInstance(keys[0], nodes.Const) + self.assertIsInstance(keys[1], nodes.DictUnpack) + + @require_version('3.5') + def test_nested_unpacking_in_dicts(self): + code = "{'x': 1, **{'y': 2, **{'z': 3}}}" + node = extract_node(code) + self.assertEqual(node.as_string(), code) + + @require_version('3.5') + def test_unpacking_in_dict_getitem(self): + node = extract_node('{1:2, **{2:3, 3:4}, **{5: 6}}') + for key, expected in ((1, 2), (2, 3), (3, 4), (5, 6)): + value = node.getitem(key) + self.assertIsInstance(value, nodes.Const) + self.assertEqual(value.value, expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_raw_building.py b/pymode/libs/astroid/tests/unittest_raw_building.py new file mode 100644 index 00000000..2bdaac17 --- /dev/null +++ 
b/pymode/libs/astroid/tests/unittest_raw_building.py @@ -0,0 +1,85 @@ +import inspect +import os +import unittest + +from six.moves import builtins # pylint: disable=import-error + +from astroid.builder import AstroidBuilder +from astroid.raw_building import ( + attach_dummy_node, build_module, + build_class, build_function, build_from_import +) +from astroid import test_utils +from astroid import nodes +from astroid.bases import BUILTINS + + +class RawBuildingTC(unittest.TestCase): + + def test_attach_dummy_node(self): + node = build_module('MyModule') + attach_dummy_node(node, 'DummyNode') + self.assertEqual(1, len(list(node.get_children()))) + + def test_build_module(self): + node = build_module('MyModule') + self.assertEqual(node.name, 'MyModule') + self.assertEqual(node.pure_python, False) + self.assertEqual(node.package, False) + self.assertEqual(node.parent, None) + + def test_build_class(self): + node = build_class('MyClass') + self.assertEqual(node.name, 'MyClass') + self.assertEqual(node.doc, None) + + def test_build_function(self): + node = build_function('MyFunction') + self.assertEqual(node.name, 'MyFunction') + self.assertEqual(node.doc, None) + + def test_build_function_args(self): + args = ['myArgs1', 'myArgs2'] + node = build_function('MyFunction', args) + self.assertEqual('myArgs1', node.args.args[0].name) + self.assertEqual('myArgs2', node.args.args[1].name) + self.assertEqual(2, len(node.args.args)) + + def test_build_function_defaults(self): + defaults = ['defaults1', 'defaults2'] + node = build_function('MyFunction', None, defaults) + self.assertEqual(2, len(node.args.defaults)) + + def test_build_from_import(self): + names = ['exceptions, inference, inspector'] + node = build_from_import('astroid', names) + self.assertEqual(len(names), len(node.names)) + + @test_utils.require_version(minver='3.0') + def test_io_is__io(self): + # _io module calls itself io. 
This leads + # to cyclic dependencies when astroid tries to resolve + # what io.BufferedReader is. The code that handles this + # is in astroid.raw_building.imported_member, which verifies + # the true name of the module. + import _io + + builder = AstroidBuilder() + module = builder.inspect_build(_io) + buffered_reader = module.getattr('BufferedReader')[0] + self.assertEqual(buffered_reader.root().name, 'io') + + @unittest.skipUnless(os.name == 'java', 'Requires Jython') + def test_open_is_inferred_correctly(self): + # Lot of Jython builtins don't have a __module__ attribute. + for name, _ in inspect.getmembers(builtins, predicate=inspect.isbuiltin): + if name == 'print': + continue + node = test_utils.extract_node('{0} #@'.format(name)) + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.FunctionDef, name) + self.assertEqual(inferred.root().name, BUILTINS, name) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_regrtest.py b/pymode/libs/astroid/tests/unittest_regrtest.py new file mode 100644 index 00000000..158c7119 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_regrtest.py @@ -0,0 +1,364 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+import sys +import unittest +import textwrap + +import six + +from astroid import MANAGER, Instance, nodes +from astroid.bases import BUILTINS +from astroid.builder import AstroidBuilder +from astroid import exceptions +from astroid.raw_building import build_module +from astroid.manager import AstroidManager +from astroid.test_utils import require_version, extract_node +from astroid.tests import resources +from astroid import transforms + + +class NonRegressionTests(resources.AstroidCacheSetupMixin, + unittest.TestCase): + + def setUp(self): + sys.path.insert(0, resources.find('data')) + MANAGER.always_load_extensions = True + MANAGER.astroid_cache[BUILTINS] = self._builtins + + def tearDown(self): + # Since we may have created a brainless manager, leading + # to a new cache builtin module and proxy classes in the constants, + # clear out the global manager cache. + MANAGER.clear_cache(self._builtins) + MANAGER.always_load_extensions = False + sys.path.pop(0) + sys.path_importer_cache.pop(resources.find('data'), None) + + def brainless_manager(self): + manager = AstroidManager() + # avoid caching into the AstroidManager borg since we get problems + # with other tests : + manager.__dict__ = {} + manager._failed_import_hooks = [] + manager.astroid_cache = {} + manager._mod_file_cache = {} + manager._transform = transforms.TransformVisitor() + manager.clear_cache() # trigger proper bootstraping + return manager + + def test_module_path(self): + man = self.brainless_manager() + mod = man.ast_from_module_name('package.import_package_subpackage_module') + package = next(mod.igetattr('package')) + self.assertEqual(package.name, 'package') + subpackage = next(package.igetattr('subpackage')) + self.assertIsInstance(subpackage, nodes.Module) + self.assertTrue(subpackage.package) + self.assertEqual(subpackage.name, 'package.subpackage') + module = next(subpackage.igetattr('module')) + self.assertEqual(module.name, 'package.subpackage.module') + + + def 
test_package_sidepackage(self): + manager = self.brainless_manager() + assert 'package.sidepackage' not in MANAGER.astroid_cache + package = manager.ast_from_module_name('absimp') + self.assertIsInstance(package, nodes.Module) + self.assertTrue(package.package) + subpackage = next(package.getattr('sidepackage')[0].infer()) + self.assertIsInstance(subpackage, nodes.Module) + self.assertTrue(subpackage.package) + self.assertEqual(subpackage.name, 'absimp.sidepackage') + + + def test_living_property(self): + builder = AstroidBuilder() + builder._done = {} + builder._module = sys.modules[__name__] + builder.object_build(build_module('module_name', ''), Whatever) + + + def test_new_style_class_detection(self): + try: + import pygtk # pylint: disable=unused-variable + except ImportError: + self.skipTest('test skipped: pygtk is not available') + # XXX may fail on some pygtk version, because objects in + # gobject._gobject have __module__ set to gobject :( + builder = AstroidBuilder() + data = """ +import pygtk +pygtk.require("2.6") +import gobject + +class A(gobject.GObject): + pass +""" + astroid = builder.string_build(data, __name__, __file__) + a = astroid['A'] + self.assertTrue(a.newstyle) + + + def test_pylint_config_attr(self): + try: + from pylint import lint # pylint: disable=unused-variable + except ImportError: + self.skipTest('pylint not available') + mod = MANAGER.ast_from_module_name('pylint.lint') + pylinter = mod['PyLinter'] + expect = ['OptionsManagerMixIn', 'object', 'MessagesHandlerMixIn', + 'ReportsHandlerMixIn', 'BaseTokenChecker', 'BaseChecker', + 'OptionsProviderMixIn'] + self.assertListEqual([c.name for c in pylinter.ancestors()], + expect) + self.assertTrue(list(Instance(pylinter).getattr('config'))) + inferred = list(Instance(pylinter).igetattr('config')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].root().name, 'optparse') + self.assertEqual(inferred[0].name, 'Values') + + def test_numpy_crash(self): + """test don't crash 
on numpy""" + #a crash occured somewhere in the past, and an + # InferenceError instead of a crash was better, but now we even infer! + try: + import numpy # pylint: disable=unused-variable + except ImportError: + self.skipTest('test skipped: numpy is not available') + builder = AstroidBuilder() + data = """ +from numpy import multiply + +multiply(1, 2, 3) +""" + astroid = builder.string_build(data, __name__, __file__) + callfunc = astroid.body[1].value.func + inferred = callfunc.inferred() + self.assertEqual(len(inferred), 2) + + @require_version('3.0') + def test_nameconstant(self): + # used to fail for Python 3.4 + builder = AstroidBuilder() + astroid = builder.string_build("def test(x=True): pass") + default = astroid.body[0].args.args[0] + self.assertEqual(default.name, 'x') + self.assertEqual(next(default.infer()).value, True) + + @require_version('2.7') + def test_with_infer_assignnames(self): + builder = AstroidBuilder() + data = """ +with open('a.txt') as stream, open('b.txt'): + stream.read() +""" + astroid = builder.string_build(data, __name__, __file__) + # Used to crash due to the fact that the second + # context manager didn't use an assignment name. 
+ list(astroid.nodes_of_class(nodes.Call))[-1].inferred() + + def test_recursion_regression_issue25(self): + builder = AstroidBuilder() + data = """ +import recursion as base + +_real_Base = base.Base + +class Derived(_real_Base): + pass + +def run(): + base.Base = Derived +""" + astroid = builder.string_build(data, __name__, __file__) + # Used to crash in _is_metaclass, due to wrong + # ancestors chain + classes = astroid.nodes_of_class(nodes.ClassDef) + for klass in classes: + # triggers the _is_metaclass call + klass.type # pylint: disable=pointless-statement + + def test_decorator_callchain_issue42(self): + builder = AstroidBuilder() + data = """ + +def test(): + def factory(func): + def newfunc(): + func() + return newfunc + return factory + +@test() +def crash(): + pass +""" + astroid = builder.string_build(data, __name__, __file__) + self.assertEqual(astroid['crash'].type, 'function') + + def test_filter_stmts_scoping(self): + builder = AstroidBuilder() + data = """ +def test(): + compiler = int() + class B(compiler.__class__): + pass + compiler = B() + return compiler +""" + astroid = builder.string_build(data, __name__, __file__) + test = astroid['test'] + result = next(test.infer_call_result(astroid)) + self.assertIsInstance(result, Instance) + base = next(result._proxied.bases[0].infer()) + self.assertEqual(base.name, 'int') + + def test_ancestors_patching_class_recursion(self): + node = AstroidBuilder().string_build(textwrap.dedent(""" + import string + Template = string.Template + + class A(Template): + pass + + class B(A): + pass + + def test(x=False): + if x: + string.Template = A + else: + string.Template = B + """)) + klass = node['A'] + ancestors = list(klass.ancestors()) + self.assertEqual(ancestors[0].qname(), 'string.Template') + + def test_ancestors_yes_in_bases(self): + # Test for issue https://bitbucket.org/logilab/astroid/issue/84 + # This used to crash astroid with a TypeError, because an YES + # node was present in the bases + node = 
extract_node(""" + def with_metaclass(meta, *bases): + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + import lala + + class A(with_metaclass(object, lala.lala)): #@ + pass + """) + ancestors = list(node.ancestors()) + if six.PY3: + self.assertEqual(len(ancestors), 1) + self.assertEqual(ancestors[0].qname(), + "{}.object".format(BUILTINS)) + else: + self.assertEqual(len(ancestors), 0) + + def test_ancestors_missing_from_function(self): + # Test for https://www.logilab.org/ticket/122793 + node = extract_node(''' + def gen(): yield + GEN = gen() + next(GEN) + ''') + self.assertRaises(exceptions.InferenceError, next, node.infer()) + + def test_unicode_in_docstring(self): + # Crashed for astroid==1.4.1 + # Test for https://bitbucket.org/logilab/astroid/issues/273/ + + # In a regular file, "coding: utf-8" would have been used. + node = extract_node(u''' + from __future__ import unicode_literals + + class MyClass(object): + def method(self): + "With unicode : %s " + + instance = MyClass() + ''' % u"\u2019") + + next(node.value.infer()).as_string() + + def test_binop_generates_nodes_with_parents(self): + node = extract_node(''' + def no_op(*args): + pass + def foo(*args): + def inner(*more_args): + args + more_args #@ + return inner + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertIsNotNone(inferred.parent) + self.assertIsInstance(inferred.parent, nodes.BinOp) + + def test_decorator_names_inference_error_leaking(self): + node = extract_node(''' + class Parent(object): + @property + def foo(self): + pass + + class Child(Parent): + @Parent.foo.getter + def foo(self): #@ + return super(Child, self).foo + ['oink'] + ''') + inferred = next(node.infer()) + self.assertEqual(inferred.decoratornames(), set()) + + def test_ssl_protocol(self): + node = extract_node(''' + import ssl + ssl.PROTOCOL_TLSv1 + ''') + inferred = 
next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + + def test_uninferable_string_argument_of_namedtuple(self): + node = extract_node(''' + import collections + collections.namedtuple('{}'.format("a"), '')() + ''') + next(node.infer()) + + @require_version(maxver='3.0') + def test_reassignment_in_except_handler(self): + node = extract_node(''' + import exceptions + try: + {}["a"] + except KeyError, exceptions.IndexError: + pass + + IndexError #@ + ''') + self.assertEqual(len(node.inferred()), 1) + + +class Whatever(object): + a = property(lambda x: x, lambda x: x) + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_scoped_nodes.py b/pymode/libs/astroid/tests/unittest_scoped_nodes.py new file mode 100644 index 00000000..a15c923a --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_scoped_nodes.py @@ -0,0 +1,1583 @@ +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +"""tests for specific behaviour of astroid scoped nodes (i.e. 
module, class and +function) +""" +import os +import sys +from functools import partial +import unittest +import warnings + +from astroid import builder +from astroid import nodes +from astroid import scoped_nodes +from astroid import util +from astroid.exceptions import ( + InferenceError, NotFoundError, + NoDefault, ResolveError, MroError, + InconsistentMroError, DuplicateBasesError, +) +from astroid.bases import ( + BUILTINS, Instance, + BoundMethod, UnboundMethod, Generator +) +from astroid import __pkginfo__ +from astroid import test_utils +from astroid.tests import resources + + +def _test_dict_interface(self, node, test_attr): + self.assertIs(node[test_attr], node[test_attr]) + self.assertIn(test_attr, node) + node.keys() + node.values() + node.items() + iter(node) + + +class ModuleLoader(resources.SysPathSetup): + def setUp(self): + super(ModuleLoader, self).setUp() + self.module = resources.build_file('data/module.py', 'data.module') + self.module2 = resources.build_file('data/module2.py', 'data.module2') + self.nonregr = resources.build_file('data/nonregr.py', 'data.nonregr') + self.pack = resources.build_file('data/__init__.py', 'data') + + +class ModuleNodeTest(ModuleLoader, unittest.TestCase): + + def test_special_attributes(self): + self.assertEqual(len(self.module.getattr('__name__')), 1) + self.assertIsInstance(self.module.getattr('__name__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__name__')[0].value, 'data.module') + self.assertEqual(len(self.module.getattr('__doc__')), 1) + self.assertIsInstance(self.module.getattr('__doc__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__doc__')[0].value, 'test module for astroid\n') + self.assertEqual(len(self.module.getattr('__file__')), 1) + self.assertIsInstance(self.module.getattr('__file__')[0], nodes.Const) + self.assertEqual(self.module.getattr('__file__')[0].value, + os.path.abspath(resources.find('data/module.py'))) + self.assertEqual(len(self.module.getattr('__dict__')), 
1) + self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) + self.assertRaises(NotFoundError, self.module.getattr, '__path__') + self.assertEqual(len(self.pack.getattr('__path__')), 1) + self.assertIsInstance(self.pack.getattr('__path__')[0], nodes.List) + + def test_dict_interface(self): + _test_dict_interface(self, self.module, 'YO') + + def test_getattr(self): + yo = self.module.getattr('YO')[0] + self.assertIsInstance(yo, nodes.ClassDef) + self.assertEqual(yo.name, 'YO') + red = next(self.module.igetattr('redirect')) + self.assertIsInstance(red, nodes.FunctionDef) + self.assertEqual(red.name, 'four_args') + namenode = next(self.module.igetattr('NameNode')) + self.assertIsInstance(namenode, nodes.ClassDef) + self.assertEqual(namenode.name, 'Name') + # resolve packageredirection + mod = resources.build_file('data/appl/myConnection.py', + 'data.appl.myConnection') + ssl = next(mod.igetattr('SSL1')) + cnx = next(ssl.igetattr('Connection')) + self.assertEqual(cnx.__class__, nodes.ClassDef) + self.assertEqual(cnx.name, 'Connection') + self.assertEqual(cnx.root().name, 'data.SSL1.Connection1') + self.assertEqual(len(self.nonregr.getattr('enumerate')), 2) + # raise ResolveError + self.assertRaises(InferenceError, self.nonregr.igetattr, 'YOAA') + + def test_wildcard_import_names(self): + m = resources.build_file('data/all.py', 'all') + self.assertEqual(m.wildcard_import_names(), ['Aaa', '_bla', 'name']) + m = resources.build_file('data/notall.py', 'notall') + res = sorted(m.wildcard_import_names()) + self.assertEqual(res, ['Aaa', 'func', 'name', 'other']) + + def test_public_names(self): + m = builder.parse(''' + name = 'a' + _bla = 2 + other = 'o' + class Aaa: pass + def func(): print('yo') + __all__ = 'Aaa', '_bla', 'name' + ''') + values = sorted(['Aaa', 'name', 'other', 'func']) + self.assertEqual(sorted(m._public_names()), values) + m = builder.parse(''' + name = 'a' + _bla = 2 + other = 'o' + class Aaa: pass + + def func(): return 'yo' + ''') + res 
= sorted(m._public_names()) + self.assertEqual(res, values) + + m = builder.parse(''' + from missing import tzop + trop = "test" + __all__ = (trop, "test1", tzop, 42) + ''') + res = sorted(m._public_names()) + self.assertEqual(res, ["trop", "tzop"]) + + m = builder.parse(''' + test = tzop = 42 + __all__ = ('test', ) + ('tzop', ) + ''') + res = sorted(m._public_names()) + self.assertEqual(res, ['test', 'tzop']) + + def test_module_getattr(self): + data = ''' + appli = application + appli += 2 + del appli + ''' + astroid = builder.parse(data, __name__) + # test del statement not returned by getattr + self.assertEqual(len(astroid.getattr('appli')), 2, + astroid.getattr('appli')) + + def test_relative_to_absolute_name(self): + # package + mod = nodes.Module('very.multi.package', 'doc') + mod.package = True + modname = mod.relative_to_absolute_name('utils', 1) + self.assertEqual(modname, 'very.multi.package.utils') + modname = mod.relative_to_absolute_name('utils', 2) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 0) + self.assertEqual(modname, 'very.multi.package.utils') + modname = mod.relative_to_absolute_name('', 1) + self.assertEqual(modname, 'very.multi.package') + # non package + mod = nodes.Module('very.multi.module', 'doc') + mod.package = False + modname = mod.relative_to_absolute_name('utils', 0) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 1) + self.assertEqual(modname, 'very.multi.utils') + modname = mod.relative_to_absolute_name('utils', 2) + self.assertEqual(modname, 'very.utils') + modname = mod.relative_to_absolute_name('', 1) + self.assertEqual(modname, 'very.multi') + + def test_import_1(self): + data = '''from . 
import subpackage''' + sys.path.insert(0, resources.find('data')) + astroid = builder.parse(data, 'package', 'data/package/__init__.py') + try: + m = astroid.import_module('', level=1) + self.assertEqual(m.name, 'package') + inferred = list(astroid.igetattr('subpackage')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].name, 'package.subpackage') + finally: + del sys.path[0] + + + def test_import_2(self): + data = '''from . import subpackage as pouet''' + astroid = builder.parse(data, 'package', 'data/package/__init__.py') + sys.path.insert(0, resources.find('data')) + try: + m = astroid.import_module('', level=1) + self.assertEqual(m.name, 'package') + inferred = list(astroid.igetattr('pouet')) + self.assertEqual(len(inferred), 1) + self.assertEqual(inferred[0].name, 'package.subpackage') + finally: + del sys.path[0] + + + def test_file_stream_in_memory(self): + data = '''irrelevant_variable is irrelevant''' + astroid = builder.parse(data, 'in_memory') + with warnings.catch_warnings(record=True): + self.assertEqual(astroid.file_stream.read().decode(), data) + + def test_file_stream_physical(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + with open(path, 'rb') as file_io: + with warnings.catch_warnings(record=True): + self.assertEqual(astroid.file_stream.read(), file_io.read()) + + def test_file_stream_api(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + if __pkginfo__.numversion >= (1, 6): + # file_stream is slated for removal in astroid 1.6. + with self.assertRaises(AttributeError): + # pylint: disable=pointless-statement + astroid.file_stream + else: + # Until astroid 1.6, Module.file_stream will emit + # PendingDeprecationWarning in 1.4, DeprecationWarning + # in 1.5 and finally it will be removed in 1.6, leaving + # only Module.stream as the recommended way to retrieve + # its file stream. 
+ with warnings.catch_warnings(record=True) as cm: + warnings.simplefilter("always") + self.assertIsNot(astroid.file_stream, astroid.file_stream) + self.assertGreater(len(cm), 1) + self.assertEqual(cm[0].category, PendingDeprecationWarning) + + def test_stream_api(self): + path = resources.find('data/all.py') + astroid = builder.AstroidBuilder().file_build(path, 'all') + stream = astroid.stream() + self.assertTrue(hasattr(stream, 'close')) + with stream: + with open(path, 'rb') as file_io: + self.assertEqual(stream.read(), file_io.read()) + + +class FunctionNodeTest(ModuleLoader, unittest.TestCase): + + def test_special_attributes(self): + func = self.module2['make_class'] + self.assertEqual(len(func.getattr('__name__')), 1) + self.assertIsInstance(func.getattr('__name__')[0], nodes.Const) + self.assertEqual(func.getattr('__name__')[0].value, 'make_class') + self.assertEqual(len(func.getattr('__doc__')), 1) + self.assertIsInstance(func.getattr('__doc__')[0], nodes.Const) + self.assertEqual(func.getattr('__doc__')[0].value, 'check base is correctly resolved to Concrete0') + self.assertEqual(len(self.module.getattr('__dict__')), 1) + self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) + + def test_dict_interface(self): + _test_dict_interface(self, self.module['global_access'], 'local') + + def test_default_value(self): + func = self.module2['make_class'] + self.assertIsInstance(func.args.default_value('base'), nodes.Attribute) + self.assertRaises(NoDefault, func.args.default_value, 'args') + self.assertRaises(NoDefault, func.args.default_value, 'kwargs') + self.assertRaises(NoDefault, func.args.default_value, 'any') + #self.assertIsInstance(func.mularg_class('args'), nodes.Tuple) + #self.assertIsInstance(func.mularg_class('kwargs'), nodes.Dict) + #self.assertIsNone(func.mularg_class('base')) + + def test_navigation(self): + function = self.module['global_access'] + self.assertEqual(function.statement(), function) + l_sibling = 
function.previous_sibling() + # check taking parent if child is not a stmt + self.assertIsInstance(l_sibling, nodes.Assign) + child = function.args.args[0] + self.assertIs(l_sibling, child.previous_sibling()) + r_sibling = function.next_sibling() + self.assertIsInstance(r_sibling, nodes.ClassDef) + self.assertEqual(r_sibling.name, 'YO') + self.assertIs(r_sibling, child.next_sibling()) + last = r_sibling.next_sibling().next_sibling().next_sibling() + self.assertIsInstance(last, nodes.Assign) + self.assertIsNone(last.next_sibling()) + first = l_sibling.root().body[0] + self.assertIsNone(first.previous_sibling()) + + def test_nested_args(self): + if sys.version_info >= (3, 0): + self.skipTest("nested args has been removed in py3.x") + code = ''' + def nested_args(a, (b, c, d)): + "nested arguments test" + ''' + tree = builder.parse(code) + func = tree['nested_args'] + self.assertEqual(sorted(func._locals), ['a', 'b', 'c', 'd']) + self.assertEqual(func.args.format_args(), 'a, (b, c, d)') + + def test_four_args(self): + func = self.module['four_args'] + #self.assertEqual(func.args.args, ['a', ('b', 'c', 'd')]) + local = sorted(func.keys()) + self.assertEqual(local, ['a', 'b', 'c', 'd']) + self.assertEqual(func.type, 'function') + + def test_format_args(self): + func = self.module2['make_class'] + self.assertEqual(func.args.format_args(), + 'any, base=data.module.YO, *args, **kwargs') + func = self.module['four_args'] + self.assertEqual(func.args.format_args(), 'a, b, c, d') + + def test_is_generator(self): + self.assertTrue(self.module2['generator'].is_generator()) + self.assertFalse(self.module2['not_a_generator'].is_generator()) + self.assertFalse(self.module2['make_class'].is_generator()) + + def test_is_abstract(self): + method = self.module2['AbstractClass']['to_override'] + self.assertTrue(method.is_abstract(pass_is_abstract=False)) + self.assertEqual(method.qname(), 'data.module2.AbstractClass.to_override') + self.assertEqual(method.pytype(), '%s.instancemethod' 
% BUILTINS) + method = self.module2['AbstractClass']['return_something'] + self.assertFalse(method.is_abstract(pass_is_abstract=False)) + # non regression : test raise "string" doesn't cause an exception in is_abstract + func = self.module2['raise_string'] + self.assertFalse(func.is_abstract(pass_is_abstract=False)) + + def test_is_abstract_decorated(self): + methods = test_utils.extract_node(""" + import abc + + class Klass(object): + @abc.abstractproperty + def prop(self): #@ + pass + + @abc.abstractmethod + def method1(self): #@ + pass + + some_other_decorator = lambda x: x + @some_other_decorator + def method2(self): #@ + pass + """) + self.assertTrue(methods[0].is_abstract(pass_is_abstract=False)) + self.assertTrue(methods[1].is_abstract(pass_is_abstract=False)) + self.assertFalse(methods[2].is_abstract(pass_is_abstract=False)) + +## def test_raises(self): +## method = self.module2['AbstractClass']['to_override'] +## self.assertEqual([str(term) for term in method.raises()], +## ["Call(Name('NotImplementedError'), [], None, None)"] ) + +## def test_returns(self): +## method = self.module2['AbstractClass']['return_something'] +## # use string comp since Node doesn't handle __cmp__ +## self.assertEqual([str(term) for term in method.returns()], +## ["Const('toto')", "Const(None)"]) + + def test_lambda_pytype(self): + data = ''' + def f(): + g = lambda: None + ''' + astroid = builder.parse(data) + g = list(astroid['f'].ilookup('g'))[0] + self.assertEqual(g.pytype(), '%s.function' % BUILTINS) + + def test_lambda_qname(self): + astroid = builder.parse('lmbd = lambda: None', __name__) + self.assertEqual('%s.' 
% __name__, astroid['lmbd'].parent.value.qname()) + + def test_is_method(self): + data = ''' + class A: + def meth1(self): + return 1 + @classmethod + def meth2(cls): + return 2 + @staticmethod + def meth3(): + return 3 + + def function(): + return 0 + + @staticmethod + def sfunction(): + return -1 + ''' + astroid = builder.parse(data) + self.assertTrue(astroid['A']['meth1'].is_method()) + self.assertTrue(astroid['A']['meth2'].is_method()) + self.assertTrue(astroid['A']['meth3'].is_method()) + self.assertFalse(astroid['function'].is_method()) + self.assertFalse(astroid['sfunction'].is_method()) + + def test_argnames(self): + if sys.version_info < (3, 0): + code = 'def f(a, (b, c), *args, **kwargs): pass' + else: + code = 'def f(a, b, c, *args, **kwargs): pass' + astroid = builder.parse(code, __name__) + self.assertEqual(astroid['f'].argnames(), ['a', 'b', 'c', 'args', 'kwargs']) + + def test_return_nothing(self): + """test inferred value on a function with empty return""" + data = ''' + def func(): + return + + a = func() + ''' + astroid = builder.parse(data) + call = astroid.body[1].value + func_vals = call.inferred() + self.assertEqual(len(func_vals), 1) + self.assertIsInstance(func_vals[0], nodes.Const) + self.assertIsNone(func_vals[0].value) + + def test_func_instance_attr(self): + """test instance attributes for functions""" + data = """ + def test(): + print(test.bar) + + test.bar = 1 + test() + """ + astroid = builder.parse(data, 'mod') + func = astroid.body[2].value.func.inferred()[0] + self.assertIsInstance(func, nodes.FunctionDef) + self.assertEqual(func.name, 'test') + one = func.getattr('bar')[0].inferred()[0] + self.assertIsInstance(one, nodes.Const) + self.assertEqual(one.value, 1) + + def test_type_builtin_descriptor_subclasses(self): + astroid = builder.parse(""" + class classonlymethod(classmethod): + pass + class staticonlymethod(staticmethod): + pass + + class Node: + @classonlymethod + def clsmethod_subclass(cls): + pass + @classmethod + def 
clsmethod(cls): + pass + @staticonlymethod + def staticmethod_subclass(cls): + pass + @staticmethod + def stcmethod(cls): + pass + """) + node = astroid._locals['Node'][0] + self.assertEqual(node._locals['clsmethod_subclass'][0].type, + 'classmethod') + self.assertEqual(node._locals['clsmethod'][0].type, + 'classmethod') + self.assertEqual(node._locals['staticmethod_subclass'][0].type, + 'staticmethod') + self.assertEqual(node._locals['stcmethod'][0].type, + 'staticmethod') + + def test_decorator_builtin_descriptors(self): + astroid = builder.parse(""" + def static_decorator(platform=None, order=50): + def wrapper(f): + f.cgm_module = True + f.cgm_module_order = order + f.cgm_module_platform = platform + return staticmethod(f) + return wrapper + + def long_classmethod_decorator(platform=None, order=50): + def wrapper(f): + def wrapper2(f): + def wrapper3(f): + f.cgm_module = True + f.cgm_module_order = order + f.cgm_module_platform = platform + return classmethod(f) + return wrapper3(f) + return wrapper2(f) + return wrapper + + def classmethod_decorator(platform=None): + def wrapper(f): + f.platform = platform + return classmethod(f) + return wrapper + + def classmethod_wrapper(fn): + def wrapper(cls, *args, **kwargs): + result = fn(cls, *args, **kwargs) + return result + + return classmethod(wrapper) + + def staticmethod_wrapper(fn): + def wrapper(*args, **kwargs): + return fn(*args, **kwargs) + return staticmethod(wrapper) + + class SomeClass(object): + @static_decorator() + def static(node, cfg): + pass + @classmethod_decorator() + def classmethod(cls): + pass + @static_decorator + def not_so_static(node): + pass + @classmethod_decorator + def not_so_classmethod(node): + pass + @classmethod_wrapper + def classmethod_wrapped(cls): + pass + @staticmethod_wrapper + def staticmethod_wrapped(): + pass + @long_classmethod_decorator() + def long_classmethod(cls): + pass + """) + node = astroid._locals['SomeClass'][0] + self.assertEqual(node._locals['static'][0].type, + 
'staticmethod') + self.assertEqual(node._locals['classmethod'][0].type, + 'classmethod') + self.assertEqual(node._locals['not_so_static'][0].type, + 'method') + self.assertEqual(node._locals['not_so_classmethod'][0].type, + 'method') + self.assertEqual(node._locals['classmethod_wrapped'][0].type, + 'classmethod') + self.assertEqual(node._locals['staticmethod_wrapped'][0].type, + 'staticmethod') + self.assertEqual(node._locals['long_classmethod'][0].type, + 'classmethod') + + def test_igetattr(self): + func = test_utils.extract_node(''' + def test(): + pass + ''') + func._instance_attrs['value'] = [nodes.Const(42)] + value = func.getattr('value') + self.assertEqual(len(value), 1) + self.assertIsInstance(value[0], nodes.Const) + self.assertEqual(value[0].value, 42) + inferred = next(func.igetattr('value')) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + @test_utils.require_version(minver='3.0') + def test_return_annotation_is_not_the_last(self): + func = builder.parse(''' + def test() -> bytes: + pass + pass + return + ''').body[0] + last_child = func.last_child() + self.assertIsInstance(last_child, nodes.Return) + self.assertEqual(func.tolineno, 5) + + +class ClassNodeTest(ModuleLoader, unittest.TestCase): + + def test_dict_interface(self): + _test_dict_interface(self, self.module['YOUPI'], 'method') + + def test_cls_special_attributes_1(self): + cls = self.module['YO'] + self.assertEqual(len(cls.getattr('__bases__')), 1) + self.assertEqual(len(cls.getattr('__name__')), 1) + self.assertIsInstance(cls.getattr('__name__')[0], nodes.Const) + self.assertEqual(cls.getattr('__name__')[0].value, 'YO') + self.assertEqual(len(cls.getattr('__doc__')), 1) + self.assertIsInstance(cls.getattr('__doc__')[0], nodes.Const) + self.assertEqual(cls.getattr('__doc__')[0].value, 'hehe') + self.assertEqual(len(cls.getattr('__module__')), 1) + self.assertIsInstance(cls.getattr('__module__')[0], nodes.Const) + 
self.assertEqual(cls.getattr('__module__')[0].value, 'data.module') + self.assertEqual(len(cls.getattr('__dict__')), 1) + if not cls.newstyle: + self.assertRaises(NotFoundError, cls.getattr, '__mro__') + for cls in (nodes.List._proxied, nodes.Const(1)._proxied): + self.assertEqual(len(cls.getattr('__bases__')), 1) + self.assertEqual(len(cls.getattr('__name__')), 1) + self.assertEqual(len(cls.getattr('__doc__')), 1, (cls, cls.getattr('__doc__'))) + self.assertEqual(cls.getattr('__doc__')[0].value, cls.doc) + self.assertEqual(len(cls.getattr('__module__')), 1) + self.assertEqual(len(cls.getattr('__dict__')), 1) + self.assertEqual(len(cls.getattr('__mro__')), 1) + + def test__mro__attribute(self): + node = test_utils.extract_node(''' + class A(object): pass + class B(object): pass + class C(A, B): pass + ''') + mro = node.getattr('__mro__')[0] + self.assertIsInstance(mro, nodes.Tuple) + self.assertEqual(mro.elts, node.mro()) + + def test__bases__attribute(self): + node = test_utils.extract_node(''' + class A(object): pass + class B(object): pass + class C(A, B): pass + class D(C): pass + ''') + bases = node.getattr('__bases__')[0] + self.assertIsInstance(bases, nodes.Tuple) + self.assertEqual(len(bases.elts), 1) + self.assertIsInstance(bases.elts[0], nodes.ClassDef) + self.assertEqual(bases.elts[0].name, 'C') + + def test_cls_special_attributes_2(self): + astroid = builder.parse(''' + class A: pass + class B: pass + + A.__bases__ += (B,) + ''', __name__) + self.assertEqual(len(astroid['A'].getattr('__bases__')), 2) + self.assertIsInstance(astroid['A'].getattr('__bases__')[0], nodes.Tuple) + self.assertIsInstance(astroid['A'].getattr('__bases__')[1], nodes.AssignAttr) + + def test_instance_special_attributes(self): + for inst in (Instance(self.module['YO']), nodes.List(), nodes.Const(1)): + self.assertRaises(NotFoundError, inst.getattr, '__mro__') + self.assertRaises(NotFoundError, inst.getattr, '__bases__') + self.assertRaises(NotFoundError, inst.getattr, '__name__') 
+ self.assertEqual(len(inst.getattr('__dict__')), 1) + self.assertEqual(len(inst.getattr('__doc__')), 1) + + def test_navigation(self): + klass = self.module['YO'] + self.assertEqual(klass.statement(), klass) + l_sibling = klass.previous_sibling() + self.assertTrue(isinstance(l_sibling, nodes.FunctionDef), l_sibling) + self.assertEqual(l_sibling.name, 'global_access') + r_sibling = klass.next_sibling() + self.assertIsInstance(r_sibling, nodes.ClassDef) + self.assertEqual(r_sibling.name, 'YOUPI') + + def test_local_attr_ancestors(self): + module = builder.parse(''' + class A(): + def __init__(self): pass + class B(A): pass + class C(B): pass + class D(object): pass + class F(): pass + class E(F, D): pass + ''') + # Test old-style (Python 2) / new-style (Python 3+) ancestors lookups + klass2 = module['C'] + it = klass2.local_attr_ancestors('__init__') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'A') + if sys.version_info[0] == 2: + self.assertRaises(StopIteration, partial(next, it)) + else: + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'object') + self.assertRaises(StopIteration, partial(next, it)) + + it = klass2.local_attr_ancestors('method') + self.assertRaises(StopIteration, partial(next, it)) + + # Test mixed-style ancestor lookups + klass2 = module['E'] + it = klass2.local_attr_ancestors('__init__') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'object') + self.assertRaises(StopIteration, partial(next, it)) + + def test_local_attr_mro(self): + module = builder.parse(''' + class A(object): + def __init__(self): pass + class B(A): + def __init__(self, arg, arg2): pass + class C(A): pass + class D(C, B): pass + ''') + dclass = module['D'] + init = dclass.local_attr('__init__')[0] + self.assertIsInstance(init, nodes.FunctionDef) + self.assertEqual(init.parent.name, 'B') + + 
cclass = module['C'] + init = cclass.local_attr('__init__')[0] + self.assertIsInstance(init, nodes.FunctionDef) + self.assertEqual(init.parent.name, 'A') + + ancestors = list(dclass.local_attr_ancestors('__init__')) + self.assertEqual([node.name for node in ancestors], ['B', 'A', 'object']) + + def test_instance_attr_ancestors(self): + klass2 = self.module['YOUPI'] + it = klass2.instance_attr_ancestors('yo') + anc_klass = next(it) + self.assertIsInstance(anc_klass, nodes.ClassDef) + self.assertEqual(anc_klass.name, 'YO') + self.assertRaises(StopIteration, partial(next, it)) + klass2 = self.module['YOUPI'] + it = klass2.instance_attr_ancestors('member') + self.assertRaises(StopIteration, partial(next, it)) + + def test_methods(self): + expected_methods = {'__init__', 'class_method', 'method', 'static_method'} + klass2 = self.module['YOUPI'] + methods = {m.name for m in klass2.methods()} + self.assertTrue( + methods.issuperset(expected_methods)) + methods = {m.name for m in klass2.mymethods()} + self.assertSetEqual(expected_methods, methods) + klass2 = self.module2['Specialization'] + methods = {m.name for m in klass2.mymethods()} + self.assertSetEqual(set([]), methods) + method_locals = klass2.local_attr('method') + self.assertEqual(len(method_locals), 1) + self.assertEqual(method_locals[0].name, 'method') + self.assertRaises(NotFoundError, klass2.local_attr, 'nonexistant') + methods = {m.name for m in klass2.methods()} + self.assertTrue(methods.issuperset(expected_methods)) + + #def test_rhs(self): + # my_dict = self.module['MY_DICT'] + # self.assertIsInstance(my_dict.rhs(), nodes.Dict) + # a = self.module['YO']['a'] + # value = a.rhs() + # self.assertIsInstance(value, nodes.Const) + # self.assertEqual(value.value, 1) + + @unittest.skipIf(sys.version_info[0] >= 3, "Python 2 class semantics required.") + def test_ancestors(self): + klass = self.module['YOUPI'] + self.assertEqual(['YO'], [a.name for a in klass.ancestors()]) + klass = self.module2['Specialization'] + 
self.assertEqual(['YOUPI', 'YO'], [a.name for a in klass.ancestors()]) + + @unittest.skipIf(sys.version_info[0] < 3, "Python 3 class semantics required.") + def test_ancestors_py3(self): + klass = self.module['YOUPI'] + self.assertEqual(['YO', 'object'], [a.name for a in klass.ancestors()]) + klass = self.module2['Specialization'] + self.assertEqual(['YOUPI', 'YO', 'object'], [a.name for a in klass.ancestors()]) + + def test_type(self): + klass = self.module['YOUPI'] + self.assertEqual(klass.type, 'class') + klass = self.module2['Metaclass'] + self.assertEqual(klass.type, 'metaclass') + klass = self.module2['MyException'] + self.assertEqual(klass.type, 'exception') + klass = self.module2['MyError'] + self.assertEqual(klass.type, 'exception') + # the following class used to be detected as a metaclass + # after the fix which used instance._proxied in .ancestors(), + # when in fact it is a normal class + klass = self.module2['NotMetaclass'] + self.assertEqual(klass.type, 'class') + + def test_inner_classes(self): + eee = self.nonregr['Ccc']['Eee'] + self.assertEqual([n.name for n in eee.ancestors()], ['Ddd', 'Aaa', 'object']) + + + def test_classmethod_attributes(self): + data = ''' + class WebAppObject(object): + def registered(cls, application): + cls.appli = application + cls.schema = application.schema + cls.config = application.config + return cls + registered = classmethod(registered) + ''' + astroid = builder.parse(data, __name__) + cls = astroid['WebAppObject'] + self.assertEqual(sorted(cls._locals.keys()), + ['appli', 'config', 'registered', 'schema']) + + def test_class_getattr(self): + data = ''' + class WebAppObject(object): + appli = application + appli += 2 + del self.appli + ''' + astroid = builder.parse(data, __name__) + cls = astroid['WebAppObject'] + # test del statement not returned by getattr + self.assertEqual(len(cls.getattr('appli')), 2) + + + def test_instance_getattr(self): + data = ''' + class WebAppObject(object): + def __init__(self, 
application): + self.appli = application + self.appli += 2 + del self.appli + ''' + astroid = builder.parse(data) + inst = Instance(astroid['WebAppObject']) + # test del statement not returned by getattr + self.assertEqual(len(inst.getattr('appli')), 2) + + + def test_instance_getattr_with_class_attr(self): + data = ''' + class Parent: + aa = 1 + cc = 1 + + class Klass(Parent): + aa = 0 + bb = 0 + + def incr(self, val): + self.cc = self.aa + if val > self.aa: + val = self.aa + if val < self.bb: + val = self.bb + self.aa += val + ''' + astroid = builder.parse(data) + inst = Instance(astroid['Klass']) + self.assertEqual(len(inst.getattr('aa')), 3, inst.getattr('aa')) + self.assertEqual(len(inst.getattr('bb')), 1, inst.getattr('bb')) + self.assertEqual(len(inst.getattr('cc')), 2, inst.getattr('cc')) + + + def test_getattr_method_transform(self): + data = ''' + class Clazz(object): + + def m1(self, value): + self.value = value + m2 = m1 + + def func(arg1, arg2): + "function that will be used as a method" + return arg1.value + arg2 + + Clazz.m3 = func + inst = Clazz() + inst.m4 = func + ''' + astroid = builder.parse(data) + cls = astroid['Clazz'] + # test del statement not returned by getattr + for method in ('m1', 'm2', 'm3'): + inferred = list(cls.igetattr(method)) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], UnboundMethod) + inferred = list(Instance(cls).igetattr(method)) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], BoundMethod) + inferred = list(Instance(cls).igetattr('m4')) + self.assertEqual(len(inferred), 1) + self.assertIsInstance(inferred[0], nodes.FunctionDef) + + def test_getattr_from_grandpa(self): + data = ''' + class Future: + attr = 1 + + class Present(Future): + pass + + class Past(Present): + pass + ''' + astroid = builder.parse(data) + past = astroid['Past'] + attr = past.getattr('attr') + self.assertEqual(len(attr), 1) + attr1 = attr[0] + self.assertIsInstance(attr1, nodes.AssignName) + 
self.assertEqual(attr1.name, 'attr') + + def test_function_with_decorator_lineno(self): + data = ''' + @f(a=2, + b=3) + def g1(x): + print(x) + + @f(a=2, + b=3) + def g2(): + pass + ''' + astroid = builder.parse(data) + self.assertEqual(astroid['g1'].fromlineno, 4) + self.assertEqual(astroid['g1'].tolineno, 5) + self.assertEqual(astroid['g2'].fromlineno, 9) + self.assertEqual(astroid['g2'].tolineno, 10) + + @test_utils.require_version(maxver='3.0') + def test_simple_metaclass(self): + astroid = builder.parse(""" + class Test(object): + __metaclass__ = type + """) + klass = astroid['Test'] + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'type') + + def test_metaclass_error(self): + astroid = builder.parse(""" + class Test(object): + __metaclass__ = typ + """) + klass = astroid['Test'] + self.assertFalse(klass.metaclass()) + + @test_utils.require_version(maxver='3.0') + def test_metaclass_imported(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test(object): + __metaclass__ = ABCMeta + """) + klass = astroid['Test'] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + def test_metaclass_yes_leak(self): + astroid = builder.parse(""" + # notice `ab` instead of `abc` + from ab import ABCMeta + + class Meta(object): + __metaclass__ = ABCMeta + """) + klass = astroid['Meta'] + self.assertIsNone(klass.metaclass()) + + @test_utils.require_version(maxver='3.0') + def test_newstyle_and_metaclass_good(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test: + __metaclass__ = ABCMeta + """) + klass = astroid['Test'] + self.assertTrue(klass.newstyle) + self.assertEqual(klass.metaclass().name, 'ABCMeta') + astroid = builder.parse(""" + from abc import ABCMeta + __metaclass__ = ABCMeta + class Test: + pass + """) + klass = astroid['Test'] + self.assertTrue(klass.newstyle) + 
self.assertEqual(klass.metaclass().name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_nested_metaclass(self): + astroid = builder.parse(""" + from abc import ABCMeta + class A(object): + __metaclass__ = ABCMeta + class B: pass + + __metaclass__ = ABCMeta + class C: + __metaclass__ = type + class D: pass + """) + a = astroid['A'] + b = a._locals['B'][0] + c = astroid['C'] + d = c._locals['D'][0] + self.assertEqual(a.metaclass().name, 'ABCMeta') + self.assertFalse(b.newstyle) + self.assertIsNone(b.metaclass()) + self.assertEqual(c.metaclass().name, 'type') + self.assertEqual(d.metaclass().name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_parent_metaclass(self): + astroid = builder.parse(""" + from abc import ABCMeta + class Test: + __metaclass__ = ABCMeta + class SubTest(Test): pass + """) + klass = astroid['SubTest'] + self.assertTrue(klass.newstyle) + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.name, 'ABCMeta') + + @test_utils.require_version(maxver='3.0') + def test_metaclass_ancestors(self): + astroid = builder.parse(""" + from abc import ABCMeta + + class FirstMeta(object): + __metaclass__ = ABCMeta + + class SecondMeta(object): + __metaclass__ = type + + class Simple(object): + pass + + class FirstImpl(FirstMeta): pass + class SecondImpl(FirstImpl): pass + class ThirdImpl(Simple, SecondMeta): + pass + """) + classes = { + 'ABCMeta': ('FirstImpl', 'SecondImpl'), + 'type': ('ThirdImpl', ) + } + for metaclass, names in classes.items(): + for name in names: + impl = astroid[name] + meta = impl.metaclass() + self.assertIsInstance(meta, nodes.ClassDef) + self.assertEqual(meta.name, metaclass) + + def test_metaclass_type(self): + klass = test_utils.extract_node(""" + def with_metaclass(meta, base=object): + return meta("NewBase", (base, ), {}) + + class ClassWithMeta(with_metaclass(type)): #@ + pass + """) + self.assertEqual( + ['NewBase', 'object'], 
+ [base.name for base in klass.ancestors()]) + + def test_no_infinite_metaclass_loop(self): + klass = test_utils.extract_node(""" + class SSS(object): + + class JJJ(object): + pass + + @classmethod + def Init(cls): + cls.JJJ = type('JJJ', (cls.JJJ,), {}) + + class AAA(SSS): + pass + + class BBB(AAA.JJJ): + pass + """) + self.assertFalse(scoped_nodes._is_metaclass(klass)) + ancestors = [base.name for base in klass.ancestors()] + self.assertIn('object', ancestors) + self.assertIn('JJJ', ancestors) + + def test_no_infinite_metaclass_loop_with_redefine(self): + nodes = test_utils.extract_node(""" + import datetime + + class A(datetime.date): #@ + @classmethod + def now(cls): + return cls() + + class B(datetime.date): #@ + pass + + datetime.date = A + datetime.date = B + """) + for klass in nodes: + self.assertEqual(None, klass.metaclass()) + + def test_metaclass_generator_hack(self): + klass = test_utils.extract_node(""" + import six + + class WithMeta(six.with_metaclass(type, object)): #@ + pass + """) + self.assertEqual( + ['object'], + [base.name for base in klass.ancestors()]) + self.assertEqual( + 'type', klass.metaclass().name) + + def test_using_six_add_metaclass(self): + klass = test_utils.extract_node(''' + import six + import abc + + @six.add_metaclass(abc.ABCMeta) + class WithMeta(object): + pass + ''') + inferred = next(klass.infer()) + metaclass = inferred.metaclass() + self.assertIsInstance(metaclass, scoped_nodes.ClassDef) + self.assertEqual(metaclass.qname(), 'abc.ABCMeta') + + def test_using_invalid_six_add_metaclass_call(self): + klass = test_utils.extract_node(''' + import six + @six.add_metaclass() + class Invalid(object): + pass + ''') + inferred = next(klass.infer()) + self.assertIsNone(inferred.metaclass()) + + def test_nonregr_infer_callresult(self): + astroid = builder.parse(""" + class Delegate(object): + def __get__(self, obj, cls): + return getattr(obj._subject, self.attribute) + + class CompositeBuilder(object): + __call__ = Delegate() + + 
builder = CompositeBuilder(result, composite) + tgts = builder() + """) + instance = astroid['tgts'] + # used to raise "'_Yes' object is not iterable", see + # https://bitbucket.org/logilab/astroid/issue/17 + self.assertEqual(list(instance.infer()), [util.YES]) + + def test_slots(self): + astroid = builder.parse(""" + from collections import deque + from textwrap import dedent + + class First(object): #@ + __slots__ = ("a", "b", 1) + class Second(object): #@ + __slots__ = "a" + class Third(object): #@ + __slots__ = deque(["a", "b", "c"]) + class Fourth(object): #@ + __slots__ = {"a": "a", "b": "b"} + class Fifth(object): #@ + __slots__ = list + class Sixth(object): #@ + __slots__ = "" + class Seventh(object): #@ + __slots__ = dedent.__name__ + class Eight(object): #@ + __slots__ = ("parens") + class Ninth(object): #@ + pass + class Ten(object): #@ + __slots__ = dict({"a": "b", "c": "d"}) + """) + expected = [ + ('First', ('a', 'b')), + ('Second', ('a', )), + ('Third', None), + ('Fourth', ('a', 'b')), + ('Fifth', None), + ('Sixth', None), + ('Seventh', ('dedent', )), + ('Eight', ('parens', )), + ('Ninth', None), + ('Ten', ('a', 'c')), + ] + for cls, expected_value in expected: + slots = astroid[cls].slots() + if expected_value is None: + self.assertIsNone(slots) + else: + self.assertEqual(list(expected_value), + [node.value for node in slots]) + + @test_utils.require_version(maxver='3.0') + def test_slots_py2(self): + module = builder.parse(""" + class UnicodeSlots(object): + __slots__ = (u"a", u"b", "c") + """) + slots = module['UnicodeSlots'].slots() + self.assertEqual(len(slots), 3) + self.assertEqual(slots[0].value, "a") + self.assertEqual(slots[1].value, "b") + self.assertEqual(slots[2].value, "c") + + @test_utils.require_version(maxver='3.0') + def test_slots_py2_not_implemented(self): + module = builder.parse(""" + class OldStyle: + __slots__ = ("a", "b") + """) + msg = "The concept of slots is undefined for old-style classes." 
+ with self.assertRaises(NotImplementedError) as cm: + module['OldStyle'].slots() + self.assertEqual(str(cm.exception), msg) + + def test_slots_empty_list_of_slots(self): + module = builder.parse(""" + class Klass(object): + __slots__ = () + """) + cls = module['Klass'] + self.assertEqual(cls.slots(), []) + + def test_slots_taken_from_parents(self): + module = builder.parse(''' + class FirstParent(object): + __slots__ = ('a', 'b', 'c') + class SecondParent(FirstParent): + __slots__ = ('d', 'e') + class Third(SecondParent): + __slots__ = ('d', ) + ''') + cls = module['Third'] + slots = cls.slots() + self.assertEqual(sorted(set(slot.value for slot in slots)), + ['a', 'b', 'c', 'd', 'e']) + + def test_all_ancestors_need_slots(self): + module = builder.parse(''' + class A(object): + __slots__ = ('a', ) + class B(A): pass + class C(B): + __slots__ = ('a', ) + ''') + cls = module['C'] + self.assertIsNone(cls.slots()) + cls = module['B'] + self.assertIsNone(cls.slots()) + + def assertEqualMro(self, klass, expected_mro): + self.assertEqual( + [member.name for member in klass.mro()], + expected_mro) + + @test_utils.require_version(maxver='3.0') + def test_no_mro_for_old_style(self): + node = test_utils.extract_node(""" + class Old: pass""") + with self.assertRaises(NotImplementedError) as cm: + node.mro() + self.assertEqual(str(cm.exception), "Could not obtain mro for " + "old-style classes.") + + @test_utils.require_version(maxver='3.0') + def test_combined_newstyle_oldstyle_in_mro(self): + node = test_utils.extract_node(''' + class Old: + pass + class New(object): + pass + class New1(object): + pass + class New2(New, New1): + pass + class NewOld(New2, Old): #@ + pass + ''') + self.assertEqualMro(node, ['NewOld', 'New2', 'New', 'New1', 'object', 'Old']) + self.assertTrue(node.newstyle) + + def test_with_metaclass_mro(self): + astroid = builder.parse(""" + import six + + class C(object): + pass + class B(C): + pass + class A(six.with_metaclass(type, B)): + pass + """) + 
self.assertEqualMro(astroid['A'], ['A', 'B', 'C', 'object']) + + def test_mro(self): + astroid = builder.parse(""" + class C(object): pass + class D(dict, C): pass + + class A1(object): pass + class B1(A1): pass + class C1(A1): pass + class D1(B1, C1): pass + class E1(C1, B1): pass + class F1(D1, E1): pass + class G1(E1, D1): pass + + class Boat(object): pass + class DayBoat(Boat): pass + class WheelBoat(Boat): pass + class EngineLess(DayBoat): pass + class SmallMultihull(DayBoat): pass + class PedalWheelBoat(EngineLess, WheelBoat): pass + class SmallCatamaran(SmallMultihull): pass + class Pedalo(PedalWheelBoat, SmallCatamaran): pass + + class OuterA(object): + class Inner(object): + pass + class OuterB(OuterA): + class Inner(OuterA.Inner): + pass + class OuterC(OuterA): + class Inner(OuterA.Inner): + pass + class OuterD(OuterC): + class Inner(OuterC.Inner, OuterB.Inner): + pass + class Duplicates(str, str): pass + + """) + self.assertEqualMro(astroid['D'], ['D', 'dict', 'C', 'object']) + self.assertEqualMro(astroid['D1'], ['D1', 'B1', 'C1', 'A1', 'object']) + self.assertEqualMro(astroid['E1'], ['E1', 'C1', 'B1', 'A1', 'object']) + with self.assertRaises(InconsistentMroError) as cm: + astroid['F1'].mro() + self.assertEqual(str(cm.exception), + "Cannot create a consistent method resolution order " + "for bases (B1, C1, A1, object), " + "(C1, B1, A1, object)") + + with self.assertRaises(InconsistentMroError) as cm: + astroid['G1'].mro() + self.assertEqual(str(cm.exception), + "Cannot create a consistent method resolution order " + "for bases (C1, B1, A1, object), " + "(B1, C1, A1, object)") + + self.assertEqualMro( + astroid['PedalWheelBoat'], + ["PedalWheelBoat", "EngineLess", + "DayBoat", "WheelBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid["SmallCatamaran"], + ["SmallCatamaran", "SmallMultihull", "DayBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid["Pedalo"], + ["Pedalo", "PedalWheelBoat", "EngineLess", "SmallCatamaran", + 
"SmallMultihull", "DayBoat", "WheelBoat", "Boat", "object"]) + + self.assertEqualMro( + astroid['OuterD']['Inner'], + ['Inner', 'Inner', 'Inner', 'Inner', 'object']) + + with self.assertRaises(DuplicateBasesError) as cm: + astroid['Duplicates'].mro() + self.assertEqual(str(cm.exception), "Duplicates found in the mro.") + self.assertTrue(issubclass(cm.exception.__class__, MroError)) + self.assertTrue(issubclass(cm.exception.__class__, ResolveError)) + + def test_generator_from_infer_call_result_parent(self): + func = test_utils.extract_node(""" + import contextlib + + @contextlib.contextmanager + def test(): #@ + yield + """) + result = next(func.infer_call_result(func)) + self.assertIsInstance(result, Generator) + self.assertEqual(result.parent, func) + + def test_type_three_arguments(self): + classes = test_utils.extract_node(""" + type('A', (object, ), {"a": 1, "b": 2, missing: 3}) #@ + """) + first = next(classes.infer()) + self.assertIsInstance(first, nodes.ClassDef) + self.assertEqual(first.name, "A") + self.assertEqual(first.basenames, ["object"]) + self.assertIsInstance(first["a"], nodes.Const) + self.assertEqual(first["a"].value, 1) + self.assertIsInstance(first["b"], nodes.Const) + self.assertEqual(first["b"].value, 2) + with self.assertRaises(NotFoundError): + first.getattr("missing") + + def test_implicit_metaclass(self): + cls = test_utils.extract_node(""" + class A(object): + pass + """) + type_cls = scoped_nodes.builtin_lookup("type")[1][0] + self.assertEqual(cls.implicit_metaclass(), type_cls) + + @test_utils.require_version(maxver='3.0') + def test_implicit_metaclass_is_none(self): + cls = test_utils.extract_node(""" + class A: pass + """) + self.assertIsNone(cls.implicit_metaclass()) + + def test_local_attr_invalid_mro(self): + cls = test_utils.extract_node(""" + # A has an invalid MRO, local_attr should fallback + # to using .ancestors. 
+ class A(object, object): + test = 42 + class B(A): #@ + pass + """) + local = cls.local_attr('test')[0] + inferred = next(local.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_has_dynamic_getattr(self): + module = builder.parse(""" + class Getattr(object): + def __getattr__(self, attrname): + pass + + class Getattribute(object): + def __getattribute__(self, attrname): + pass + + class ParentGetattr(Getattr): + pass + """) + self.assertTrue(module['Getattr'].has_dynamic_getattr()) + self.assertTrue(module['Getattribute'].has_dynamic_getattr()) + self.assertTrue(module['ParentGetattr'].has_dynamic_getattr()) + + # Test that objects analyzed through the live introspection + # aren't considered to have dynamic getattr implemented. + import datetime + astroid_builder = builder.AstroidBuilder() + module = astroid_builder.module_build(datetime) + self.assertFalse(module['timedelta'].has_dynamic_getattr()) + + def test_duplicate_bases_namedtuple(self): + module = builder.parse(""" + import collections + _A = collections.namedtuple('A', 'a') + + class A(_A): pass + + class B(A): pass + """) + self.assertRaises(DuplicateBasesError, module['B'].mro) + + def test_instance_bound_method_lambdas(self): + ast_nodes = test_utils.extract_node(''' + class Test(object): #@ + lam = lambda self: self + not_method = lambda xargs: xargs + Test() #@ + ''') + cls = next(ast_nodes[0].infer()) + self.assertIsInstance(next(cls.igetattr('lam')), scoped_nodes.Lambda) + self.assertIsInstance(next(cls.igetattr('not_method')), scoped_nodes.Lambda) + + instance = next(ast_nodes[1].infer()) + lam = next(instance.igetattr('lam')) + self.assertIsInstance(lam, BoundMethod) + not_method = next(instance.igetattr('not_method')) + self.assertIsInstance(not_method, scoped_nodes.Lambda) + + def test_class_extra_decorators_frame_is_not_class(self): + ast_node = test_utils.extract_node(''' + def ala(): + def bala(): #@ + func = 42 + ''') + 
self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_callfunc_are_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + func = 42 + ''') + self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_assignment_names_are_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + def __init__(self): + self.func = staticmethod(func) + + ''') + self.assertEqual(ast_node.extra_decorators, []) + + def test_class_extra_decorators_only_same_name_considered(self): + ast_node = test_utils.extract_node(''' + class Ala(object): + def func(self): #@ + pass + bala = staticmethod(func) + ''') + self.assertEqual(ast_node.extra_decorators, []) + self.assertEqual(ast_node.type, 'method') + + def test_class_extra_decorators(self): + static_method, clsmethod = test_utils.extract_node(''' + class Ala(object): + def static(self): #@ + pass + def class_method(self): #@ + pass + class_method = classmethod(class_method) + static = staticmethod(static) + ''') + self.assertEqual(len(clsmethod.extra_decorators), 1) + self.assertEqual(clsmethod.type, 'classmethod') + self.assertEqual(len(static_method.extra_decorators), 1) + self.assertEqual(static_method.type, 'staticmethod') + + def test_extra_decorators_only_class_level_assignments(self): + node = test_utils.extract_node(''' + def _bind(arg): + return arg.bind + + class A(object): + @property + def bind(self): + return 42 + def irelevant(self): + # This is important, because it used to trigger + # a maximum recursion error. 
+ bind = _bind(self) + return bind + A() #@ + ''') + inferred = next(node.infer()) + bind = next(inferred.igetattr('bind')) + self.assertIsInstance(bind, nodes.Const) + self.assertEqual(bind.value, 42) + parent = bind.scope() + self.assertEqual(len(parent.extra_decorators), 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_transforms.py b/pymode/libs/astroid/tests/unittest_transforms.py new file mode 100644 index 00000000..1553bfc4 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_transforms.py @@ -0,0 +1,245 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . 
+ +from __future__ import print_function + +import contextlib +import time +import unittest + +from astroid import builder +from astroid import nodes +from astroid import parse +from astroid import transforms + + +@contextlib.contextmanager +def add_transform(manager, node, transform, predicate=None): + manager.register_transform(node, transform, predicate) + try: + yield + finally: + manager.unregister_transform(node, transform, predicate) + + +class TestTransforms(unittest.TestCase): + + def setUp(self): + self.transformer = transforms.TransformVisitor() + + def parse_transform(self, code): + module = parse(code, apply_transforms=False) + return self.transformer.visit(module) + + def test_function_inlining_transform(self): + def transform_call(node): + # Let's do some function inlining + inferred = next(node.infer()) + return inferred + + self.transformer.register_transform(nodes.Call, + transform_call) + + module = self.parse_transform(''' + def test(): return 42 + test() #@ + ''') + + self.assertIsInstance(module.body[1], nodes.Expr) + self.assertIsInstance(module.body[1].value, nodes.Const) + self.assertEqual(module.body[1].value.value, 42) + + def test_recursive_transforms_into_astroid_fields(self): + # Test that the transformer walks properly the tree + # by going recursively into the _astroid_fields per each node. + def transform_compare(node): + # Let's check the values of the ops + _, right = node.ops[0] + # Assume they are Consts and they were transformed before + # us. 
+ return nodes.const_factory(node.left.value < right.value) + + def transform_name(node): + # Should be Consts + return next(node.infer()) + + self.transformer.register_transform(nodes.Compare, transform_compare) + self.transformer.register_transform(nodes.Name, transform_name) + + module = self.parse_transform(''' + a = 42 + b = 24 + a < b + ''') + + self.assertIsInstance(module.body[2], nodes.Expr) + self.assertIsInstance(module.body[2].value, nodes.Const) + self.assertFalse(module.body[2].value.value) + + def test_transform_patches_locals(self): + def transform_function(node): + assign = nodes.Assign() + name = nodes.AssignName() + name.name = 'value' + assign.targets = [name] + assign.value = nodes.const_factory(42) + node.body.append(assign) + + self.transformer.register_transform(nodes.FunctionDef, + transform_function) + + module = self.parse_transform(''' + def test(): + pass + ''') + + func = module.body[0] + self.assertEqual(len(func.body), 2) + self.assertIsInstance(func.body[1], nodes.Assign) + self.assertEqual(func.body[1].as_string(), 'value = 42') + + def test_predicates(self): + def transform_call(node): + inferred = next(node.infer()) + return inferred + + def should_inline(node): + return node.func.name.startswith('inlineme') + + self.transformer.register_transform(nodes.Call, + transform_call, + should_inline) + + module = self.parse_transform(''' + def inlineme_1(): + return 24 + def dont_inline_me(): + return 42 + def inlineme_2(): + return 2 + inlineme_1() + dont_inline_me() + inlineme_2() + ''') + values = module.body[-3:] + self.assertIsInstance(values[0], nodes.Expr) + self.assertIsInstance(values[0].value, nodes.Const) + self.assertEqual(values[0].value.value, 24) + self.assertIsInstance(values[1], nodes.Expr) + self.assertIsInstance(values[1].value, nodes.Call) + self.assertIsInstance(values[2], nodes.Expr) + self.assertIsInstance(values[2].value, nodes.Const) + self.assertEqual(values[2].value.value, 2) + + def 
test_transforms_are_separated(self): + # Test that the transforming is done at a separate + # step, which means that we are not doing inference + # on a partially constructred tree anymore, which was the + # source of crashes in the past when certain inference rules + # were used in a transform. + def transform_function(node): + if node.decorators: + for decorator in node.decorators.nodes: + inferred = next(decorator.infer()) + if inferred.qname() == 'abc.abstractmethod': + return next(node.infer_call_result(node)) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + module = builder.parse(''' + import abc + from abc import abstractmethod + + class A(object): + @abc.abstractmethod + def ala(self): + return 24 + + @abstractmethod + def bala(self): + return 42 + ''') + + cls = module['A'] + ala = cls.body[0] + bala = cls.body[1] + self.assertIsInstance(ala, nodes.Const) + self.assertEqual(ala.value, 24) + self.assertIsInstance(bala, nodes.Const) + self.assertEqual(bala.value, 42) + + def test_transforms_are_called_for_builtin_modules(self): + # Test that transforms are called for builtin modules. 
+ def transform_function(node): + name = nodes.AssignName() + name.name = 'value' + node.args.args = [name] + return node + + manager = builder.MANAGER + predicate = lambda node: node.root().name == 'time' + with add_transform(manager, nodes.FunctionDef, + transform_function, predicate): + builder_instance = builder.AstroidBuilder() + module = builder_instance.module_build(time) + + asctime = module['asctime'] + self.assertEqual(len(asctime.args.args), 1) + self.assertIsInstance(asctime.args.args[0], nodes.AssignName) + self.assertEqual(asctime.args.args[0].name, 'value') + + def test_builder_apply_transforms(self): + def transform_function(node): + return nodes.const_factory(42) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + astroid_builder = builder.AstroidBuilder(apply_transforms=False) + module = astroid_builder.string_build('''def test(): pass''') + + # The transform wasn't applied. + self.assertIsInstance(module.body[0], nodes.FunctionDef) + + def test_transform_crashes_on_is_subtype_of(self): + # Test that we don't crash when having is_subtype_of + # in a transform, as per issue #188. This happened + # before, when the transforms weren't in their own step. + def transform_class(cls): + if cls.is_subtype_of('django.db.models.base.Model'): + return cls + return cls + + self.transformer.register_transform(nodes.ClassDef, + transform_class) + + self.parse_transform(''' + # Change environ to automatically call putenv() if it exists + import os + putenv = os.putenv + try: + # This will fail if there's no putenv + putenv + except NameError: + pass + else: + import UserDict + ''') + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/tests/unittest_utils.py b/pymode/libs/astroid/tests/unittest_utils.py new file mode 100644 index 00000000..ef832252 --- /dev/null +++ b/pymode/libs/astroid/tests/unittest_utils.py @@ -0,0 +1,124 @@ +# copyright 2003-2015 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +import unittest + +from astroid import builder +from astroid import InferenceError +from astroid import nodes +from astroid import node_classes +from astroid import test_utils +from astroid import util as astroid_util + + +class InferenceUtil(unittest.TestCase): + + def test_not_exclusive(self): + module = builder.parse(""" + x = 10 + for x in range(5): + print (x) + + if x > 0: + print ('#' * x) + """, __name__, __file__) + xass1 = module.locals['x'][0] + assert xass1.lineno == 2 + xnames = [n for n in module.nodes_of_class(nodes.Name) if n.name == 'x'] + assert len(xnames) == 3 + assert xnames[1].lineno == 6 + self.assertEqual(node_classes.are_exclusive(xass1, xnames[1]), False) + self.assertEqual(node_classes.are_exclusive(xass1, xnames[2]), False) + + def test_if(self): + module = builder.parse(''' + if 1: + a = 1 + a = 2 + elif 2: + a = 12 + a = 13 + else: + a = 3 + a = 4 + ''') + a1 = module.locals['a'][0] + a2 = module.locals['a'][1] + a3 = module.locals['a'][2] + a4 = module.locals['a'][3] + a5 = module.locals['a'][4] + a6 = module.locals['a'][5] + self.assertEqual(node_classes.are_exclusive(a1, a2), False) + self.assertEqual(node_classes.are_exclusive(a1, a3), True) + 
self.assertEqual(node_classes.are_exclusive(a1, a5), True) + self.assertEqual(node_classes.are_exclusive(a3, a5), True) + self.assertEqual(node_classes.are_exclusive(a3, a4), False) + self.assertEqual(node_classes.are_exclusive(a5, a6), False) + + def test_try_except(self): + module = builder.parse(''' + try: + def exclusive_func2(): + "docstring" + except TypeError: + def exclusive_func2(): + "docstring" + except: + def exclusive_func2(): + "docstring" + else: + def exclusive_func2(): + "this one redefine the one defined line 42" + ''') + f1 = module.locals['exclusive_func2'][0] + f2 = module.locals['exclusive_func2'][1] + f3 = module.locals['exclusive_func2'][2] + f4 = module.locals['exclusive_func2'][3] + self.assertEqual(node_classes.are_exclusive(f1, f2), True) + self.assertEqual(node_classes.are_exclusive(f1, f3), True) + self.assertEqual(node_classes.are_exclusive(f1, f4), False) + self.assertEqual(node_classes.are_exclusive(f2, f4), True) + self.assertEqual(node_classes.are_exclusive(f3, f4), True) + self.assertEqual(node_classes.are_exclusive(f3, f2), True) + + self.assertEqual(node_classes.are_exclusive(f2, f1), True) + self.assertEqual(node_classes.are_exclusive(f4, f1), False) + self.assertEqual(node_classes.are_exclusive(f4, f2), True) + + def test_unpack_infer_uninferable_nodes(self): + node = test_utils.extract_node(''' + x = [A] * 1 + f = [x, [A] * 2] + f + ''') + inferred = next(node.infer()) + unpacked = list(node_classes.unpack_infer(inferred)) + self.assertEqual(len(unpacked), 3) + self.assertTrue(all(elt is astroid_util.YES + for elt in unpacked)) + + def test_unpack_infer_empty_tuple(self): + node = test_utils.extract_node(''' + () + ''') + inferred = next(node.infer()) + with self.assertRaises(InferenceError): + list(node_classes.unpack_infer(inferred)) + + +if __name__ == '__main__': + unittest.main() diff --git a/pymode/libs/astroid/transforms.py b/pymode/libs/astroid/transforms.py new file mode 100644 index 00000000..5d8fc91b --- /dev/null 
+++ b/pymode/libs/astroid/transforms.py @@ -0,0 +1,96 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . + +import collections +import warnings + + +class TransformVisitor(object): + """A visitor for handling transforms. + + The standard approach of using it is to call + :meth:`~visit` with an *astroid* module and the class + will take care of the rest, walking the tree and running the + transforms for each encountered node. + """ + + def __init__(self): + self.transforms = collections.defaultdict(list) + + def _transform(self, node): + """Call matching transforms for the given node if any and return the + transformed node. 
+ """ + cls = node.__class__ + if cls not in self.transforms: + # no transform registered for this class of node + return node + + transforms = self.transforms[cls] + orig_node = node # copy the reference + for transform_func, predicate in transforms: + if predicate is None or predicate(node): + ret = transform_func(node) + # if the transformation function returns something, it's + # expected to be a replacement for the node + if ret is not None: + if node is not orig_node: + # node has already be modified by some previous + # transformation, warn about it + warnings.warn('node %s substituted multiple times' % node) + node = ret + return node + + def _visit(self, node): + if hasattr(node, '_astroid_fields'): + for field in node._astroid_fields: + value = getattr(node, field) + visited = self._visit_generic(value) + setattr(node, field, visited) + return self._transform(node) + + def _visit_generic(self, node): + if isinstance(node, list): + return [self._visit_generic(child) for child in node] + elif isinstance(node, tuple): + return tuple(self._visit_generic(child) for child in node) + else: + return self._visit(node) + + def register_transform(self, node_class, transform, predicate=None): + """Register `transform(node)` function to be applied on the given + astroid's `node_class` if `predicate` is None or returns true + when called with the node as argument. + + The transform function may return a value which is then used to + substitute the original node in the tree. + """ + self.transforms[node_class].append((transform, predicate)) + + def unregister_transform(self, node_class, transform, predicate=None): + """Unregister the given transform.""" + self.transforms[node_class].remove((transform, predicate)) + + def visit(self, module): + """Walk the given astroid *tree* and transform each encountered node + + Only the nodes which have transforms registered will actually + be replaced or changed. 
+ """ + module.body = [self._visit(child) for child in module.body] + return self._transform(module) diff --git a/pymode/libs/astroid/util.py b/pymode/libs/astroid/util.py new file mode 100644 index 00000000..44e2039d --- /dev/null +++ b/pymode/libs/astroid/util.py @@ -0,0 +1,89 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of astroid. +# +# astroid is free software: you can redistribute it and/or modify it +# under the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 2.1 of the License, or (at your +# option) any later version. +# +# astroid is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License +# for more details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with astroid. If not, see . +# +# The code in this file was originally part of logilab-common, licensed under +# the same license. 
+import warnings + +from astroid import exceptions + + +def generate_warning(message, warning): + return lambda *args: warnings.warn(message % args, warning, stacklevel=3) + +rename_warning = generate_warning( + "%r is deprecated and will be removed in astroid %.1f, use %r instead", + PendingDeprecationWarning) + +attribute_to_method_warning = generate_warning( + "%s is deprecated and will be removed in astroid %.1f, use the " + "method '%s()' instead.", PendingDeprecationWarning) + +attribute_to_function_warning = generate_warning( + "%s is deprecated and will be removed in astroid %.1f, use the " + "function '%s()' instead.", PendingDeprecationWarning) + +method_to_function_warning = generate_warning( + "%s() is deprecated and will be removed in astroid %.1f, use the " + "function '%s()' instead.", PendingDeprecationWarning) + + +class _Yes(object): + """Special inference object, which is returned when inference fails.""" + def __repr__(self): + return 'YES' + + __str__ = __repr__ + + def __getattribute__(self, name): + if name == 'next': + raise AttributeError('next method should not be called') + if name.startswith('__') and name.endswith('__'): + return super(_Yes, self).__getattribute__(name) + if name == 'accept': + return super(_Yes, self).__getattribute__(name) + return self + + def __call__(self, *args, **kwargs): + return self + + def accept(self, visitor): + func = getattr(visitor, "visit_yes") + return func(self) + + +YES = _Yes() + +def safe_infer(node, context=None): + """Return the inferred value for the given node. + + Return None if inference failed or if there is some ambiguity (more than + one node has been inferred). 
+ """ + try: + inferit = node.infer(context=context) + value = next(inferit) + except exceptions.InferenceError: + return + try: + next(inferit) + return # None if there is ambiguity on the inferred node + except exceptions.InferenceError: + return # there is some kind of ambiguity + except StopIteration: + return value diff --git a/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth b/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth new file mode 100644 index 00000000..0b1f79dd --- /dev/null +++ b/pymode/libs/backports.functools_lru_cache-1.3-py3.5-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('backports',));ie = os.path.exists(os.path.join(p,'__init__.py'));m = not ie and sys.modules.setdefault('backports', types.ModuleType('backports'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/pymode/libs/backports/configparser/__init__.py b/pymode/libs/backports/configparser/__init__.py new file mode 100644 index 00000000..06d7a085 --- /dev/null +++ b/pymode/libs/backports/configparser/__init__.py @@ -0,0 +1,1390 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Configuration file parser. + +A configuration file consists of sections, lead by a "[section]" header, +and followed by "name: value" entries, with continuations and such in +the style of RFC 822. + +Intrinsic defaults can be specified by passing them into the +ConfigParser constructor as a dictionary. + +class: + +ConfigParser -- responsible for parsing a list of + configuration files, and managing the parsed database. + + methods: + + __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, + delimiters=('=', ':'), comment_prefixes=('#', ';'), + inline_comment_prefixes=None, strict=True, + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=, converters=): + Create the parser. 
When `defaults' is given, it is initialized into the + dictionary or intrinsic defaults. The keys must be strings, the values + must be appropriate for %()s string interpolation. + + When `dict_type' is given, it will be used to create the dictionary + objects for the list of sections, for the options within a section, and + for the default values. + + When `delimiters' is given, it will be used as the set of substrings + that divide keys from values. + + When `comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in empty lines. Comments can be + indented. + + When `inline_comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in non-empty lines. + + When `strict` is True, the parser won't allow for any section or option + duplicates while reading from a single source (file, string or + dictionary). Default is True. + + When `empty_lines_in_values' is False (default: True), each empty line + marks the end of an option. Otherwise, internal empty lines of + a multiline option are kept as part of the value. + + When `allow_no_value' is True (default: False), options without + values are accepted; the value presented for these is None. + + sections() + Return all the configuration section names, sans DEFAULT. + + has_section(section) + Return whether the given section exists. + + has_option(section, option) + Return whether the given option exists in the given section. + + options(section) + Return list of configuration options for the named section. + + read(filenames, encoding=None) + Read and parse the list of named configuration files, given by + name. A single filename is also allowed. Non-existing files + are ignored. Return list of successfully read files. + + read_file(f, filename=None) + Read and parse one configuration file, given as a file object. + The filename defaults to f.name; it is only used in error + messages (if f has no `name' attribute, the string `' is used). 
+ + read_string(string) + Read configuration from a given string. + + read_dict(dictionary) + Read configuration from a dictionary. Keys are section names, + values are dictionaries with keys and values that should be present + in the section. If the used dictionary type preserves order, sections + and their keys will be added in order. Values are automatically + converted to strings. + + get(section, option, raw=False, vars=None, fallback=_UNSET) + Return a string value for the named option. All % interpolations are + expanded in the return values, based on the defaults passed into the + constructor and the DEFAULT section. Additional substitutions may be + provided using the `vars' argument, which must be a dictionary whose + contents override any pre-existing defaults. If `option' is a key in + `vars', the value from `vars' is used. + + getint(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to an integer. + + getfloat(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a float. + + getboolean(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a boolean (currently case + insensitively defined as 0, false, no, off for False, and 1, true, + yes, on for True). Returns False or True. + + items(section=_UNSET, raw=False, vars=None) + If section is given, return a list of tuples with (name, value) for + each option in the section. Otherwise, return a list of tuples with + (section_name, section_proxy) for each section, including DEFAULTSECT. + + remove_section(section) + Remove the given file section and all its options. + + remove_option(section, option) + Remove the given option from the given section. + + set(section, option, value) + Set the given option. + + write(fp, space_around_delimiters=True) + Write the configuration state in .ini format. 
If + `space_around_delimiters' is True (the default), delimiters + between keys and values are surrounded by spaces. +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from collections import MutableMapping +import functools +import io +import itertools +import re +import sys +import warnings + +from backports.configparser.helpers import OrderedDict as _default_dict +from backports.configparser.helpers import ChainMap as _ChainMap +from backports.configparser.helpers import from_none, open, str, PY2 + +__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", + "NoOptionError", "InterpolationError", "InterpolationDepthError", + "InterpolationMissingOptionError", "InterpolationSyntaxError", + "ParsingError", "MissingSectionHeaderError", + "ConfigParser", "SafeConfigParser", "RawConfigParser", + "Interpolation", "BasicInterpolation", "ExtendedInterpolation", + "LegacyInterpolation", "SectionProxy", "ConverterMapping", + "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] + +DEFAULTSECT = "DEFAULT" + +MAX_INTERPOLATION_DEPTH = 10 + + +# exception classes +class Error(Exception): + """Base class for ConfigParser exceptions.""" + + def __init__(self, msg=''): + self.message = msg + Exception.__init__(self, msg) + + def __repr__(self): + return self.message + + __str__ = __repr__ + + +class NoSectionError(Error): + """Raised when no section matches a requested option.""" + + def __init__(self, section): + Error.__init__(self, 'No section: %r' % (section,)) + self.section = section + self.args = (section, ) + + +class DuplicateSectionError(Error): + """Raised when a section is repeated in an input source. + + Possible repetitions that raise this exception are: multiple creation + using the API or in strict parsers when a section is found more than once + in a single input file, string or dictionary. 
+ """ + + def __init__(self, section, source=None, lineno=None): + msg = [repr(section), " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": section ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Section ") + Error.__init__(self, "".join(msg)) + self.section = section + self.source = source + self.lineno = lineno + self.args = (section, source, lineno) + + +class DuplicateOptionError(Error): + """Raised by strict parsers when an option is repeated in an input source. + + Current implementation raises this exception only when an option is found + more than once in a single file, string or dictionary. + """ + + def __init__(self, section, option, source=None, lineno=None): + msg = [repr(option), " in section ", repr(section), + " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": option ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Option ") + Error.__init__(self, "".join(msg)) + self.section = section + self.option = option + self.source = source + self.lineno = lineno + self.args = (section, option, source, lineno) + + +class NoOptionError(Error): + """A requested option was not found.""" + + def __init__(self, option, section): + Error.__init__(self, "No option %r in section: %r" % + (option, section)) + self.option = option + self.section = section + self.args = (option, section) + + +class InterpolationError(Error): + """Base class for interpolation-related exceptions.""" + + def __init__(self, option, section, msg): + Error.__init__(self, msg) + self.option = option + self.section = section + self.args = (option, section, msg) + + +class InterpolationMissingOptionError(InterpolationError): + """A string substitution required a setting which was not 
available.""" + + def __init__(self, option, section, rawval, reference): + msg = ("Bad value substitution: option {0!r} in section {1!r} contains " + "an interpolation key {2!r} which is not a valid option name. " + "Raw value: {3!r}".format(option, section, reference, rawval)) + InterpolationError.__init__(self, option, section, msg) + self.reference = reference + self.args = (option, section, rawval, reference) + + +class InterpolationSyntaxError(InterpolationError): + """Raised when the source text contains invalid syntax. + + Current implementation raises this exception when the source text into + which substitutions are made does not conform to the required syntax. + """ + + +class InterpolationDepthError(InterpolationError): + """Raised when substitutions are nested too deeply.""" + + def __init__(self, option, section, rawval): + msg = ("Recursion limit exceeded in value substitution: option {0!r} " + "in section {1!r} contains an interpolation key which " + "cannot be substituted in {2} steps. Raw value: {3!r}" + "".format(option, section, MAX_INTERPOLATION_DEPTH, + rawval)) + InterpolationError.__init__(self, option, section, msg) + self.args = (option, section, rawval) + + +class ParsingError(Error): + """Raised when a configuration file does not follow legal syntax.""" + + def __init__(self, source=None, filename=None): + # Exactly one of `source'/`filename' arguments has to be given. + # `filename' kept for compatibility. + if filename and source: + raise ValueError("Cannot specify both `filename' and `source'. " + "Use `source'.") + elif not filename and not source: + raise ValueError("Required argument `source' not given.") + elif filename: + source = filename + Error.__init__(self, 'Source contains parsing errors: %r' % source) + self.source = source + self.errors = [] + self.args = (source, ) + + @property + def filename(self): + """Deprecated, use `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. 
" + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + return self.source + + @filename.setter + def filename(self, value): + """Deprecated, user `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. " + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + self.source = value + + def append(self, lineno, line): + self.errors.append((lineno, line)) + self.message += '\n\t[line %2d]: %s' % (lineno, line) + + +class MissingSectionHeaderError(ParsingError): + """Raised when a key-value pair is found before any section header.""" + + def __init__(self, filename, lineno, line): + Error.__init__( + self, + 'File contains no section headers.\nfile: %r, line: %d\n%r' % + (filename, lineno, line)) + self.source = filename + self.lineno = lineno + self.line = line + self.args = (filename, lineno, line) + + +# Used in parser getters to indicate the default behaviour when a specific +# option is not found it to raise an exception. Created to enable `None' as +# a valid fallback value. +_UNSET = object() + + +class Interpolation(object): + """Dummy interpolation that passes the value through with no changes.""" + + def before_get(self, parser, section, option, value, defaults): + return value + + def before_set(self, parser, section, option, value): + return value + + def before_read(self, parser, section, option, value): + return value + + def before_write(self, parser, section, option, value): + return value + + +class BasicInterpolation(Interpolation): + """Interpolation as implemented in the classic ConfigParser. + + The option values can contain format strings which refer to other values in + the same section, or values in the special default section. + + For example: + + something: %(dir)s/whatever + + would resolve the "%(dir)s" to the value of dir. All reference + expansions are done late, on demand. If a user needs to use a bare % in + a configuration file, she can escape it by writing %%. 
Other % usage + is considered a user error and raises `InterpolationSyntaxError'.""" + + _KEYCRE = re.compile(r"%\(([^)]+)\)s") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('%%', '') # escaped percent signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '%' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('%'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("%") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "%": + accum.append("%") + rest = rest[2:] + elif c == "(": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + var = parser.optionxform(m.group(1)) + rest = rest[m.end():] + try: + v = map[var] + except KeyError: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, var)) + if "%" in v: + self._interpolate_some(parser, option, accum, v, + section, map, depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'%%' must be followed by '%%' or '(', " + "found: %r" % (rest,)) + + +class ExtendedInterpolation(Interpolation): + """Advanced variant of interpolation, supports the syntax used by + `zc.buildout'. 
Enables interpolation between sections.""" + + _KEYCRE = re.compile(r"\$\{([^}]+)\}") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('$$', '') # escaped dollar signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '$' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('$'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("$") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "$": + accum.append("$") + rest = rest[2:] + elif c == "{": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + path = m.group(1).split(':') + rest = rest[m.end():] + sect = section + opt = option + try: + if len(path) == 1: + opt = parser.optionxform(path[0]) + v = map[opt] + elif len(path) == 2: + sect = path[0] + opt = parser.optionxform(path[1]) + v = parser.get(sect, opt, raw=True) + else: + raise InterpolationSyntaxError( + option, section, + "More than one ':' found: %r" % (rest,)) + except (KeyError, NoSectionError, NoOptionError): + raise from_none(InterpolationMissingOptionError( + option, section, rawval, ":".join(path))) + if "$" in v: + self._interpolate_some(parser, opt, accum, v, sect, + dict(parser.items(sect, raw=True)), + depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'$' must be followed by '$' or '{', " + 
"found: %r" % (rest,)) + + +class LegacyInterpolation(Interpolation): + """Deprecated interpolation used in old versions of ConfigParser. + Use BasicInterpolation or ExtendedInterpolation instead.""" + + _KEYCRE = re.compile(r"%\(([^)]*)\)s|.") + + def before_get(self, parser, section, option, value, vars): + rawval = value + depth = MAX_INTERPOLATION_DEPTH + while depth: # Loop through this until it's done + depth -= 1 + if value and "%(" in value: + replace = functools.partial(self._interpolation_replace, + parser=parser) + value = self._KEYCRE.sub(replace, value) + try: + value = value % vars + except KeyError as e: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, e.args[0])) + else: + break + if value and "%(" in value: + raise InterpolationDepthError(option, section, rawval) + return value + + def before_set(self, parser, section, option, value): + return value + + @staticmethod + def _interpolation_replace(match, parser): + s = match.group(1) + if s is None: + return match.group() + else: + return "%%(%s)s" % parser.optionxform(s) + + +class RawConfigParser(MutableMapping): + """ConfigParser that does not do interpolation.""" + + # Regular expressions for parsing section headers and options + _SECT_TMPL = r""" + \[ # [ + (?P
    [^]]+) # very permissive! + \] # ] + """ + _OPT_TMPL = r""" + (?P