Commit 9f1f040b authored by vapier@chromium.org

pylint: upgrade to 1.4.0

This is largely a bugfix release, so it should be much easier to transition to.

BUG=chromium:431514
TEST=ran on some code bases and checked output

Review URL: https://codereview.chromium.org/753543006

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@293355 0039d316-1c4b-4281-b951-d872f2087c98
parent a64c0b08
 URL: http://www.logilab.org/project/logilab-astng
-Version: 1.2.1
+Version: 1.3.2
 License: GPL
 License File: LICENSE.txt
......
@@ -79,6 +79,9 @@ class AsStringRegexpPredicate(object):
     If specified, the second argument is an `attrgetter` expression that will be
     applied on the node first to get the actual node on which `as_string` should
     be called.
+
+    WARNING: This can be fairly slow, as it has to convert every AST node back
+    to Python code; you should consider examining the AST directly instead.
     """
     def __init__(self, regexp, expression=None):
         self.regexp = re.compile(regexp)
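Because the new docstring warning steers users away from AsStringRegexpPredicate, here is a hedged sketch of the AST-based alternative it recommends; it mirrors the looks_like_namedtuple helper added later in this change, and the node classes come from astroid.nodes:

from astroid import nodes

def looks_like_namedtuple_call(node):
    # Inspect the CallFunc node directly instead of regexp-matching as_string().
    func = node.func
    if isinstance(func, nodes.Getattr):   # e.g. collections.namedtuple(...)
        return func.attrname == 'namedtuple'
    if isinstance(func, nodes.Name):      # e.g. namedtuple(...)
        return func.name == 'namedtuple'
    return False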
@@ -98,13 +101,23 @@ def inference_tip(infer_function):
     .. sourcecode:: python

       MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
-                                 AsStringRegexpPredicate('namedtuple', 'func'))
+                                 predicate)
     """
     def transform(node, infer_function=infer_function):
         node._explicit_inference = infer_function
         return node
     return transform

+def register_module_extender(manager, module_name, get_extension_mod):
+    def transform(node):
+        extension_module = get_extension_mod()
+        for name, obj in extension_module.locals.items():
+            node.locals[name] = obj
+
+    manager.register_transform(Module, transform, lambda n: n.name == module_name)
+
 # load brain plugins
 from os import listdir
 from os.path import join, dirname
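For orientation, a hedged usage sketch of the new register_module_extender helper; the module name 'mymodule' and the stub attribute are hypothetical, but the pattern matches how the brain plugins below use it:

from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder

def mymodule_transform():
    # Return a throwaway module whose locals get merged into 'mymodule'.
    return AstroidBuilder(MANAGER).string_build('''
CONSTANT_ADDED_AT_RUNTIME = 42
''')

register_module_extender(MANAGER, 'mymodule', mymodule_transform)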
......
@@ -16,30 +16,23 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with astroid. If not, see <http://www.gnu.org/licenses/>.
 """astroid packaging information"""

 distname = 'astroid'
 modname = 'astroid'

-numversion = (1, 2, 1)
+numversion = (1, 3, 2)
 version = '.'.join([str(num) for num in numversion])

-install_requires = ['logilab-common >= 0.60.0']
+install_requires = ['logilab-common >= 0.60.0', 'six']
 license = 'LGPL'

 author = 'Logilab'
-author_email = 'python-projects@lists.logilab.org'
+author_email = 'pylint-dev@lists.logilab.org'
 mailinglist = "mailto://%s" % author_email
 web = 'http://bitbucket.org/logilab/astroid'
-description = "rebuild a new abstract syntax tree from Python's ast"
+description = "A abstract syntax tree for Python with inference support."

-from os.path import join
-include_dirs = ['brain',
-                join('test', 'regrtest_data'),
-                join('test', 'data'), join('test', 'data2')
-               ]

 classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
                "Topic :: Software Development :: Quality Assurance",
......
...@@ -148,7 +148,7 @@ class AsStringVisitor(object): ...@@ -148,7 +148,7 @@ class AsStringVisitor(object):
bases = bases and '(%s)' % bases or '' bases = bases and '(%s)' % bases or ''
else: else:
metaclass = node.metaclass() metaclass = node.metaclass()
if metaclass: if metaclass and not node.has_metaclass_hack():
if bases: if bases:
bases = '(%s, metaclass=%s)' % (bases, metaclass.name) bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
else: else:
......
...@@ -24,6 +24,8 @@ __docformat__ = "restructuredtext en" ...@@ -24,6 +24,8 @@ __docformat__ = "restructuredtext en"
import sys import sys
from contextlib import contextmanager from contextlib import contextmanager
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError, from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
UnresolvableName, UseInferenceDefault) UnresolvableName, UseInferenceDefault)
...@@ -56,63 +58,84 @@ class Proxy(object): ...@@ -56,63 +58,84 @@ class Proxy(object):
# Inference ################################################################## # Inference ##################################################################
MISSING = object()
class InferenceContext(object): class InferenceContext(object):
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode') __slots__ = ('path', 'callcontext', 'boundnode', 'infered')
def __init__(self, path=None): def __init__(self,
path=None, callcontext=None, boundnode=None, infered=None):
if path is None: if path is None:
self.path = set() self.path = frozenset()
else: else:
self.path = path self.path = path
self.lookupname = None self.callcontext = callcontext
self.callcontext = None self.boundnode = boundnode
self.boundnode = None if infered is None:
self.infered = {}
def push(self, node): else:
name = self.lookupname self.infered = infered
if (node, name) in self.path:
raise StopIteration() def push(self, key):
self.path.add((node, name)) # This returns a NEW context with the same attributes, but a new key
# added to `path`. The intention is that it's only passed to callees
def clone(self): # and then destroyed; otherwise scope() may not work correctly.
# XXX copy lookupname/callcontext ? # The cache will be shared, since it's the same exact dict.
clone = InferenceContext(self.path) if key in self.path:
clone.callcontext = self.callcontext # End the containing generator
clone.boundnode = self.boundnode raise StopIteration
return clone
return InferenceContext(
self.path.union([key]),
self.callcontext,
self.boundnode,
self.infered,
)
@contextmanager @contextmanager
def restore_path(self): def scope(self, callcontext=MISSING, boundnode=MISSING):
path = set(self.path) try:
yield orig = self.callcontext, self.boundnode
self.path = path if callcontext is not MISSING:
self.callcontext = callcontext
def copy_context(context): if boundnode is not MISSING:
if context is not None: self.boundnode = boundnode
return context.clone() yield
else: finally:
return InferenceContext() self.callcontext, self.boundnode = orig
def cache_generator(self, key, generator):
results = []
for result in generator:
results.append(result)
yield result
self.infered[key] = tuple(results)
return
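To make the intent of the rewritten InferenceContext clearer, a minimal usage sketch of the new immutable-path API; the key tuple below is a placeholder, real callers pass (node, lookupname) pairs:

from astroid.bases import InferenceContext

context = InferenceContext()
key = ('node-placeholder', 'name-placeholder')   # normally (node, lookupname)
child = context.push(key)        # returns a NEW context; the parent is untouched
assert key in child.path and key not in context.path
with child.scope(boundnode=None):
    pass                         # callcontext/boundnode are restored on exit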
def _infer_stmts(stmts, context, frame=None): def _infer_stmts(stmts, context, frame=None, lookupname=None):
"""return an iterator on statements inferred by each statement in <stmts> """return an iterator on statements inferred by each statement in <stmts>
""" """
stmt = None stmt = None
infered = False infered = False
if context is not None: if context is None:
name = context.lookupname
context = context.clone()
else:
name = None
context = InferenceContext() context = InferenceContext()
for stmt in stmts: for stmt in stmts:
if stmt is YES: if stmt is YES:
yield stmt yield stmt
infered = True infered = True
continue continue
context.lookupname = stmt._infer_name(frame, name)
kw = {}
infered_name = stmt._infer_name(frame, lookupname)
if infered_name is not None:
# only returns not None if .infer() accepts a lookupname kwarg
kw['lookupname'] = infered_name
try: try:
for infered in stmt.infer(context): for infered in stmt.infer(context, **kw):
yield infered yield infered
infered = True infered = True
except UnresolvableName: except UnresolvableName:
...@@ -170,20 +193,24 @@ class Instance(Proxy): ...@@ -170,20 +193,24 @@ class Instance(Proxy):
def igetattr(self, name, context=None): def igetattr(self, name, context=None):
"""inferred getattr""" """inferred getattr"""
if not context:
context = InferenceContext()
try: try:
# avoid recursively inferring the same attr on the same class # avoid recursively inferring the same attr on the same class
if context: new_context = context.push((self._proxied, name))
context.push((self._proxied, name))
# XXX frame should be self._proxied, or not ? # XXX frame should be self._proxied, or not ?
get_attr = self.getattr(name, context, lookupclass=False) get_attr = self.getattr(name, new_context, lookupclass=False)
return _infer_stmts(self._wrap_attr(get_attr, context), context, return _infer_stmts(
frame=self) self._wrap_attr(get_attr, new_context),
new_context,
frame=self,
)
except NotFoundError: except NotFoundError:
try: try:
# fallback to class'igetattr since it has some logic to handle # fallback to class'igetattr since it has some logic to handle
# descriptors # descriptors
return self._wrap_attr(self._proxied.igetattr(name, context), return self._wrap_attr(self._proxied.igetattr(name, context),
context) context)
except NotFoundError: except NotFoundError:
raise InferenceError(name) raise InferenceError(name)
...@@ -274,9 +301,9 @@ class BoundMethod(UnboundMethod): ...@@ -274,9 +301,9 @@ class BoundMethod(UnboundMethod):
return True return True
def infer_call_result(self, caller, context): def infer_call_result(self, caller, context):
context = context.clone() with context.scope(boundnode=self.bound):
context.boundnode = self.bound for infered in self._proxied.infer_call_result(caller, context):
return self._proxied.infer_call_result(caller, context) yield infered
class Generator(Instance): class Generator(Instance):
...@@ -308,7 +335,8 @@ def path_wrapper(func): ...@@ -308,7 +335,8 @@ def path_wrapper(func):
"""wrapper function handling context""" """wrapper function handling context"""
if context is None: if context is None:
context = InferenceContext() context = InferenceContext()
context.push(node) context = context.push((node, kwargs.get('lookupname')))
yielded = set() yielded = set()
for res in _func(node, context, **kwargs): for res in _func(node, context, **kwargs):
# unproxy only true instance, not const, tuple, dict... # unproxy only true instance, not const, tuple, dict...
...@@ -377,7 +405,15 @@ class NodeNG(object): ...@@ -377,7 +405,15 @@ class NodeNG(object):
return self._explicit_inference(self, context, **kwargs) return self._explicit_inference(self, context, **kwargs)
except UseInferenceDefault: except UseInferenceDefault:
pass pass
return self._infer(context, **kwargs)
if not context:
return self._infer(context, **kwargs)
key = (self, kwargs.get('lookupname'), context.callcontext, context.boundnode)
if key in context.infered:
return iter(context.infered[key])
return context.cache_generator(key, self._infer(context, **kwargs))
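As a hedged illustration of the new per-context inference cache (the snippet builds a throwaway module; the names are arbitrary):

from astroid import MANAGER
from astroid.builder import AstroidBuilder
from astroid.bases import InferenceContext

module = AstroidBuilder(MANAGER).string_build('x = 1 + 1')
binop = module.body[0].value          # the BinOp node for 1 + 1
context = InferenceContext()
first = list(binop.infer(context))    # runs _infer() and fills context.infered
second = list(binop.infer(context))   # same key, served from the cache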
def _repr_name(self): def _repr_name(self):
"""return self.name or self.attrname or '' for nice representation""" """return self.name or self.attrname or '' for nice representation"""
...@@ -415,7 +451,7 @@ class NodeNG(object): ...@@ -415,7 +451,7 @@ class NodeNG(object):
attr = getattr(self, field) attr = getattr(self, field)
if not attr: # None or empty listy / tuple if not attr: # None or empty listy / tuple
continue continue
if isinstance(attr, (list, tuple)): if attr.__class__ in (list, tuple):
return attr[-1] return attr[-1]
else: else:
return attr return attr
...@@ -506,16 +542,28 @@ class NodeNG(object): ...@@ -506,16 +542,28 @@ class NodeNG(object):
# FIXME: raise an exception if nearest is None ? # FIXME: raise an exception if nearest is None ?
return nearest[0] return nearest[0]
def set_line_info(self, lastchild): # these are lazy because they're relatively expensive to compute for every
# single node, and they rarely get looked at
@cachedproperty
def fromlineno(self):
if self.lineno is None: if self.lineno is None:
self.fromlineno = self._fixed_source_line() return self._fixed_source_line()
else:
return self.lineno
@cachedproperty
def tolineno(self):
if not self._astroid_fields:
# can't have children
lastchild = None
else: else:
self.fromlineno = self.lineno lastchild = self.last_child()
if lastchild is None: if lastchild is None:
self.tolineno = self.fromlineno return self.fromlineno
else: else:
self.tolineno = lastchild.tolineno return lastchild.tolineno
return
# TODO / FIXME: # TODO / FIXME:
assert self.fromlineno is not None, self assert self.fromlineno is not None, self
assert self.tolineno is not None, self assert self.tolineno is not None, self
...@@ -530,7 +578,7 @@ class NodeNG(object): ...@@ -530,7 +578,7 @@ class NodeNG(object):
_node = self _node = self
try: try:
while line is None: while line is None:
_node = _node.get_children().next() _node = next(_node.get_children())
line = _node.lineno line = _node.lineno
except StopIteration: except StopIteration:
_node = self.parent _node = self.parent
......
...@@ -4,6 +4,7 @@ Helps with understanding everything imported from 'gi.repository' ...@@ -4,6 +4,7 @@ Helps with understanding everything imported from 'gi.repository'
""" """
import inspect import inspect
import itertools
import sys import sys
import re import re
...@@ -111,40 +112,33 @@ def _gi_build_stub(parent): ...@@ -111,40 +112,33 @@ def _gi_build_stub(parent):
return ret return ret
# Overwrite Module.module_import to _actually_ import the introspected module if def _import_gi_module(modname):
# it's a gi module, then build stub code by examining its info and get an astng # we only consider gi.repository submodules
# from that if not modname.startswith('gi.repository.'):
raise AstroidBuildingException()
from astroid.scoped_nodes import Module
_orig_import_module = Module.import_module
def _new_import_module(self, modname, relative_only=False, level=None):
# Could be a static piece of gi.repository or whatever unrelated module,
# let that fall through
try:
return _orig_import_module(self, modname, relative_only, level)
except AstroidBuildingException:
# we only consider gi.repository submodules
if not modname.startswith('gi.repository.'):
if relative_only and level is None:
level = 0
modname = self.relative_to_absolute_name(modname, level)
if not modname.startswith('gi.repository.'):
raise
# build astroid representation unless we already tried so # build astroid representation unless we already tried so
if modname not in _inspected_modules: if modname not in _inspected_modules:
modnames = [modname] modnames = [modname]
# GLib and GObject have some special case handling optional_modnames = []
# in pygobject that we need to cope with
# GLib and GObject may have some special case handling
# in pygobject that we need to cope with. However at
# least as of pygobject3-3.13.91 the _glib module doesn't
# exist anymore, so if treat these modules as optional.
if modname == 'gi.repository.GLib': if modname == 'gi.repository.GLib':
modnames.append('gi._glib') optional_modnames.append('gi._glib')
elif modname == 'gi.repository.GObject': elif modname == 'gi.repository.GObject':
modnames.append('gi._gobject') optional_modnames.append('gi._gobject')
try: try:
modcode = '' modcode = ''
for m in modnames: for m in itertools.chain(modnames, optional_modnames):
__import__(m) try:
modcode += _gi_build_stub(sys.modules[m]) __import__(m)
modcode += _gi_build_stub(sys.modules[m])
except ImportError:
if m not in optional_modnames:
raise
except ImportError: except ImportError:
astng = _inspected_modules[modname] = None astng = _inspected_modules[modname] = None
else: else:
...@@ -156,4 +150,6 @@ def _new_import_module(self, modname, relative_only=False, level=None): ...@@ -156,4 +150,6 @@ def _new_import_module(self, modname, relative_only=False, level=None):
raise AstroidBuildingException('Failed to import module %r' % modname) raise AstroidBuildingException('Failed to import module %r' % modname)
return astng return astng
Module.import_module = _new_import_module
MANAGER.register_failed_import_hook(_import_gi_module)
from astroid import MANAGER from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder from astroid.builder import AstroidBuilder
def mechanize_transform(module): def mechanize_transform():
fake = AstroidBuilder(MANAGER).string_build(''' return AstroidBuilder(MANAGER).string_build('''
class Browser(object): class Browser(object):
def open(self, url, data=None, timeout=None): def open(self, url, data=None, timeout=None):
...@@ -13,8 +13,6 @@ class Browser(object): ...@@ -13,8 +13,6 @@ class Browser(object):
return None return None
''') ''')
module.locals['Browser'] = fake.locals['Browser']
import py2stdlib
py2stdlib.MODULE_TRANSFORMS['mechanize'] = mechanize_transform
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
"""Astroid hooks for pytest."""
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pytest_transform():
return AstroidBuilder(MANAGER).string_build('''
try:
import _pytest.mark
import _pytest.recwarn
import _pytest.runner
import _pytest.python
except ImportError:
pass
else:
deprecated_call = _pytest.recwarn.deprecated_call
exit = _pytest.runner.exit
fail = _pytest.runner.fail
fixture = _pytest.python.fixture
importorskip = _pytest.runner.importorskip
mark = _pytest.mark.MarkGenerator()
raises = _pytest.python.raises
skip = _pytest.runner.skip
yield_fixture = _pytest.python.yield_fixture
''')
register_module_extender(MANAGER, 'pytest', pytest_transform)
register_module_extender(MANAGER, 'py.test', pytest_transform)
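A hedged sanity check of the new plugin's effect; it assumes pytest is installed in the interpreter astroid introspects:

from astroid import MANAGER

pytest_node = MANAGER.ast_from_module_name('pytest')
# The extender merged the stub's locals, so these names now resolve statically.
print([name for name in ('raises', 'skip', 'fixture', 'mark') if name in pytest_node.locals])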
...@@ -5,21 +5,18 @@ Currently help understanding of : ...@@ -5,21 +5,18 @@ Currently help understanding of :
* PyQT4.QtCore * PyQT4.QtCore
""" """
from astroid import MANAGER from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder from astroid.builder import AstroidBuilder
def pyqt4_qtcore_transform(module): def pyqt4_qtcore_transform():
fake = AstroidBuilder(MANAGER).string_build(''' return AstroidBuilder(MANAGER).string_build('''
def SIGNAL(signal_name): pass def SIGNAL(signal_name): pass
class QObject(object): class QObject(object):
def emit(self, signal): pass def emit(self, signal): pass
''') ''')
for klass in ('QObject',):
module.locals[klass] = fake.locals[klass]
import py2stdlib register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
py2stdlib.MODULE_TRANSFORMS['PyQt4.QtCore'] = pyqt4_qtcore_transform
...@@ -11,12 +11,11 @@ from textwrap import dedent ...@@ -11,12 +11,11 @@ from textwrap import dedent
from astroid import ( from astroid import (
MANAGER, AsStringRegexpPredicate, MANAGER, AsStringRegexpPredicate,
UseInferenceDefault, inference_tip, UseInferenceDefault, inference_tip,
YES, InferenceError) YES, InferenceError, register_module_extender)
from astroid import exceptions from astroid import exceptions
from astroid import nodes from astroid import nodes
from astroid.builder import AstroidBuilder from astroid.builder import AstroidBuilder
MODULE_TRANSFORMS = {}
PY3K = sys.version_info > (3, 0) PY3K = sys.version_info > (3, 0)
PY33 = sys.version_info >= (3, 3) PY33 = sys.version_info >= (3, 3)
...@@ -26,7 +25,7 @@ def infer_func_form(node, base_type, context=None, enum=False): ...@@ -26,7 +25,7 @@ def infer_func_form(node, base_type, context=None, enum=False):
"""Specific inference function for namedtuple or Python 3 enum. """ """Specific inference function for namedtuple or Python 3 enum. """
def infer_first(node): def infer_first(node):
try: try:
value = node.infer(context=context).next() value = next(node.infer(context=context))
if value is YES: if value is YES:
raise UseInferenceDefault() raise UseInferenceDefault()
else: else:
...@@ -90,39 +89,31 @@ def infer_func_form(node, base_type, context=None, enum=False): ...@@ -90,39 +89,31 @@ def infer_func_form(node, base_type, context=None, enum=False):
# module specific transformation functions ##################################### # module specific transformation functions #####################################
def transform(module): def hashlib_transform():
try:
tr = MODULE_TRANSFORMS[module.name]
except KeyError:
pass
else:
tr(module)
MANAGER.register_transform(nodes.Module, transform)
# module specific transformation functions #####################################
def hashlib_transform(module):
template = ''' template = '''
class %s(object): class %(name)s(object):
def __init__(self, value=''): pass def __init__(self, value=''): pass
def digest(self): def digest(self):
return u'' return %(digest)s
def copy(self):
return self
def update(self, value): pass def update(self, value): pass
def hexdigest(self): def hexdigest(self):
return u'' return ''
@property
def name(self):
return %(name)r
''' '''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(template % hashfunc for hashfunc in algorithms) classes = "".join(
template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'}
for hashfunc in algorithms)
return AstroidBuilder(MANAGER).string_build(classes)
fake = AstroidBuilder(MANAGER).string_build(classes)
for hashfunc in algorithms: def collections_transform():
module.locals[hashfunc] = fake.locals[hashfunc] return AstroidBuilder(MANAGER).string_build('''
def collections_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict): class defaultdict(dict):
default_factory = None default_factory = None
...@@ -146,11 +137,9 @@ class deque(object): ...@@ -146,11 +137,9 @@ class deque(object):
''') ''')
for klass in ('deque', 'defaultdict'):
module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module): def pkg_resources_transform():
fake = AstroidBuilder(MANAGER).string_build(''' return AstroidBuilder(MANAGER).string_build('''
def resource_exists(package_or_requirement, resource_name): def resource_exists(package_or_requirement, resource_name):
pass pass
...@@ -187,11 +176,8 @@ def cleanup_resources(force=False): ...@@ -187,11 +176,8 @@ def cleanup_resources(force=False):
''') ''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def subprocess_transform():
def subprocess_transform(module):
if PY3K: if PY3K:
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
init = """ init = """
...@@ -217,7 +203,7 @@ def subprocess_transform(module): ...@@ -217,7 +203,7 @@ def subprocess_transform(module):
wait_signature = 'def wait(self, timeout=None)' wait_signature = 'def wait(self, timeout=None)'
else: else:
wait_signature = 'def wait(self)' wait_signature = 'def wait(self)'
fake = AstroidBuilder(MANAGER).string_build(''' return AstroidBuilder(MANAGER).string_build('''
class Popen(object): class Popen(object):
returncode = pid = 0 returncode = pid = 0
...@@ -241,18 +227,17 @@ class Popen(object): ...@@ -241,18 +227,17 @@ class Popen(object):
'communicate': communicate, 'communicate': communicate,
'wait_signature': wait_signature}) 'wait_signature': wait_signature})
for func_name, func in fake.locals.items():
module.locals[func_name] = func
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
# namedtuple support ########################################################### # namedtuple support ###########################################################
def looks_like_namedtuple(node):
func = node.func
if type(func) is nodes.Getattr:
return func.attrname == 'namedtuple'
if type(func) is nodes.Name:
return func.name == 'namedtuple'
return False
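For reference, illustrative call shapes that the new looks_like_namedtuple predicate accepts (plain snippets, not part of the commit):

import collections
from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])              # func is a Name node
Coord = collections.namedtuple('Coord', 'lat lon')   # func is a Getattr node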
def infer_named_tuple(node, context=None): def infer_named_tuple(node, context=None):
"""Specific inference function for namedtuple CallFunc node""" """Specific inference function for namedtuple CallFunc node"""
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
...@@ -285,11 +270,12 @@ def infer_enum(node, context=None): ...@@ -285,11 +270,12 @@ def infer_enum(node, context=None):
context=context, enum=True)[0] context=context, enum=True)[0]
return iter([class_node.instanciate_class()]) return iter([class_node.instanciate_class()])
def infer_enum_class(node, context=None): def infer_enum_class(node):
""" Specific inference for enums. """ """ Specific inference for enums. """
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
for basename in node.basenames: for basename in node.basenames:
# TODO: doesn't handle subclasses yet. # TODO: doesn't handle subclasses yet. This implementation
# is a hack to support enums.
if basename not in names: if basename not in names:
continue continue
if node.root().name == 'enum': if node.root().name == 'enum':
...@@ -299,22 +285,26 @@ def infer_enum_class(node, context=None): ...@@ -299,22 +285,26 @@ def infer_enum_class(node, context=None):
if any(not isinstance(value, nodes.AssName) if any(not isinstance(value, nodes.AssName)
for value in values): for value in values):
continue continue
parent = values[0].parent
real_value = parent.value stmt = values[0].statement()
if isinstance(stmt.targets[0], nodes.Tuple):
targets = stmt.targets[0].itered()
else:
targets = stmt.targets
new_targets = [] new_targets = []
for target in parent.targets: for target in targets:
# Replace all the assignments with our mocked class. # Replace all the assignments with our mocked class.
classdef = dedent(''' classdef = dedent('''
class %(name)s(object): class %(name)s(object):
@property @property
def value(self): def value(self):
return %(value)s # Not the best return.
return None
@property @property
def name(self): def name(self):
return %(name)r return %(name)r
%(name)s = %(value)s ''' % {'name': target.name})
''' % {'name': target.name,
'value': real_value.as_string()})
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
fake.parent = target.parent fake.parent = target.parent
for method in node.mymethods(): for method in node.mymethods():
...@@ -324,8 +314,13 @@ def infer_enum_class(node, context=None): ...@@ -324,8 +314,13 @@ def infer_enum_class(node, context=None):
break break
return node return node
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple), MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func')) looks_like_namedtuple)
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum), MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
AsStringRegexpPredicate('Enum', 'func')) AsStringRegexpPredicate('Enum', 'func'))
MANAGER.register_transform(nodes.Class, infer_enum_class) MANAGER.register_transform(nodes.Class, infer_enum_class)
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
register_module_extender(MANAGER, 'collections', collections_transform)
register_module_extender(MANAGER, 'pkg_resourcds', pkg_resources_transform)
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
...@@ -44,7 +44,7 @@ if sys.version_info >= (3, 0): ...@@ -44,7 +44,7 @@ if sys.version_info >= (3, 0):
def open_source_file(filename): def open_source_file(filename):
with open(filename, 'rb') as byte_stream: with open(filename, 'rb') as byte_stream:
encoding = detect_encoding(byte_stream.readline)[0] encoding = detect_encoding(byte_stream.readline)[0]
stream = open(filename, 'rU', encoding=encoding) stream = open(filename, 'r', newline=None, encoding=encoding)
try: try:
data = stream.read() data = stream.read()
except UnicodeError: # wrong encodingg except UnicodeError: # wrong encodingg
...@@ -115,23 +115,24 @@ class AstroidBuilder(InspectBuilder): ...@@ -115,23 +115,24 @@ class AstroidBuilder(InspectBuilder):
path is expected to be a python source file path is expected to be a python source file
""" """
try: try:
_, encoding, data = open_source_file(path) stream, encoding, data = open_source_file(path)
except IOError, exc: except IOError as exc:
msg = 'Unable to load file %r (%s)' % (path, exc) msg = 'Unable to load file %r (%s)' % (path, exc)
raise AstroidBuildingException(msg) raise AstroidBuildingException(msg)
except SyntaxError, exc: # py3k encoding specification error except SyntaxError as exc: # py3k encoding specification error
raise AstroidBuildingException(exc) raise AstroidBuildingException(exc)
except LookupError, exc: # unknown encoding except LookupError as exc: # unknown encoding
raise AstroidBuildingException(exc) raise AstroidBuildingException(exc)
# get module name if necessary with stream:
if modname is None: # get module name if necessary
try: if modname is None:
modname = '.'.join(modpath_from_file(path)) try:
except ImportError: modname = '.'.join(modpath_from_file(path))
modname = splitext(basename(path))[0] except ImportError:
# build astroid representation modname = splitext(basename(path))[0]
module = self._data_build(data, modname, path) # build astroid representation
return self._post_build(module, encoding) module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
def string_build(self, data, modname='', path=None): def string_build(self, data, modname='', path=None):
"""build astroid from source code string and return rebuilded astroid""" """build astroid from source code string and return rebuilded astroid"""
...@@ -159,7 +160,10 @@ class AstroidBuilder(InspectBuilder): ...@@ -159,7 +160,10 @@ class AstroidBuilder(InspectBuilder):
def _data_build(self, data, modname, path): def _data_build(self, data, modname, path):
"""build tree node from data and add some informations""" """build tree node from data and add some informations"""
# this method could be wrapped with a pickle/cache function # this method could be wrapped with a pickle/cache function
node = parse(data + '\n') try:
node = parse(data + '\n')
except TypeError as exc:
raise AstroidBuildingException(exc)
if path is not None: if path is not None:
node_file = abspath(path) node_file = abspath(path)
else: else:
...@@ -170,8 +174,7 @@ class AstroidBuilder(InspectBuilder): ...@@ -170,8 +174,7 @@ class AstroidBuilder(InspectBuilder):
else: else:
package = path and path.find('__init__.py') > -1 or False package = path and path.find('__init__.py') > -1 or False
rebuilder = TreeRebuilder(self._manager) rebuilder = TreeRebuilder(self._manager)
module = rebuilder.visit_module(node, modname, package) module = rebuilder.visit_module(node, modname, node_file, package)
module.file = module.path = node_file
module._from_nodes = rebuilder._from_nodes module._from_nodes = rebuilder._from_nodes
module._delayed_assattr = rebuilder._delayed_assattr module._delayed_assattr = rebuilder._delayed_assattr
return module return module
......
...@@ -28,7 +28,7 @@ from astroid.manager import AstroidManager ...@@ -28,7 +28,7 @@ from astroid.manager import AstroidManager
from astroid.exceptions import (AstroidError, InferenceError, NoDefault, from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
NotFoundError, UnresolvableName) NotFoundError, UnresolvableName)
from astroid.bases import (YES, Instance, InferenceContext, from astroid.bases import (YES, Instance, InferenceContext,
_infer_stmts, copy_context, path_wrapper, _infer_stmts, path_wrapper,
raise_if_nothing_infered) raise_if_nothing_infered)
from astroid.protocols import ( from astroid.protocols import (
_arguments_infer_argname, _arguments_infer_argname,
...@@ -175,93 +175,89 @@ def infer_name(self, context=None): ...@@ -175,93 +175,89 @@ def infer_name(self, context=None):
if not stmts: if not stmts:
raise UnresolvableName(self.name) raise UnresolvableName(self.name)
context = context.clone() return _infer_stmts(stmts, context, frame, self.name)
context.lookupname = self.name
return _infer_stmts(stmts, context, frame)
nodes.Name._infer = path_wrapper(infer_name) nodes.Name._infer = path_wrapper(infer_name)
nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
def infer_callfunc(self, context=None): def infer_callfunc(self, context=None):
"""infer a CallFunc node by trying to guess what the function returns""" """infer a CallFunc node by trying to guess what the function returns"""
callcontext = context.clone() if context is None:
callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs) context = InferenceContext()
callcontext.boundnode = None
for callee in self.func.infer(context): for callee in self.func.infer(context):
if callee is YES: with context.scope(
yield callee callcontext=CallContext(self.args, self.starargs, self.kwargs),
continue boundnode=None,
try: ):
if hasattr(callee, 'infer_call_result'): if callee is YES:
for infered in callee.infer_call_result(self, callcontext): yield callee
yield infered continue
except InferenceError: try:
## XXX log error ? if hasattr(callee, 'infer_call_result'):
continue for infered in callee.infer_call_result(self, context):
yield infered
except InferenceError:
## XXX log error ?
continue
nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc)) nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
def infer_import(self, context=None, asname=True): def infer_import(self, context=None, asname=True, lookupname=None):
"""infer an Import node: return the imported module/object""" """infer an Import node: return the imported module/object"""
name = context.lookupname if lookupname is None:
if name is None:
raise InferenceError() raise InferenceError()
if asname: if asname:
yield self.do_import_module(self.real_name(name)) yield self.do_import_module(self.real_name(lookupname))
else: else:
yield self.do_import_module(name) yield self.do_import_module(lookupname)
nodes.Import._infer = path_wrapper(infer_import) nodes.Import._infer = path_wrapper(infer_import)
def infer_name_module(self, name): def infer_name_module(self, name):
context = InferenceContext() context = InferenceContext()
context.lookupname = name return self.infer(context, asname=False, lookupname=name)
return self.infer(context, asname=False)
nodes.Import.infer_name_module = infer_name_module nodes.Import.infer_name_module = infer_name_module
def infer_from(self, context=None, asname=True): def infer_from(self, context=None, asname=True, lookupname=None):
"""infer a From nodes: return the imported module/object""" """infer a From nodes: return the imported module/object"""
name = context.lookupname if lookupname is None:
if name is None:
raise InferenceError() raise InferenceError()
if asname: if asname:
name = self.real_name(name) lookupname = self.real_name(lookupname)
module = self.do_import_module() module = self.do_import_module()
try: try:
context = copy_context(context) return _infer_stmts(module.getattr(lookupname, ignore_locals=module is self.root()), context, lookupname=lookupname)
context.lookupname = name
return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
except NotFoundError: except NotFoundError:
raise InferenceError(name) raise InferenceError(lookupname)
nodes.From._infer = path_wrapper(infer_from) nodes.From._infer = path_wrapper(infer_from)
def infer_getattr(self, context=None): def infer_getattr(self, context=None):
"""infer a Getattr node by using getattr on the associated object""" """infer a Getattr node by using getattr on the associated object"""
#context = context.clone() if not context:
context = InferenceContext()
for owner in self.expr.infer(context): for owner in self.expr.infer(context):
if owner is YES: if owner is YES:
yield owner yield owner
continue continue
try: try:
context.boundnode = owner with context.scope(boundnode=owner):
for obj in owner.igetattr(self.attrname, context): for obj in owner.igetattr(self.attrname, context):
yield obj yield obj
context.boundnode = None
except (NotFoundError, InferenceError): except (NotFoundError, InferenceError):
context.boundnode = None pass
except AttributeError: except AttributeError:
# XXX method / function # XXX method / function
context.boundnode = None pass
nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr)) nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper
def infer_global(self, context=None): def infer_global(self, context=None, lookupname=None):
if context.lookupname is None: if lookupname is None:
raise InferenceError() raise InferenceError()
try: try:
return _infer_stmts(self.root().getattr(context.lookupname), context) return _infer_stmts(self.root().getattr(lookupname), context)
except NotFoundError: except NotFoundError:
raise InferenceError() raise InferenceError()
nodes.Global._infer = path_wrapper(infer_global) nodes.Global._infer = path_wrapper(infer_global)
...@@ -269,12 +265,12 @@ nodes.Global._infer = path_wrapper(infer_global) ...@@ -269,12 +265,12 @@ nodes.Global._infer = path_wrapper(infer_global)
def infer_subscript(self, context=None): def infer_subscript(self, context=None):
"""infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]""" """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
value = self.value.infer(context).next() value = next(self.value.infer(context))
if value is YES: if value is YES:
yield YES yield YES
return return
index = self.slice.infer(context).next() index = next(self.slice.infer(context))
if index is YES: if index is YES:
yield YES yield YES
return return
...@@ -287,6 +283,12 @@ def infer_subscript(self, context=None): ...@@ -287,6 +283,12 @@ def infer_subscript(self, context=None):
except (IndexError, TypeError): except (IndexError, TypeError):
yield YES yield YES
return return
# Prevent inferring if the infered subscript
# is the same as the original subscripted object.
if self is assigned:
yield YES
return
for infered in assigned.infer(context): for infered in assigned.infer(context):
yield infered yield infered
else: else:
...@@ -347,11 +349,10 @@ def infer_binop(self, context=None): ...@@ -347,11 +349,10 @@ def infer_binop(self, context=None):
nodes.BinOp._infer = path_wrapper(infer_binop) nodes.BinOp._infer = path_wrapper(infer_binop)
def infer_arguments(self, context=None): def infer_arguments(self, context=None, lookupname=None):
name = context.lookupname if lookupname is None:
if name is None:
raise InferenceError() raise InferenceError()
return _arguments_infer_argname(self, name, context) return _arguments_infer_argname(self, lookupname, context)
nodes.Arguments._infer = infer_arguments nodes.Arguments._infer = infer_arguments
......

...@@ -18,16 +18,18 @@ ...@@ -18,16 +18,18 @@
"""This module contains some mixins for the different nodes. """This module contains some mixins for the different nodes.
""" """
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (AstroidBuildingException, InferenceError, from astroid.exceptions import (AstroidBuildingException, InferenceError,
NotFoundError) NotFoundError)
class BlockRangeMixIn(object): class BlockRangeMixIn(object):
"""override block range """ """override block range """
def set_line_info(self, lastchild):
self.fromlineno = self.lineno @cachedproperty
self.tolineno = lastchild.tolineno def blockstart_tolineno(self):
self.blockstart_tolineno = self._blockstart_toline() return self.lineno
def _elsed_block_range(self, lineno, orelse, last=None): def _elsed_block_range(self, lineno, orelse, last=None):
"""handle block line numbers range for try/finally, for, if and while """handle block line numbers range for try/finally, for, if and while
...@@ -105,7 +107,7 @@ class FromImportMixIn(FilterStmtsMixin): ...@@ -105,7 +107,7 @@ class FromImportMixIn(FilterStmtsMixin):
return mymodule.import_module(modname, level=level) return mymodule.import_module(modname, level=level)
except AstroidBuildingException: except AstroidBuildingException:
raise InferenceError(modname) raise InferenceError(modname)
except SyntaxError, ex: except SyntaxError as ex:
raise InferenceError(str(ex)) raise InferenceError(str(ex))
def real_name(self, asname): def real_name(self, asname):
......
...@@ -20,6 +20,9 @@ ...@@ -20,6 +20,9 @@
import sys import sys
import six
from logilab.common.decorators import cachedproperty
from astroid.exceptions import NoDefault from astroid.exceptions import NoDefault
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext, from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
_infer_stmts, YES, BUILTINS) _infer_stmts, YES, BUILTINS)
...@@ -39,7 +42,7 @@ def unpack_infer(stmt, context=None): ...@@ -39,7 +42,7 @@ def unpack_infer(stmt, context=None):
yield infered_elt yield infered_elt
return return
# if infered is a final node, return it and stop # if infered is a final node, return it and stop
infered = stmt.infer(context).next() infered = next(stmt.infer(context))
if infered is stmt: if infered is stmt:
yield infered yield infered
return return
...@@ -127,8 +130,7 @@ class LookupMixIn(object): ...@@ -127,8 +130,7 @@ class LookupMixIn(object):
the lookup method the lookup method
""" """
frame, stmts = self.lookup(name) frame, stmts = self.lookup(name)
context = InferenceContext() return _infer_stmts(stmts, None, frame)
return _infer_stmts(stmts, context, frame)
def _filter_stmts(self, stmts, frame, offset): def _filter_stmts(self, stmts, frame, offset):
"""filter statements to remove ignorable statements. """filter statements to remove ignorable statements.
...@@ -146,6 +148,20 @@ class LookupMixIn(object): ...@@ -146,6 +148,20 @@ class LookupMixIn(object):
myframe = self.frame().parent.frame() myframe = self.frame().parent.frame()
else: else:
myframe = self.frame() myframe = self.frame()
# If the frame of this node is the same as the statement
# of this node, then the node is part of a class or
# a function definition and the frame of this node should be the
# the upper frame, not the frame of the definition.
# For more information why this is important,
# see Pylint issue #295.
# For example, for 'b', the statement is the same
# as the frame / scope:
#
# def test(b=1):
# ...
if self.statement() is myframe and myframe.parent:
myframe = myframe.parent.frame()
if not myframe is frame or self is frame: if not myframe is frame or self is frame:
return stmts return stmts
mystmt = self.statement() mystmt = self.statement()
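A hedged illustration of the scenario the comment above describes (names are made up): when astroid infers the default of b, the lookup of the name default has to be filtered against the module frame rather than the function's own frame, which is what the parent-frame hop above ensures.

# Illustrative only; see the comment referencing Pylint issue #295 above.
default = 1

def test(b=default):
    return b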
...@@ -289,6 +305,11 @@ class Arguments(NodeNG, AssignTypeMixin): ...@@ -289,6 +305,11 @@ class Arguments(NodeNG, AssignTypeMixin):
return name return name
return None return None
@cachedproperty
def fromlineno(self):
lineno = super(Arguments, self).fromlineno
return max(lineno, self.parent.fromlineno)
def format_args(self): def format_args(self):
"""return arguments formatted as string""" """return arguments formatted as string"""
result = [] result = []
...@@ -475,7 +496,7 @@ class Const(NodeNG, Instance): ...@@ -475,7 +496,7 @@ class Const(NodeNG, Instance):
self.value = value self.value = value
def getitem(self, index, context=None): def getitem(self, index, context=None):
if isinstance(self.value, basestring): if isinstance(self.value, six.string_types):
return Const(self.value[index]) return Const(self.value[index])
raise TypeError('%r (value=%s)' % (self, self.value)) raise TypeError('%r (value=%s)' % (self, self.value))
...@@ -483,7 +504,7 @@ class Const(NodeNG, Instance): ...@@ -483,7 +504,7 @@ class Const(NodeNG, Instance):
return False return False
def itered(self): def itered(self):
if isinstance(self.value, basestring): if isinstance(self.value, six.string_types):
return self.value return self.value
raise TypeError() raise TypeError()
...@@ -528,7 +549,7 @@ class Dict(NodeNG, Instance): ...@@ -528,7 +549,7 @@ class Dict(NodeNG, Instance):
self.items = [] self.items = []
else: else:
self.items = [(const_factory(k), const_factory(v)) self.items = [(const_factory(k), const_factory(v))
for k, v in items.iteritems()] for k, v in items.items()]
def pytype(self): def pytype(self):
return '%s.dict' % BUILTINS return '%s.dict' % BUILTINS
...@@ -583,7 +604,8 @@ class ExceptHandler(Statement, AssignTypeMixin): ...@@ -583,7 +604,8 @@ class ExceptHandler(Statement, AssignTypeMixin):
name = None name = None
body = None body = None
def _blockstart_toline(self): @cachedproperty
def blockstart_tolineno(self):
if self.name: if self.name:
return self.name.tolineno return self.name.tolineno
elif self.type: elif self.type:
...@@ -591,11 +613,6 @@ class ExceptHandler(Statement, AssignTypeMixin): ...@@ -591,11 +613,6 @@ class ExceptHandler(Statement, AssignTypeMixin):
else: else:
return self.lineno return self.lineno
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self._blockstart_toline()
def catch(self, exceptions): def catch(self, exceptions):
if self.type is None or exceptions is None: if self.type is None or exceptions is None:
return True return True
...@@ -626,7 +643,8 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement): ...@@ -626,7 +643,8 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement):
orelse = None orelse = None
optional_assign = True optional_assign = True
def _blockstart_toline(self): @cachedproperty
def blockstart_tolineno(self):
return self.iter.tolineno return self.iter.tolineno
...@@ -661,7 +679,8 @@ class If(BlockRangeMixIn, Statement): ...@@ -661,7 +679,8 @@ class If(BlockRangeMixIn, Statement):
body = None body = None
orelse = None orelse = None
def _blockstart_toline(self): @cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno return self.test.tolineno
def block_range(self, lineno): def block_range(self, lineno):
...@@ -812,9 +831,6 @@ class TryExcept(BlockRangeMixIn, Statement): ...@@ -812,9 +831,6 @@ class TryExcept(BlockRangeMixIn, Statement):
def _infer_name(self, frame, name): def _infer_name(self, frame, name):
return name return name
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno): def block_range(self, lineno):
"""handle block line numbers range for try/except statements""" """handle block line numbers range for try/except statements"""
last = None last = None
...@@ -834,9 +850,6 @@ class TryFinally(BlockRangeMixIn, Statement): ...@@ -834,9 +850,6 @@ class TryFinally(BlockRangeMixIn, Statement):
body = None body = None
finalbody = None finalbody = None
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno): def block_range(self, lineno):
"""handle block line numbers range for try/finally statements""" """handle block line numbers range for try/finally statements"""
child = self.body[0] child = self.body[0]
...@@ -880,7 +893,8 @@ class While(BlockRangeMixIn, Statement): ...@@ -880,7 +893,8 @@ class While(BlockRangeMixIn, Statement):
body = None body = None
orelse = None orelse = None
def _blockstart_toline(self): @cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno return self.test.tolineno
def block_range(self, lineno): def block_range(self, lineno):
...@@ -894,7 +908,8 @@ class With(BlockRangeMixIn, AssignTypeMixin, Statement): ...@@ -894,7 +908,8 @@ class With(BlockRangeMixIn, AssignTypeMixin, Statement):
items = None items = None
body = None body = None
def _blockstart_toline(self): @cachedproperty
def blockstart_tolineno(self):
return self.items[-1][0].tolineno return self.items[-1][0].tolineno
def get_children(self): def get_children(self):
......
...@@ -23,7 +23,7 @@ __doctype__ = "restructuredtext en" ...@@ -23,7 +23,7 @@ __doctype__ = "restructuredtext en"
from astroid.exceptions import InferenceError, NoDefault, NotFoundError from astroid.exceptions import InferenceError, NoDefault, NotFoundError
from astroid.node_classes import unpack_infer from astroid.node_classes import unpack_infer
from astroid.bases import copy_context, \ from astroid.bases import InferenceContext, \
raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
from astroid.nodes import const_factory from astroid.nodes import const_factory
from astroid import nodes from astroid import nodes
...@@ -91,7 +91,7 @@ BIN_OP_IMPL = {'+': lambda a, b: a + b, ...@@ -91,7 +91,7 @@ BIN_OP_IMPL = {'+': lambda a, b: a + b,
'<<': lambda a, b: a << b, '<<': lambda a, b: a << b,
'>>': lambda a, b: a >> b, '>>': lambda a, b: a >> b,
} }
for key, impl in BIN_OP_IMPL.items(): for key, impl in list(BIN_OP_IMPL.items()):
BIN_OP_IMPL[key+'='] = impl BIN_OP_IMPL[key+'='] = impl
def const_infer_binary_op(self, operator, other, context): def const_infer_binary_op(self, operator, other, context):
...@@ -282,7 +282,8 @@ def _arguments_infer_argname(self, name, context): ...@@ -282,7 +282,8 @@ def _arguments_infer_argname(self, name, context):
# if there is a default value, yield it. And then yield YES to reflect # if there is a default value, yield it. And then yield YES to reflect
# we can't guess given argument value # we can't guess given argument value
try: try:
context = copy_context(context) if context is None:
context = InferenceContext()
for infered in self.default_value(name).infer(context): for infered in self.default_value(name).infer(context):
yield infered yield infered
yield YES yield YES
...@@ -294,13 +295,8 @@ def arguments_assigned_stmts(self, node, context, asspath=None): ...@@ -294,13 +295,8 @@ def arguments_assigned_stmts(self, node, context, asspath=None):
if context.callcontext: if context.callcontext:
# reset call context/name # reset call context/name
callcontext = context.callcontext callcontext = context.callcontext
context = copy_context(context) return callcontext.infer_argument(self.parent, node.name, context)
context.callcontext = None return _arguments_infer_argname(self, node.name, context)
for infered in callcontext.infer_argument(self.parent, node.name, context):
yield infered
return
for infered in _arguments_infer_argname(self, node.name, context):
yield infered
nodes.Arguments.assigned_stmts = arguments_assigned_stmts nodes.Arguments.assigned_stmts = arguments_assigned_stmts
......
...@@ -25,6 +25,7 @@ import sys ...@@ -25,6 +25,7 @@ import sys
from os.path import abspath from os.path import abspath
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod, from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
ismethoddescriptor, isclass, isbuiltin, ismodule) ismethoddescriptor, isclass, isbuiltin, ismodule)
import six
from astroid.node_classes import CONST_CLS from astroid.node_classes import CONST_CLS
from astroid.nodes import (Module, Class, Const, const_factory, From, from astroid.nodes import (Module, Class, Const, const_factory, From,
...@@ -57,7 +58,10 @@ def attach_dummy_node(node, name, object=_marker): ...@@ -57,7 +58,10 @@ def attach_dummy_node(node, name, object=_marker):
enode.object = object enode.object = object
_attach_local_node(node, enode, name) _attach_local_node(node, enode, name)
EmptyNode.has_underlying_object = lambda self: self.object is not _marker def _has_underlying_object(self):
return hasattr(self, 'object') and self.object is not _marker
EmptyNode.has_underlying_object = _has_underlying_object
def attach_const_node(node, name, value): def attach_const_node(node, name, value):
"""create a Const node and register it in the locals of the given """create a Const node and register it in the locals of the given
...@@ -247,10 +251,11 @@ class InspectBuilder(object): ...@@ -247,10 +251,11 @@ class InspectBuilder(object):
attach_dummy_node(node, name) attach_dummy_node(node, name)
continue continue
if ismethod(member): if ismethod(member):
member = member.im_func member = six.get_method_function(member)
if isfunction(member): if isfunction(member):
# verify this is not an imported function # verify this is not an imported function
filename = getattr(member.func_code, 'co_filename', None) filename = getattr(six.get_function_code(member),
'co_filename', None)
if filename is None: if filename is None:
assert isinstance(member, object) assert isinstance(member, object)
object_build_methoddescriptor(node, member, name) object_build_methoddescriptor(node, member, name)
...@@ -261,8 +266,6 @@ class InspectBuilder(object): ...@@ -261,8 +266,6 @@ class InspectBuilder(object):
elif isbuiltin(member): elif isbuiltin(member):
if (not _io_discrepancy(member) and if (not _io_discrepancy(member) and
self.imported_member(node, member, name)): self.imported_member(node, member, name)):
#if obj is object:
# print 'skippp', obj, name, member
continue continue
object_build_methoddescriptor(node, member, name) object_build_methoddescriptor(node, member, name)
elif isclass(member): elif isclass(member):
...@@ -299,7 +302,7 @@ class InspectBuilder(object): ...@@ -299,7 +302,7 @@ class InspectBuilder(object):
modname = getattr(member, '__module__', None) modname = getattr(member, '__module__', None)
except: except:
# XXX use logging # XXX use logging
print 'unexpected error while building astroid from living object' print('unexpected error while building astroid from living object')
import traceback import traceback
traceback.print_exc() traceback.print_exc()
modname = None modname = None
......
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
"""this module contains some utilities to navigate in the tree or to """this module contains some utilities to navigate in the tree or to
extract information from it extract information from it
""" """
from __future__ import print_function
__docformat__ = "restructuredtext en" __docformat__ = "restructuredtext en"
...@@ -109,22 +110,22 @@ def _check_children(node): ...@@ -109,22 +110,22 @@ def _check_children(node):
for child in node.get_children(): for child in node.get_children():
ok = False ok = False
if child is None: if child is None:
print "Hm, child of %s is None" % node print("Hm, child of %s is None" % node)
continue continue
if not hasattr(child, 'parent'): if not hasattr(child, 'parent'):
print " ERROR: %s has child %s %x with no parent" % ( print(" ERROR: %s has child %s %x with no parent" % (
node, child, id(child)) node, child, id(child)))
elif not child.parent: elif not child.parent:
print " ERROR: %s has child %s %x with parent %r" % ( print(" ERROR: %s has child %s %x with parent %r" % (
node, child, id(child), child.parent) node, child, id(child), child.parent))
elif child.parent is not node: elif child.parent is not node:
print " ERROR: %s %x has child %s %x with wrong parent %s" % ( print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
node, id(node), child, id(child), child.parent) node, id(node), child, id(child), child.parent))
else: else:
ok = True ok = True
if not ok: if not ok:
print "lines;", node.lineno, child.lineno print("lines;", node.lineno, child.lineno)
print "of module", node.root(), node.root().name print("of module", node.root(), node.root().name)
raise AstroidBuildingException raise AstroidBuildingException
_check_children(child) _check_children(child)
......
 URL: http://www.logilab.org/project/logilab-common
-Version: 0.63.1
+Version: 0.63.2
 License: GPL
 License File: LICENSE.txt
......
@@ -25,7 +25,7 @@ modname = 'common'
 subpackage_of = 'logilab'
 subpackage_master = True

-numversion = (0, 63, 0)
+numversion = (0, 63, 2)
 version = '.'.join([str(num) for num in numversion])

 license = 'LGPL' # 2.1 or later
@@ -43,6 +43,8 @@ include_dirs = [join('test', 'data')]
 install_requires = [
     'six >= 1.4.0',
     ]
+test_require = ['pytz']
+
 if sys.version_info < (2, 7):
     install_requires.append('unittest2 >= 0.5.1')
 if os.name == 'nt':
......
...@@ -33,15 +33,17 @@ Example:: ...@@ -33,15 +33,17 @@ Example::
help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille")) help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille"))
def do_pionce(self): def do_pionce(self):
print 'nap is good' print('nap is good')
help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille")) help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille"))
def do_ronfle(self): def do_ronfle(self):
print 'fuuuuuuuuuuuu rhhhhhrhrhrrh' print('fuuuuuuuuuuuu rhhhhhrhrhrrh')
cl = BookShell() cl = BookShell()
""" """
from __future__ import print_function
__docformat__ = "restructuredtext en" __docformat__ = "restructuredtext en"
from six.moves import builtins, input from six.moves import builtins, input
...@@ -66,7 +68,7 @@ def init_readline(complete_method, histfile=None): ...@@ -66,7 +68,7 @@ def init_readline(complete_method, histfile=None):
import atexit import atexit
atexit.register(readline.write_history_file, histfile) atexit.register(readline.write_history_file, histfile)
except: except:
print 'readline is not available :-(' print('readline is not available :-(')
class Completer : class Completer :
...@@ -157,10 +159,10 @@ class CLIHelper: ...@@ -157,10 +159,10 @@ class CLIHelper:
return self.commands.keys() return self.commands.keys()
def _print_help(self, cmd, syntax, explanation): def _print_help(self, cmd, syntax, explanation):
print _('Command %s') % cmd print(_('Command %s') % cmd)
print _('Syntax: %s') % syntax print(_('Syntax: %s') % syntax)
print '\t', explanation print('\t', explanation)
print print()
# predefined commands ##################################################### # predefined commands #####################################################
...@@ -170,20 +172,20 @@ class CLIHelper: ...@@ -170,20 +172,20 @@ class CLIHelper:
if command in self._command_help: if command in self._command_help:
self._print_help(*self._command_help[command]) self._print_help(*self._command_help[command])
elif command is None or command not in self._topics: elif command is None or command not in self._topics:
print _("Use help <topic> or help <command>.") print(_("Use help <topic> or help <command>."))
print _("Available topics are:") print(_("Available topics are:"))
topics = sorted(self._topics.keys()) topics = sorted(self._topics.keys())
for topic in topics: for topic in topics:
print '\t', topic print('\t', topic)
print print()
print _("Available commands are:") print(_("Available commands are:"))
commands = self.commands.keys() commands = self.commands.keys()
commands.sort() commands.sort()
for command in commands: for command in commands:
print '\t', command[len(self.CMD_PREFIX):] print('\t', command[len(self.CMD_PREFIX):])
else: else:
print _('Available commands about %s:') % command print(_('Available commands about %s:') % command)
print print
for command_help_method in self._topics[command]: for command_help_method in self._topics[command]:
try: try:
...@@ -194,8 +196,8 @@ class CLIHelper: ...@@ -194,8 +196,8 @@ class CLIHelper:
except: except:
import traceback import traceback
traceback.print_exc() traceback.print_exc()
print 'ERROR in help method %s'% ( print('ERROR in help method %s'% (
command_help_method.__name__) command_help_method.__name__))
help_do_help = ("help", "help [topic|command]", help_do_help = ("help", "help [topic|command]",
_("print help message for the given topic/command or \ _("print help message for the given topic/command or \
......
...@@ -409,21 +409,20 @@ def rest_format_section(stream, section, options, encoding=None, doc=None): ...@@ -409,21 +409,20 @@ def rest_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using as ReST formatted output""" """format an options section using as ReST formatted output"""
encoding = _get_encoding(encoding, stream) encoding = _get_encoding(encoding, stream)
if section: if section:
print >> stream, '%s\n%s' % (section, "'"*len(section)) print('%s\n%s' % (section, "'"*len(section)), file=stream)
if doc: if doc:
print >> stream, _encode(normalize_text(doc, line_len=79, indent=''), print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
encoding) print(file=stream)
print >> stream
for optname, optdict, value in options: for optname, optdict, value in options:
help = optdict.get('help') help = optdict.get('help')
print >> stream, ':%s:' % optname print(':%s:' % optname, file=stream)
if help: if help:
help = normalize_text(help, line_len=79, indent=' ') help = normalize_text(help, line_len=79, indent=' ')
print >> stream, _encode(help, encoding) print(_encode(help, encoding), file=stream)
if value: if value:
value = _encode(format_option_value(optdict, value), encoding) value = _encode(format_option_value(optdict, value), encoding)
print >> stream, '' print(file=stream)
print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``") print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
# Options Manager ############################################################## # Options Manager ##############################################################
......
...@@ -51,7 +51,7 @@ def setugid(user): ...@@ -51,7 +51,7 @@ def setugid(user):
os.environ['HOME'] = passwd.pw_dir os.environ['HOME'] = passwd.pw_dir
def daemonize(pidfile=None, uid=None, umask=077): def daemonize(pidfile=None, uid=None, umask=0o77):
"""daemonize a Unix process. Set paranoid umask by default. """daemonize a Unix process. Set paranoid umask by default.
Return 1 in the original process, 2 in the first fork, and None for the Return 1 in the original process, 2 in the first fork, and None for the
...@@ -71,9 +71,6 @@ def daemonize(pidfile=None, uid=None, umask=077): ...@@ -71,9 +71,6 @@ def daemonize(pidfile=None, uid=None, umask=077):
return 2 return 2
# move to the root to avoit mount pb # move to the root to avoit mount pb
os.chdir('/') os.chdir('/')
# set umask if specified
if umask is not None:
os.umask(umask)
# redirect standard descriptors # redirect standard descriptors
null = os.open('/dev/null', os.O_RDWR) null = os.open('/dev/null', os.O_RDWR)
for i in range(3): for i in range(3):
...@@ -95,7 +92,9 @@ def daemonize(pidfile=None, uid=None, umask=077): ...@@ -95,7 +92,9 @@ def daemonize(pidfile=None, uid=None, umask=077):
f = file(pidfile, 'w') f = file(pidfile, 'w')
f.write(str(os.getpid())) f.write(str(os.getpid()))
f.close() f.close()
os.chmod(pidfile, 0644) # set umask if specified
if umask is not None:
os.umask(umask)
# change process uid # change process uid
if uid: if uid:
setugid(uid) setugid(uid)
......
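The hunk above reorders daemonize(): the pidfile is written while the process still has its caller's umask, the explicit chmod is dropped, and the paranoid umask is only applied afterwards. A minimal sketch of that ordering, using a hypothetical helper name:

    import os

    def _write_pidfile_then_restrict(pidfile, umask=0o77):
        # hypothetical helper, illustrating the new ordering only
        with open(pidfile, 'w') as stream:
            stream.write(str(os.getpid()))  # pidfile keeps the caller's default permissions
        if umask is not None:
            os.umask(umask)  # restrict permissions only for files the daemon creates next
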
...@@ -314,7 +314,7 @@ def ustrftime(somedate, fmt='%Y-%m-%d'): ...@@ -314,7 +314,7 @@ def ustrftime(somedate, fmt='%Y-%m-%d'):
def utcdatetime(dt): def utcdatetime(dt):
if dt.tzinfo is None: if dt.tzinfo is None:
return dt return dt
return datetime(*dt.utctimetuple()[:7]) return (dt.replace(tzinfo=None) - dt.utcoffset())
def utctime(dt): def utctime(dt):
if dt.tzinfo is None: if dt.tzinfo is None:
......
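utcdatetime() now converts an aware datetime to naive UTC with plain datetime arithmetic instead of round-tripping through utctimetuple(), which preserves sub-second precision. A small sketch with a hypothetical fixed-offset tzinfo:

    from datetime import datetime, timedelta, tzinfo

    class _FixedOffset(tzinfo):  # hypothetical +02:00 zone, for illustration only
        def utcoffset(self, dt):
            return timedelta(hours=2)
        def dst(self, dt):
            return timedelta(0)

    aware = datetime(2014, 11, 30, 12, 0, 0, 123456, tzinfo=_FixedOffset())
    # expression from the patch: a naive UTC datetime, microseconds preserved
    utc = aware.replace(tzinfo=None) - aware.utcoffset()
    assert utc == datetime(2014, 11, 30, 10, 0, 0, 123456)
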
...@@ -30,6 +30,7 @@ Usage: ...@@ -30,6 +30,7 @@ Usage:
http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html
page says code is "available as is without any warranty or support". page says code is "available as is without any warranty or support".
""" """
from __future__ import print_function
import struct import struct
import os, os.path import os, os.path
...@@ -79,7 +80,7 @@ class Dbase: ...@@ -79,7 +80,7 @@ class Dbase:
def open(self, db_name): def open(self, db_name):
filesize = os.path.getsize(db_name) filesize = os.path.getsize(db_name)
if filesize <= 68: if filesize <= 68:
raise IOError, 'The file is not large enough to be a dbf file' raise IOError('The file is not large enough to be a dbf file')
self.fdb = open(db_name, 'rb') self.fdb = open(db_name, 'rb')
...@@ -152,7 +153,7 @@ class Dbase: ...@@ -152,7 +153,7 @@ class Dbase:
This function accept record number from 0 to N-1 This function accept record number from 0 to N-1
""" """
if rec_no < 0 or rec_no > self.num_records: if rec_no < 0 or rec_no > self.num_records:
raise Exception, 'Unable to extract data outside the range' raise Exception('Unable to extract data outside the range')
offset = self.header['Record Size'] * rec_no offset = self.header['Record Size'] * rec_no
data = self.db_data[offset:offset+self.row_len] data = self.db_data[offset:offset+self.row_len]
...@@ -227,4 +228,4 @@ def readDbf(filename): ...@@ -227,4 +228,4 @@ def readDbf(filename):
if __name__=='__main__': if __name__=='__main__':
rec = readDbf('dbf/sptable.dbf') rec = readDbf('dbf/sptable.dbf')
for line in rec: for line in rec:
print '%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()) print('%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()))
...@@ -125,11 +125,12 @@ class DeprecationManager(object): ...@@ -125,11 +125,12 @@ class DeprecationManager(object):
return self.class_deprecated(version)(old_name, (new_class,), clsdict) return self.class_deprecated(version)(old_name, (new_class,), clsdict)
except (NameError, TypeError): except (NameError, TypeError):
# old-style class # old-style class
warn = self.warn
class DeprecatedClass(new_class): class DeprecatedClass(new_class):
"""FIXME: There might be a better way to handle old/new-style class """FIXME: There might be a better way to handle old/new-style class
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.warn(version, message, stacklevel=3) warn(version, message, stacklevel=3)
new_class.__init__(self, *args, **kwargs) new_class.__init__(self, *args, **kwargs)
return DeprecatedClass return DeprecatedClass
......
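The change above hoists warn = self.warn out of the nested class: inside DeprecatedClass.__init__, self is the freshly created instance rather than the DeprecationManager, so the bound method has to be captured in the enclosing scope. A self-contained sketch of the pattern, with hypothetical names:

    class Manager(object):  # hypothetical stand-in for DeprecationManager
        def warn(self, version, message, stacklevel=2):
            print('DEPRECATED [%s]: %s' % (version, message))

        def deprecate_class(self, new_class, version, message):
            warn = self.warn  # bound to the manager, not to the wrapper instance
            class DeprecatedClass(new_class):
                def __init__(self, *args, **kwargs):
                    warn(version, message, stacklevel=3)
                    new_class.__init__(self, *args, **kwargs)
            return DeprecatedClass
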
...@@ -29,6 +29,8 @@ Example: ...@@ -29,6 +29,8 @@ Example:
With mymod.build that defines two functions run and add_options With mymod.build that defines two functions run and add_options
""" """
from __future__ import print_function
__docformat__ = "restructuredtext en" __docformat__ = "restructuredtext en"
from warnings import warn from warnings import warn
...@@ -55,9 +57,9 @@ class OptionParser(optparse.OptionParser): ...@@ -55,9 +57,9 @@ class OptionParser(optparse.OptionParser):
def print_main_help(self): def print_main_help(self):
optparse.OptionParser.print_help(self) optparse.OptionParser.print_help(self)
print '\ncommands:' print('\ncommands:')
for cmdname, (_, help) in self._commands.items(): for cmdname, (_, help) in self._commands.items():
print '% 10s - %s' % (cmdname, help) print('% 10s - %s' % (cmdname, help))
def parse_command(self, args): def parse_command(self, args):
if len(args) == 0: if len(args) == 0:
...@@ -78,7 +80,7 @@ class OptionParser(optparse.OptionParser): ...@@ -78,7 +80,7 @@ class OptionParser(optparse.OptionParser):
# optparse inserts self.description between usage and options help # optparse inserts self.description between usage and options help
self.description = help self.description = help
if isinstance(mod_or_f, str): if isinstance(mod_or_f, str):
exec 'from %s import run, add_options' % mod_or_f exec('from %s import run, add_options' % mod_or_f)
else: else:
run, add_options = mod_or_f run, add_options = mod_or_f
add_options(self) add_options(self)
......
...@@ -119,12 +119,14 @@ from time import time, clock ...@@ -119,12 +119,14 @@ from time import time, clock
import warnings import warnings
import types import types
from inspect import isgeneratorfunction, isclass from inspect import isgeneratorfunction, isclass
from contextlib import contextmanager
from logilab.common.fileutils import abspath_listdir from logilab.common.fileutils import abspath_listdir
from logilab.common import textutils from logilab.common import textutils
from logilab.common import testlib, STD_BLACKLIST from logilab.common import testlib, STD_BLACKLIST
# use the same unittest module as testlib # use the same unittest module as testlib
from logilab.common.testlib import unittest, start_interactive_mode from logilab.common.testlib import unittest, start_interactive_mode
from logilab.common.deprecation import deprecated
import doctest import doctest
import unittest as unittest_legacy import unittest as unittest_legacy
...@@ -145,28 +147,41 @@ except ImportError: ...@@ -145,28 +147,41 @@ except ImportError:
CONF_FILE = 'pytestconf.py' CONF_FILE = 'pytestconf.py'
## coverage hacks, do not read this, do not read this, do not read this ## coverage pausing tools
@contextmanager
def replace_trace(trace=None):
"""A context manager that temporary replaces the trace function"""
oldtrace = sys.gettrace()
sys.settrace(trace)
try:
yield
finally:
# specific hack to work around a bug in pycoverage, see
# https://bitbucket.org/ned/coveragepy/issue/123
if (oldtrace is not None and not callable(oldtrace) and
hasattr(oldtrace, 'pytrace')):
oldtrace = oldtrace.pytrace
sys.settrace(oldtrace)
def pause_trace():
"""A context manager that temporary pauses any tracing"""
return replace_trace()
# hey, but this is an aspect, right ?!!!
class TraceController(object): class TraceController(object):
nesting = 0 ctx_stack = []
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def pause_tracing(cls): def pause_tracing(cls):
if not cls.nesting: cls.ctx_stack.append(pause_trace())
cls.tracefunc = staticmethod(getattr(sys, '__settrace__', sys.settrace)) cls.ctx_stack[-1].__enter__()
cls.oldtracer = getattr(sys, '__tracer__', None)
sys.__notrace__ = True
cls.tracefunc(None)
cls.nesting += 1
pause_tracing = classmethod(pause_tracing)
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def resume_tracing(cls): def resume_tracing(cls):
cls.nesting -= 1 cls.ctx_stack.pop().__exit__(None, None, None)
assert cls.nesting >= 0
if not cls.nesting:
cls.tracefunc(cls.oldtracer)
delattr(sys, '__notrace__')
resume_tracing = classmethod(resume_tracing)
pause_tracing = TraceController.pause_tracing pause_tracing = TraceController.pause_tracing
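With the new context manager, the old pause_tracing()/resume_tracing() pair collapses into a single with-block. A usage sketch, assuming pause_trace is importable from logilab.common.pytest as the surrounding diff suggests:

    from logilab.common.pytest import pause_trace  # assumed import path

    def call_untraced(func, *args, **kwargs):
        # tracing (e.g. coverage) is suspended only for the duration of the call
        with pause_trace():
            return func(*args, **kwargs)
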
...@@ -174,20 +189,18 @@ resume_tracing = TraceController.resume_tracing ...@@ -174,20 +189,18 @@ resume_tracing = TraceController.resume_tracing
def nocoverage(func): def nocoverage(func):
"""Function decorator that pauses tracing functions"""
if hasattr(func, 'uncovered'): if hasattr(func, 'uncovered'):
return func return func
func.uncovered = True func.uncovered = True
def not_covered(*args, **kwargs): def not_covered(*args, **kwargs):
pause_tracing() with pause_trace():
try:
return func(*args, **kwargs) return func(*args, **kwargs)
finally:
resume_tracing()
not_covered.uncovered = True not_covered.uncovered = True
return not_covered return not_covered
## end of coverage pausing tools
## end of coverage hacks
TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$") TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$")
...@@ -1082,8 +1095,14 @@ class NonStrictTestLoader(unittest.TestLoader): ...@@ -1082,8 +1095,14 @@ class NonStrictTestLoader(unittest.TestLoader):
testCaseClass) testCaseClass)
return [testname for testname in testnames if not is_skipped(testname)] return [testname for testname in testnames if not is_skipped(testname)]
# The 2 functions below are modified versions of the TestSuite.run method
# that is provided with unittest2 for python 2.6, in unittest2/suite.py
# It is used to monkeypatch the original implementation to support
# extra runcondition and options arguments (see in testlib.py)
def _ts_run(self, result, runcondition=None, options=None): def _ts_run(self, result, runcondition=None, options=None):
self._wrapped_run(result,runcondition=runcondition, options=options) self._wrapped_run(result, runcondition=runcondition, options=options)
self._tearDownPreviousClass(None, result) self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result) self._handleModuleTearDown(result)
return result return result
...@@ -1097,10 +1116,17 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None): ...@@ -1097,10 +1116,17 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
self._handleModuleFixture(test, result) self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result) self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__ result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)): getattr(result, '_moduleSetUpFailed', False)):
continue continue
# --- modifications to deal with _wrapped_run ---
# original code is:
#
# if not debug:
# test(result)
# else:
# test.debug()
if hasattr(test, '_wrapped_run'): if hasattr(test, '_wrapped_run'):
try: try:
test._wrapped_run(result, debug, runcondition=runcondition, options=options) test._wrapped_run(result, debug, runcondition=runcondition, options=options)
...@@ -1113,6 +1139,25 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None): ...@@ -1113,6 +1139,25 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
test(result) test(result)
else: else:
test.debug() test.debug()
# --- end of modifications to deal with _wrapped_run ---
return result
if sys.version_info >= (2, 7):
# The function below implements a modified version of the
# TestSuite.run method that is provided with python 2.7, in
# unittest/suite.py
def _ts_run(self, result, debug=False, runcondition=None, options=None):
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
self._wrapped_run(result, debug, runcondition, options)
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
def enable_dbc(*args): def enable_dbc(*args):
......
...@@ -42,13 +42,13 @@ def layout_title(layout): ...@@ -42,13 +42,13 @@ def layout_title(layout):
""" """
for child in layout.children: for child in layout.children:
if isinstance(child, Title): if isinstance(child, Title):
return ' '.join([node.data for node in get_nodes(child, Text)]) return u' '.join([node.data for node in get_nodes(child, Text)])
def build_summary(layout, level=1): def build_summary(layout, level=1):
"""make a summary for the report, including X level""" """make a summary for the report, including X level"""
assert level > 0 assert level > 0
level -= 1 level -= 1
summary = List(klass='summary') summary = List(klass=u'summary')
for child in layout.children: for child in layout.children:
if not isinstance(child, Section): if not isinstance(child, Section):
continue continue
...@@ -57,7 +57,7 @@ def build_summary(layout, level=1): ...@@ -57,7 +57,7 @@ def build_summary(layout, level=1):
continue continue
if not child.id: if not child.id:
child.id = label.replace(' ', '-') child.id = label.replace(' ', '-')
node = Link('#'+child.id, label=label or child.id) node = Link(u'#'+child.id, label=label or child.id)
# FIXME: Three following lines produce not very compliant # FIXME: Three following lines produce not very compliant
# docbook: there are some useless <para><para>. They might be # docbook: there are some useless <para><para>. They might be
# replaced by the three commented lines but this then produces # replaced by the three commented lines but this then produces
...@@ -99,7 +99,7 @@ class BaseWriter(object): ...@@ -99,7 +99,7 @@ class BaseWriter(object):
for child in getattr(layout, 'children', ()): for child in getattr(layout, 'children', ()):
child.accept(self) child.accept(self)
def writeln(self, string=''): def writeln(self, string=u''):
"""write a line in the output buffer""" """write a line in the output buffer"""
self.write(string + linesep) self.write(string + linesep)
...@@ -132,7 +132,7 @@ class BaseWriter(object): ...@@ -132,7 +132,7 @@ class BaseWriter(object):
result[-1].append(cell) result[-1].append(cell)
# fill missing cells # fill missing cells
while len(result[-1]) < cols: while len(result[-1]) < cols:
result[-1].append('') result[-1].append(u'')
return result return result
def compute_content(self, layout): def compute_content(self, layout):
...@@ -147,7 +147,7 @@ class BaseWriter(object): ...@@ -147,7 +147,7 @@ class BaseWriter(object):
stream.write(data) stream.write(data)
except UnicodeEncodeError: except UnicodeEncodeError:
stream.write(data.encode(self.encoding)) stream.write(data.encode(self.encoding))
def writeln(data=''): def writeln(data=u''):
try: try:
stream.write(data+linesep) stream.write(data+linesep)
except UnicodeEncodeError: except UnicodeEncodeError:
......
...@@ -27,8 +27,8 @@ from logilab.common.textutils import linesep ...@@ -27,8 +27,8 @@ from logilab.common.textutils import linesep
from logilab.common.ureports import BaseWriter from logilab.common.ureports import BaseWriter
TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^'] TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^']
BULLETS = ['*', '-'] BULLETS = [u'*', u'-']
class TextWriter(BaseWriter): class TextWriter(BaseWriter):
"""format layouts as text """format layouts as text
...@@ -48,13 +48,13 @@ class TextWriter(BaseWriter): ...@@ -48,13 +48,13 @@ class TextWriter(BaseWriter):
if self.pending_urls: if self.pending_urls:
self.writeln() self.writeln()
for label, url in self.pending_urls: for label, url in self.pending_urls:
self.writeln('.. _`%s`: %s' % (label, url)) self.writeln(u'.. _`%s`: %s' % (label, url))
self.pending_urls = [] self.pending_urls = []
self.section -= 1 self.section -= 1
self.writeln() self.writeln()
def visit_title(self, layout): def visit_title(self, layout):
title = ''.join(list(self.compute_content(layout))) title = u''.join(list(self.compute_content(layout)))
self.writeln(title) self.writeln(title)
try: try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title)) self.writeln(TITLE_UNDERLINES[self.section] * len(title))
...@@ -88,19 +88,19 @@ class TextWriter(BaseWriter): ...@@ -88,19 +88,19 @@ class TextWriter(BaseWriter):
def default_table(self, layout, table_content, cols_width): def default_table(self, layout, table_content, cols_width):
"""format a table""" """format a table"""
cols_width = [size+1 for size in cols_width] cols_width = [size+1 for size in cols_width]
format_strings = ' '.join(['%%-%ss'] * len(cols_width)) format_strings = u' '.join([u'%%-%ss'] * len(cols_width))
format_strings = format_strings % tuple(cols_width) format_strings = format_strings % tuple(cols_width)
format_strings = format_strings.split(' ') format_strings = format_strings.split(' ')
table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n' table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n'
headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n' headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n'
# FIXME: layout.cheaders # FIXME: layout.cheaders
self.write(table_linesep) self.write(table_linesep)
for i in range(len(table_content)): for i in range(len(table_content)):
self.write('|') self.write(u'|')
line = table_content[i] line = table_content[i]
for j in range(len(line)): for j in range(len(line)):
self.write(format_strings[j] % line[j]) self.write(format_strings[j] % line[j])
self.write('|') self.write(u'|')
if i == 0 and layout.rheaders: if i == 0 and layout.rheaders:
self.write(headsep) self.write(headsep)
else: else:
...@@ -109,7 +109,7 @@ class TextWriter(BaseWriter): ...@@ -109,7 +109,7 @@ class TextWriter(BaseWriter):
def field_table(self, layout, table_content, cols_width): def field_table(self, layout, table_content, cols_width):
"""special case for field table""" """special case for field table"""
assert layout.cols == 2 assert layout.cols == 2
format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0]) format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0])
for field, value in table_content: for field, value in table_content:
self.write(format_string % (field, value)) self.write(format_string % (field, value))
...@@ -120,14 +120,14 @@ class TextWriter(BaseWriter): ...@@ -120,14 +120,14 @@ class TextWriter(BaseWriter):
indent = ' ' * self.list_level indent = ' ' * self.list_level
self.list_level += 1 self.list_level += 1
for child in layout.children: for child in layout.children:
self.write('%s%s%s ' % (linesep, indent, bullet)) self.write(u'%s%s%s ' % (linesep, indent, bullet))
child.accept(self) child.accept(self)
self.list_level -= 1 self.list_level -= 1
def visit_link(self, layout): def visit_link(self, layout):
"""add a hyperlink""" """add a hyperlink"""
if layout.label != layout.url: if layout.label != layout.url:
self.write('`%s`_' % layout.label) self.write(u'`%s`_' % layout.label)
self.pending_urls.append( (layout.label, layout.url) ) self.pending_urls.append( (layout.label, layout.url) )
else: else:
self.write(layout.url) self.write(layout.url)
...@@ -135,11 +135,11 @@ class TextWriter(BaseWriter): ...@@ -135,11 +135,11 @@ class TextWriter(BaseWriter):
def visit_verbatimtext(self, layout): def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;) """display a verbatim layout as text (so difficult ;)
""" """
self.writeln('::\n') self.writeln(u'::\n')
for line in layout.data.splitlines(): for line in layout.data.splitlines():
self.writeln(' ' + line) self.writeln(u' ' + line)
self.writeln() self.writeln()
def visit_text(self, layout): def visit_text(self, layout):
"""add some text""" """add some text"""
self.write(layout.data) self.write(u'%s' % layout.data)
from __future__ import print_function
import logging import logging
import urllib2 import urllib2
...@@ -84,4 +86,4 @@ if __name__ == '__main__': ...@@ -84,4 +86,4 @@ if __name__ == '__main__':
# test with url sys.argv[1] # test with url sys.argv[1]
h = HTTPGssapiAuthHandler() h = HTTPGssapiAuthHandler()
response = urllib2.build_opener(h, ch).open(sys.argv[1]) response = urllib2.build_opener(h, ch).open(sys.argv[1])
print '\nresponse: %s\n--------------\n' % response.code, response.info() print('\nresponse: %s\n--------------\n' % response.code, response.info())
URL: http://www.pylint.org/ URL: http://www.pylint.org/
Version: 1.3.1 Version: 1.4.0
License: GPL License: GPL
License File: LICENSE.txt License File: LICENSE.txt
...@@ -7,4 +7,4 @@ Description: ...@@ -7,4 +7,4 @@ Description:
This directory contains the pylint module. This directory contains the pylint module.
Local Modifications: Local Modifications:
None - applied upstream fix https://bitbucket.org/logilab/pylint/commits/5df347467ee0
...@@ -15,6 +15,8 @@ ...@@ -15,6 +15,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys import sys
from .__pkginfo__ import version as __version__
def run_pylint(): def run_pylint():
"""run pylint""" """run pylint"""
from pylint.lint import Run from pylint.lint import Run
......
...@@ -15,18 +15,14 @@ ...@@ -15,18 +15,14 @@
# this program; if not, write to the Free Software Foundation, Inc., # this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""pylint packaging information""" """pylint packaging information"""
import sys from __future__ import absolute_import
modname = distname = 'pylint' modname = distname = 'pylint'
numversion = (1, 3, 1) numversion = (1, 4, 0)
version = '.'.join([str(num) for num in numversion]) version = '.'.join([str(num) for num in numversion])
if sys.version_info < (2, 6): install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.2', 'six']
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1',
'StringFormat']
else:
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1']
license = 'GPL' license = 'GPL'
description = "python code static checker" description = "python code static checker"
......
...@@ -30,7 +30,9 @@ Base id of standard checkers (used in msg and report ids): ...@@ -30,7 +30,9 @@ Base id of standard checkers (used in msg and report ids):
12: logging 12: logging
13: string_format 13: string_format
14: string_constant 14: string_constant
15-50: not yet used: reserved for future internal checkers. 15: stdlib
16: python3
17-50: not yet used: reserved for future internal checkers.
51-99: perhaps used: reserved for external checkers 51-99: perhaps used: reserved for external checkers
The raw_metrics checker has no number associated since it doesn't emit any The raw_metrics checker has no number associated since it doesn't emit any
...@@ -46,6 +48,8 @@ from logilab.common.configuration import OptionsProviderMixIn ...@@ -46,6 +48,8 @@ from logilab.common.configuration import OptionsProviderMixIn
from pylint.reporters import diff_string from pylint.reporters import diff_string
from pylint.utils import register_plugins from pylint.utils import register_plugins
from pylint.interfaces import UNDEFINED
def table_lines_from_stats(stats, old_stats, columns): def table_lines_from_stats(stats, old_stats, columns):
"""get values listed in <columns> from <stats> and <old_stats>, """get values listed in <columns> from <stats> and <old_stats>,
...@@ -55,7 +59,7 @@ def table_lines_from_stats(stats, old_stats, columns): ...@@ -55,7 +59,7 @@ def table_lines_from_stats(stats, old_stats, columns):
lines = [] lines = []
for m_type in columns: for m_type in columns:
new = stats[m_type] new = stats[m_type]
format = str format = str # pylint: disable=redefined-builtin
if isinstance(new, float): if isinstance(new, float):
format = lambda num: '%.3f' % num format = lambda num: '%.3f' % num
old = old_stats.get(m_type) old = old_stats.get(m_type)
...@@ -80,6 +84,8 @@ class BaseChecker(OptionsProviderMixIn): ...@@ -80,6 +84,8 @@ class BaseChecker(OptionsProviderMixIn):
msgs = {} msgs = {}
# reports issued by this checker # reports issued by this checker
reports = () reports = ()
# mark this checker as enabled or not.
enabled = True
def __init__(self, linter=None): def __init__(self, linter=None):
"""checker instances should have the linter as argument """checker instances should have the linter as argument
...@@ -90,9 +96,9 @@ class BaseChecker(OptionsProviderMixIn): ...@@ -90,9 +96,9 @@ class BaseChecker(OptionsProviderMixIn):
OptionsProviderMixIn.__init__(self) OptionsProviderMixIn.__init__(self)
self.linter = linter self.linter = linter
def add_message(self, msg_id, line=None, node=None, args=None): def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
"""add a message of a given type""" """add a message of a given type"""
self.linter.add_message(msg_id, line, node, args) self.linter.add_message(msg_id, line, node, args, confidence)
# dummy methods implementing the IChecker interface # dummy methods implementing the IChecker interface
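BaseChecker.add_message() now forwards an optional confidence argument, defaulting to UNDEFINED. A hedged sketch of a checker that sets it explicitly; the HIGH constant is assumed to exist alongside UNDEFINED in pylint.interfaces:

    from pylint.interfaces import IAstroidChecker, HIGH  # HIGH assumed; UNDEFINED appears in the hunk above
    from pylint.checkers import BaseChecker

    class ExampleChecker(BaseChecker):  # hypothetical checker, illustration only
        __implements__ = IAstroidChecker
        name = 'example'
        msgs = {'W9901': ('example message', 'example-message', 'illustration only')}

        def visit_callfunc(self, node):
            # report with an explicit confidence instead of the UNDEFINED default
            self.add_message('example-message', node=node, confidence=HIGH)
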
...@@ -103,31 +109,6 @@ class BaseChecker(OptionsProviderMixIn): ...@@ -103,31 +109,6 @@ class BaseChecker(OptionsProviderMixIn):
"""called after visiting project (i.e set of modules)""" """called after visiting project (i.e set of modules)"""
class BaseRawChecker(BaseChecker):
"""base class for raw checkers"""
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readline method
"""
warnings.warn("Modules that need access to the tokens should "
"use the ITokenChecker interface.",
DeprecationWarning)
stream = node.file_stream
stream.seek(0) # XXX may be removed with astroid > 0.23
if sys.version_info <= (3, 0):
self.process_tokens(tokenize.generate_tokens(stream.readline))
else:
self.process_tokens(tokenize.tokenize(stream.readline))
def process_tokens(self, tokens):
"""should be overridden by subclasses"""
raise NotImplementedError()
class BaseTokenChecker(BaseChecker): class BaseTokenChecker(BaseChecker):
"""Base class for checkers that want to have access to the token stream.""" """Base class for checkers that want to have access to the token stream."""
......
...@@ -15,14 +15,15 @@ ...@@ -15,14 +15,15 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""check for signs of poor design""" """check for signs of poor design"""
import re
from collections import defaultdict
from astroid import Function, If, InferenceError from astroid import Function, If, InferenceError
from pylint.interfaces import IAstroidChecker from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages from pylint.checkers.utils import check_messages
import re
# regexp for ignored argument name # regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile('_.*') IGNORED_ARGUMENT_NAMES = re.compile('_.*')
...@@ -174,7 +175,7 @@ class MisdesignChecker(BaseChecker): ...@@ -174,7 +175,7 @@ class MisdesignChecker(BaseChecker):
"""initialize visit variables""" """initialize visit variables"""
self.stats = self.linter.add_stats() self.stats = self.linter.add_stats()
self._returns = [] self._returns = []
self._branches = [] self._branches = defaultdict(int)
self._used_abstracts = {} self._used_abstracts = {}
self._used_ifaces = {} self._used_ifaces = {}
self._abstracts = [] self._abstracts = []
...@@ -200,7 +201,6 @@ class MisdesignChecker(BaseChecker): ...@@ -200,7 +201,6 @@ class MisdesignChecker(BaseChecker):
def visit_class(self, node): def visit_class(self, node):
"""check size of inheritance hierarchy and number of instance attributes """check size of inheritance hierarchy and number of instance attributes
""" """
self._inc_branch()
# Is the total inheritance hierarchy is 7 or less? # Is the total inheritance hierarchy is 7 or less?
nb_parents = len(list(node.ancestors())) nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents: if nb_parents > self.config.max_parents:
...@@ -241,12 +241,9 @@ class MisdesignChecker(BaseChecker): ...@@ -241,12 +241,9 @@ class MisdesignChecker(BaseChecker):
def leave_class(self, node): def leave_class(self, node):
"""check number of public methods""" """check number of public methods"""
nb_public_methods = 0 nb_public_methods = 0
special_methods = set() for method in node.mymethods():
for method in node.methods():
if not method.name.startswith('_'): if not method.name.startswith('_'):
nb_public_methods += 1 nb_public_methods += 1
if method.name.startswith("__"):
special_methods.add(method.name)
# Does the class contain less than 20 public methods ? # Does the class contain less than 20 public methods ?
if nb_public_methods > self.config.max_public_methods: if nb_public_methods > self.config.max_public_methods:
self.add_message('too-many-public-methods', node=node, self.add_message('too-many-public-methods', node=node,
...@@ -257,20 +254,19 @@ class MisdesignChecker(BaseChecker): ...@@ -257,20 +254,19 @@ class MisdesignChecker(BaseChecker):
return return
# Does the class contain more than 5 public methods ? # Does the class contain more than 5 public methods ?
if nb_public_methods < self.config.min_public_methods: if nb_public_methods < self.config.min_public_methods:
self.add_message('R0903', node=node, self.add_message('too-few-public-methods', node=node,
args=(nb_public_methods, args=(nb_public_methods,
self.config.min_public_methods)) self.config.min_public_methods))
@check_messages('too-many-return-statements', 'too-many-branches', @check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals', 'too-many-statements') 'too-many-arguments', 'too-many-locals',
'too-many-statements')
def visit_function(self, node): def visit_function(self, node):
"""check function name, docstring, arguments, redefinition, """check function name, docstring, arguments, redefinition,
variable names, max locals variable names, max locals
""" """
self._inc_branch()
# init branch and returns counters # init branch and returns counters
self._returns.append(0) self._returns.append(0)
self._branches.append(0)
# check number of arguments # check number of arguments
args = node.args.args args = node.args.args
if args is not None: if args is not None:
...@@ -291,7 +287,9 @@ class MisdesignChecker(BaseChecker): ...@@ -291,7 +287,9 @@ class MisdesignChecker(BaseChecker):
# init statements counter # init statements counter
self._stmts = 1 self._stmts = 1
@check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements') @check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def leave_function(self, node): def leave_function(self, node):
"""most of the work is done here on close: """most of the work is done here on close:
checks for max returns, branch, return in __init__ checks for max returns, branch, return in __init__
...@@ -300,7 +298,7 @@ class MisdesignChecker(BaseChecker): ...@@ -300,7 +298,7 @@ class MisdesignChecker(BaseChecker):
if returns > self.config.max_returns: if returns > self.config.max_returns:
self.add_message('too-many-return-statements', node=node, self.add_message('too-many-return-statements', node=node,
args=(returns, self.config.max_returns)) args=(returns, self.config.max_returns))
branches = self._branches.pop() branches = self._branches[node]
if branches > self.config.max_branches: if branches > self.config.max_branches:
self.add_message('too-many-branches', node=node, self.add_message('too-many-branches', node=node,
args=(branches, self.config.max_branches)) args=(branches, self.config.max_branches))
...@@ -327,12 +325,12 @@ class MisdesignChecker(BaseChecker): ...@@ -327,12 +325,12 @@ class MisdesignChecker(BaseChecker):
branches = len(node.handlers) branches = len(node.handlers)
if node.orelse: if node.orelse:
branches += 1 branches += 1
self._inc_branch(branches) self._inc_branch(node, branches)
self._stmts += branches self._stmts += branches
def visit_tryfinally(self, _): def visit_tryfinally(self, node):
"""increments the branches counter""" """increments the branches counter"""
self._inc_branch(2) self._inc_branch(node, 2)
self._stmts += 2 self._stmts += 2
def visit_if(self, node): def visit_if(self, node):
...@@ -342,7 +340,7 @@ class MisdesignChecker(BaseChecker): ...@@ -342,7 +340,7 @@ class MisdesignChecker(BaseChecker):
if node.orelse and (len(node.orelse) > 1 or if node.orelse and (len(node.orelse) > 1 or
not isinstance(node.orelse[0], If)): not isinstance(node.orelse[0], If)):
branches += 1 branches += 1
self._inc_branch(branches) self._inc_branch(node, branches)
self._stmts += branches self._stmts += branches
def visit_while(self, node): def visit_while(self, node):
...@@ -350,15 +348,13 @@ class MisdesignChecker(BaseChecker): ...@@ -350,15 +348,13 @@ class MisdesignChecker(BaseChecker):
branches = 1 branches = 1
if node.orelse: if node.orelse:
branches += 1 branches += 1
self._inc_branch(branches) self._inc_branch(node, branches)
visit_for = visit_while visit_for = visit_while
def _inc_branch(self, branchesnum=1): def _inc_branch(self, node, branchesnum=1):
"""increments the branches counter""" """increments the branches counter"""
branches = self._branches self._branches[node.scope()] += branchesnum
for i in xrange(len(branches)):
branches[i] += branchesnum
# FIXME: make a nice report... # FIXME: make a nice report...
......
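The branch counter above moves from a stack of integers, where every enclosing frame was incremented, to a defaultdict keyed by node.scope(), so each function is only charged for its own branches. A minimal sketch of the idea outside the checker:

    from collections import defaultdict

    branches = defaultdict(int)  # scope node -> branch count, as in the patch

    def inc_branch(node, branchesnum=1):
        # only the scope that owns the branching statement is incremented;
        # the old list-based version bumped every frame on the stack
        branches[node.scope()] += branchesnum
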
...@@ -16,6 +16,10 @@ ...@@ -16,6 +16,10 @@
"""imports checkers for Python code""" """imports checkers for Python code"""
import sys import sys
from collections import defaultdict
import six
from six.moves import map # pylint: disable=redefined-builtin
from logilab.common.graph import get_cycles, DotBackend from logilab.common.graph import get_cycles, DotBackend
from logilab.common.ureports import VerbatimText, Paragraph from logilab.common.ureports import VerbatimText, Paragraph
...@@ -27,8 +31,16 @@ from astroid.modutils import get_module_part, is_standard_module ...@@ -27,8 +31,16 @@ from astroid.modutils import get_module_part, is_standard_module
from pylint.interfaces import IAstroidChecker from pylint.interfaces import IAstroidChecker
from pylint.utils import EmptyReport from pylint.utils import EmptyReport
from pylint.checkers import BaseChecker from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages from pylint.checkers.utils import check_messages, is_import_error
def _except_import_error(node):
"""
Check if the try-except node has an ImportError handler.
Return True if an ImportError handler was infered, False otherwise.
"""
if not isinstance(node, astroid.TryExcept):
return
return any(map(is_import_error, node.handlers))
def get_first_import(node, context, name, base, level): def get_first_import(node, context, name, base, level):
"""return the node where [base.]<name> is imported or None if not found """return the node where [base.]<name> is imported or None if not found
...@@ -98,14 +110,14 @@ def dependencies_graph(filename, dep_info): ...@@ -98,14 +110,14 @@ def dependencies_graph(filename, dep_info):
done = {} done = {}
printer = DotBackend(filename[:-4], rankdir='LR') printer = DotBackend(filename[:-4], rankdir='LR')
printer.emit('URL="." node[shape="box"]') printer.emit('URL="." node[shape="box"]')
for modname, dependencies in sorted(dep_info.iteritems()): for modname, dependencies in sorted(six.iteritems(dep_info)):
done[modname] = 1 done[modname] = 1
printer.emit_node(modname) printer.emit_node(modname)
for modname in dependencies: for modname in dependencies:
if modname not in done: if modname not in done:
done[modname] = 1 done[modname] = 1
printer.emit_node(modname) printer.emit_node(modname)
for depmodname, dependencies in sorted(dep_info.iteritems()): for depmodname, dependencies in sorted(six.iteritems(dep_info)):
for modname in dependencies: for modname in dependencies:
printer.emit_edge(modname, depmodname) printer.emit_edge(modname, depmodname)
printer.generate(filename) printer.generate(filename)
...@@ -220,20 +232,21 @@ given file (report RP0402 must not be disabled)'} ...@@ -220,20 +232,21 @@ given file (report RP0402 must not be disabled)'}
self.linter.add_stats(dependencies={}) self.linter.add_stats(dependencies={})
self.linter.add_stats(cycles=[]) self.linter.add_stats(cycles=[])
self.stats = self.linter.stats self.stats = self.linter.stats
self.import_graph = {} self.import_graph = defaultdict(set)
def close(self): def close(self):
"""called before visiting project (i.e set of modules)""" """called before visiting project (i.e set of modules)"""
# don't try to compute cycles if the associated message is disabled # don't try to compute cycles if the associated message is disabled
if self.linter.is_message_enabled('cyclic-import'): if self.linter.is_message_enabled('cyclic-import'):
for cycle in get_cycles(self.import_graph): vertices = list(self.import_graph)
for cycle in get_cycles(self.import_graph, vertices=vertices):
self.add_message('cyclic-import', args=' -> '.join(cycle)) self.add_message('cyclic-import', args=' -> '.join(cycle))
def visit_import(self, node): def visit_import(self, node):
"""triggered when an import statement is seen""" """triggered when an import statement is seen"""
modnode = node.root() modnode = node.root()
for name, _ in node.names: for name, _ in node.names:
importedmodnode = self.get_imported_module(modnode, node, name) importedmodnode = self.get_imported_module(node, name)
if importedmodnode is None: if importedmodnode is None:
continue continue
self._check_relative_import(modnode, node, importedmodnode, name) self._check_relative_import(modnode, node, importedmodnode, name)
...@@ -260,7 +273,7 @@ given file (report RP0402 must not be disabled)'} ...@@ -260,7 +273,7 @@ given file (report RP0402 must not be disabled)'}
if name == '*': if name == '*':
self.add_message('wildcard-import', args=basename, node=node) self.add_message('wildcard-import', args=basename, node=node)
modnode = node.root() modnode = node.root()
importedmodnode = self.get_imported_module(modnode, node, basename) importedmodnode = self.get_imported_module(node, basename)
if importedmodnode is None: if importedmodnode is None:
return return
self._check_relative_import(modnode, node, importedmodnode, basename) self._check_relative_import(modnode, node, importedmodnode, basename)
...@@ -270,15 +283,16 @@ given file (report RP0402 must not be disabled)'} ...@@ -270,15 +283,16 @@ given file (report RP0402 must not be disabled)'}
self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name)) self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
self._check_reimport(node, name, basename, node.level) self._check_reimport(node, name, basename, node.level)
def get_imported_module(self, modnode, importnode, modname): def get_imported_module(self, importnode, modname):
try: try:
return importnode.do_import_module(modname) return importnode.do_import_module(modname)
except astroid.InferenceError, ex: except astroid.InferenceError as ex:
if str(ex) != modname: if str(ex) != modname:
args = '%r (%s)' % (modname, ex) args = '%r (%s)' % (modname, ex)
else: else:
args = repr(modname) args = repr(modname)
self.add_message("import-error", args=args, node=importnode) if not _except_import_error(importnode.parent):
self.add_message("import-error", args=args, node=importnode)
def _check_relative_import(self, modnode, importnode, importedmodnode, def _check_relative_import(self, modnode, importnode, importedmodnode,
importedasname): importedasname):
...@@ -295,7 +309,8 @@ given file (report RP0402 must not be disabled)'} ...@@ -295,7 +309,8 @@ given file (report RP0402 must not be disabled)'}
return False return False
if importedmodnode.name != importedasname: if importedmodnode.name != importedasname:
# this must be a relative import... # this must be a relative import...
self.add_message('relative-import', args=(importedasname, importedmodnode.name), self.add_message('relative-import',
args=(importedasname, importedmodnode.name),
node=importnode) node=importnode)
def _add_imported_module(self, node, importedmodname): def _add_imported_module(self, node, importedmodname):
...@@ -315,8 +330,8 @@ given file (report RP0402 must not be disabled)'} ...@@ -315,8 +330,8 @@ given file (report RP0402 must not be disabled)'}
if not context_name in importedmodnames: if not context_name in importedmodnames:
importedmodnames.add(context_name) importedmodnames.add(context_name)
# update import graph # update import graph
mgraph = self.import_graph.setdefault(context_name, set()) mgraph = self.import_graph[context_name]
if not importedmodname in mgraph: if importedmodname not in mgraph:
mgraph.add(importedmodname) mgraph.add(importedmodname)
def _check_deprecated_module(self, node, mod_path): def _check_deprecated_module(self, node, mod_path):
...@@ -343,7 +358,7 @@ given file (report RP0402 must not be disabled)'} ...@@ -343,7 +358,7 @@ given file (report RP0402 must not be disabled)'}
def report_external_dependencies(self, sect, _, dummy): def report_external_dependencies(self, sect, _, dummy):
"""return a verbatim layout for displaying dependencies""" """return a verbatim layout for displaying dependencies"""
dep_info = make_tree_defs(self._external_dependencies_info().iteritems()) dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
if not dep_info: if not dep_info:
raise EmptyReport() raise EmptyReport()
tree_str = repr_tree_defs(dep_info) tree_str = repr_tree_defs(dep_info)
...@@ -375,7 +390,7 @@ given file (report RP0402 must not be disabled)'} ...@@ -375,7 +390,7 @@ given file (report RP0402 must not be disabled)'}
if self.__ext_dep_info is None: if self.__ext_dep_info is None:
package = self.linter.current_name package = self.linter.current_name
self.__ext_dep_info = result = {} self.__ext_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems(): for importee, importers in six.iteritems(self.stats['dependencies']):
if not importee.startswith(package): if not importee.startswith(package):
result[importee] = importers result[importee] = importers
return self.__ext_dep_info return self.__ext_dep_info
...@@ -387,7 +402,7 @@ given file (report RP0402 must not be disabled)'} ...@@ -387,7 +402,7 @@ given file (report RP0402 must not be disabled)'}
if self.__int_dep_info is None: if self.__int_dep_info is None:
package = self.linter.current_name package = self.linter.current_name
self.__int_dep_info = result = {} self.__int_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems(): for importee, importers in six.iteritems(self.stats['dependencies']):
if importee.startswith(package): if importee.startswith(package):
result[importee] = importers result[importee] = importers
return self.__int_dep_info return self.__int_dep_info
......
...@@ -21,6 +21,7 @@ import re ...@@ -21,6 +21,7 @@ import re
from pylint.interfaces import IRawChecker from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker from pylint.checkers import BaseChecker
import six
MSGS = { MSGS = {
...@@ -72,8 +73,8 @@ class EncodingChecker(BaseChecker): ...@@ -72,8 +73,8 @@ class EncodingChecker(BaseChecker):
def _check_encoding(self, lineno, line, file_encoding): def _check_encoding(self, lineno, line, file_encoding):
try: try:
return unicode(line, file_encoding) return six.text_type(line, file_encoding)
except UnicodeDecodeError, ex: except UnicodeDecodeError as ex:
self.add_message('invalid-encoded-data', line=lineno, self.add_message('invalid-encoded-data', line=lineno,
args=(file_encoding, ex.args[2])) args=(file_encoding, ex.args[2]))
......
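six.text_type is unicode under Python 2 and str under Python 3, so the decode in _check_encoding() behaves the same on both interpreters. A small sketch:

    import six

    raw = b'caf\xc3\xa9'
    text = six.text_type(raw, 'utf-8')  # unicode(raw, 'utf-8') on 2, str(raw, 'utf-8') on 3
    assert text == u'caf\xe9'
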
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
* pylint.d (PYLINTHOME) * pylint.d (PYLINTHOME)
""" """
from __future__ import with_statement from __future__ import with_statement
from __future__ import print_function
import pickle import pickle
import os import os
...@@ -52,7 +53,7 @@ def load_results(base): ...@@ -52,7 +53,7 @@ def load_results(base):
try: try:
with open(data_file, _PICK_LOAD) as stream: with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream) return pickle.load(stream)
except: except Exception: # pylint: disable=broad-except
return {} return {}
if sys.version_info < (3, 0): if sys.version_info < (3, 0):
...@@ -66,13 +67,13 @@ def save_results(results, base): ...@@ -66,13 +67,13 @@ def save_results(results, base):
try: try:
os.mkdir(PYLINT_HOME) os.mkdir(PYLINT_HOME)
except OSError: except OSError:
print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
data_file = get_pdata_path(base, 1) data_file = get_pdata_path(base, 1)
try: try:
with open(data_file, _PICK_DUMP) as stream: with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream) pickle.dump(results, stream)
except (IOError, OSError), ex: except (IOError, OSError) as ex:
print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex) print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
# location of the configuration file ########################################## # location of the configuration file ##########################################
......