Commit 9f1f040b authored by vapier@chromium.org

pylint: upgrade to 1.4.0

This is largely a bugfix release, so it should be much easier to transition to.

BUG=chromium:431514
TEST=ran on some code bases and checked output

Review URL: https://codereview.chromium.org/753543006

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@293355 0039d316-1c4b-4281-b951-d872f2087c98
parent a64c0b08
URL: http://www.logilab.org/project/logilab-astng
Version: 1.2.1
Version: 1.3.2
License: GPL
License File: LICENSE.txt
......
......@@ -79,6 +79,9 @@ class AsStringRegexpPredicate(object):
If specified, the second argument is an `attrgetter` expression that will be
applied on the node first to get the actual node on which `as_string` should
be called.
WARNING: This can be fairly slow, as it has to convert every AST node back
to Python code; you should consider examining the AST directly instead.
"""
def __init__(self, regexp, expression=None):
self.regexp = re.compile(regexp)
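# Illustrative sketch (not part of this diff): the faster alternative hinted at
# in the warning above is a plain predicate that inspects the node directly,
# e.g. the looks_like_namedtuple() helper added further down in this change:
#
#     def looks_like_namedtuple(node):
#         func = node.func
#         if type(func) is nodes.Getattr:
#             return func.attrname == 'namedtuple'
#         if type(func) is nodes.Name:
#             return func.name == 'namedtuple'
#         return False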
......@@ -98,13 +101,23 @@ def inference_tip(infer_function):
.. sourcecode:: python
MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
predicate)
"""
def transform(node, infer_function=infer_function):
node._explicit_inference = infer_function
return node
return transform
def register_module_extender(manager, module_name, get_extension_mod):
def transform(node):
extension_module = get_extension_mod()
for name, obj in extension_module.locals.items():
node.locals[name] = obj
manager.register_transform(Module, transform, lambda n: n.name == module_name)
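# Illustrative usage sketch (module name and constant are hypothetical; the
# real call sites are the register_module_extender() calls in the brain
# plugins further down in this change):
#
#     def example_transform():
#         return AstroidBuilder(MANAGER).string_build('EXAMPLE_CONSTANT = 42')
#     register_module_extender(MANAGER, 'example_module', example_transform)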
# load brain plugins
from os import listdir
from os.path import join, dirname
......
......@@ -16,30 +16,23 @@
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""astroid packaging information"""
distname = 'astroid'
modname = 'astroid'
numversion = (1, 2, 1)
numversion = (1, 3, 2)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0']
install_requires = ['logilab-common >= 0.60.0', 'six']
license = 'LGPL'
author = 'Logilab'
author_email = 'python-projects@lists.logilab.org'
author_email = 'pylint-dev@lists.logilab.org'
mailinglist = "mailto://%s" % author_email
web = 'http://bitbucket.org/logilab/astroid'
description = "rebuild a new abstract syntax tree from Python's ast"
from os.path import join
include_dirs = ['brain',
join('test', 'regrtest_data'),
join('test', 'data'), join('test', 'data2')
]
description = "A abstract syntax tree for Python with inference support."
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
......
......@@ -148,7 +148,7 @@ class AsStringVisitor(object):
bases = bases and '(%s)' % bases or ''
else:
metaclass = node.metaclass()
if metaclass:
if metaclass and not node.has_metaclass_hack():
if bases:
bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
else:
......
......@@ -24,6 +24,8 @@ __docformat__ = "restructuredtext en"
import sys
from contextlib import contextmanager
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
UnresolvableName, UseInferenceDefault)
......@@ -56,63 +58,84 @@ class Proxy(object):
# Inference ##################################################################
MISSING = object()
class InferenceContext(object):
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode')
__slots__ = ('path', 'callcontext', 'boundnode', 'infered')
def __init__(self, path=None):
def __init__(self,
path=None, callcontext=None, boundnode=None, infered=None):
if path is None:
self.path = set()
self.path = frozenset()
else:
self.path = path
self.lookupname = None
self.callcontext = None
self.boundnode = None
def push(self, node):
name = self.lookupname
if (node, name) in self.path:
raise StopIteration()
self.path.add((node, name))
def clone(self):
# XXX copy lookupname/callcontext ?
clone = InferenceContext(self.path)
clone.callcontext = self.callcontext
clone.boundnode = self.boundnode
return clone
self.callcontext = callcontext
self.boundnode = boundnode
if infered is None:
self.infered = {}
else:
self.infered = infered
def push(self, key):
# This returns a NEW context with the same attributes, but a new key
# added to `path`. The intention is that it's only passed to callees
# and then destroyed; otherwise scope() may not work correctly.
# The cache will be shared, since it's the same exact dict.
if key in self.path:
# End the containing generator
raise StopIteration
return InferenceContext(
self.path.union([key]),
self.callcontext,
self.boundnode,
self.infered,
)
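# Illustrative sketch (assumption, mirroring Instance.igetattr() below):
# callers pass the returned context down rather than mutating their own, e.g.
#
#     new_context = context.push((self._proxied, name))
#     get_attr = self.getattr(name, new_context, lookupclass=False)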
@contextmanager
def restore_path(self):
path = set(self.path)
yield
self.path = path
def copy_context(context):
if context is not None:
return context.clone()
else:
return InferenceContext()
def scope(self, callcontext=MISSING, boundnode=MISSING):
try:
orig = self.callcontext, self.boundnode
if callcontext is not MISSING:
self.callcontext = callcontext
if boundnode is not MISSING:
self.boundnode = boundnode
yield
finally:
self.callcontext, self.boundnode = orig
def cache_generator(self, key, generator):
results = []
for result in generator:
results.append(result)
yield result
self.infered[key] = tuple(results)
return
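# Illustrative sketch (mirrors BoundMethod.infer_call_result() and
# NodeNG.infer() further down): scope() temporarily swaps callcontext/boundnode
# for the duration of a with-block, and cache_generator() memoizes the values
# an inference generator yields, e.g.
#
#     with context.scope(boundnode=self.bound):
#         for infered in self._proxied.infer_call_result(caller, context):
#             yield infered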
def _infer_stmts(stmts, context, frame=None):
def _infer_stmts(stmts, context, frame=None, lookupname=None):
"""return an iterator on statements inferred by each statement in <stmts>
"""
stmt = None
infered = False
if context is not None:
name = context.lookupname
context = context.clone()
else:
name = None
if context is None:
context = InferenceContext()
for stmt in stmts:
if stmt is YES:
yield stmt
infered = True
continue
context.lookupname = stmt._infer_name(frame, name)
kw = {}
infered_name = stmt._infer_name(frame, lookupname)
if infered_name is not None:
# _infer_name() returns a non-None value only if .infer() accepts a lookupname kwarg
kw['lookupname'] = infered_name
try:
for infered in stmt.infer(context):
for infered in stmt.infer(context, **kw):
yield infered
infered = True
except UnresolvableName:
......@@ -170,20 +193,24 @@ class Instance(Proxy):
def igetattr(self, name, context=None):
"""inferred getattr"""
if not context:
context = InferenceContext()
try:
# avoid recursively inferring the same attr on the same class
if context:
context.push((self._proxied, name))
new_context = context.push((self._proxied, name))
# XXX frame should be self._proxied, or not ?
get_attr = self.getattr(name, context, lookupclass=False)
return _infer_stmts(self._wrap_attr(get_attr, context), context,
frame=self)
get_attr = self.getattr(name, new_context, lookupclass=False)
return _infer_stmts(
self._wrap_attr(get_attr, new_context),
new_context,
frame=self,
)
except NotFoundError:
try:
# fallback to class'igetattr since it has some logic to handle
# descriptors
return self._wrap_attr(self._proxied.igetattr(name, context),
context)
context)
except NotFoundError:
raise InferenceError(name)
......@@ -274,9 +301,9 @@ class BoundMethod(UnboundMethod):
return True
def infer_call_result(self, caller, context):
context = context.clone()
context.boundnode = self.bound
return self._proxied.infer_call_result(caller, context)
with context.scope(boundnode=self.bound):
for infered in self._proxied.infer_call_result(caller, context):
yield infered
class Generator(Instance):
......@@ -308,7 +335,8 @@ def path_wrapper(func):
"""wrapper function handling context"""
if context is None:
context = InferenceContext()
context.push(node)
context = context.push((node, kwargs.get('lookupname')))
yielded = set()
for res in _func(node, context, **kwargs):
# unproxy only true instance, not const, tuple, dict...
......@@ -377,7 +405,15 @@ class NodeNG(object):
return self._explicit_inference(self, context, **kwargs)
except UseInferenceDefault:
pass
return self._infer(context, **kwargs)
if not context:
return self._infer(context, **kwargs)
key = (self, kwargs.get('lookupname'), context.callcontext, context.boundnode)
if key in context.infered:
return iter(context.infered[key])
return context.cache_generator(key, self._infer(context, **kwargs))
def _repr_name(self):
"""return self.name or self.attrname or '' for nice representation"""
......@@ -415,7 +451,7 @@ class NodeNG(object):
attr = getattr(self, field)
if not attr: # None or empty listy / tuple
continue
if isinstance(attr, (list, tuple)):
if attr.__class__ in (list, tuple):
return attr[-1]
else:
return attr
......@@ -506,16 +542,28 @@ class NodeNG(object):
# FIXME: raise an exception if nearest is None ?
return nearest[0]
def set_line_info(self, lastchild):
# these are lazy because they're relatively expensive to compute for every
# single node, and they rarely get looked at
@cachedproperty
def fromlineno(self):
if self.lineno is None:
self.fromlineno = self._fixed_source_line()
return self._fixed_source_line()
else:
return self.lineno
@cachedproperty
def tolineno(self):
if not self._astroid_fields:
# can't have children
lastchild = None
else:
self.fromlineno = self.lineno
lastchild = self.last_child()
if lastchild is None:
self.tolineno = self.fromlineno
return self.fromlineno
else:
self.tolineno = lastchild.tolineno
return
return lastchild.tolineno
# TODO / FIXME:
assert self.fromlineno is not None, self
assert self.tolineno is not None, self
......@@ -530,7 +578,7 @@ class NodeNG(object):
_node = self
try:
while line is None:
_node = _node.get_children().next()
_node = next(_node.get_children())
line = _node.lineno
except StopIteration:
_node = self.parent
......
......@@ -4,6 +4,7 @@ Helps with understanding everything imported from 'gi.repository'
"""
import inspect
import itertools
import sys
import re
......@@ -111,40 +112,33 @@ def _gi_build_stub(parent):
return ret
# Overwrite Module.module_import to _actually_ import the introspected module if
# it's a gi module, then build stub code by examining its info and get an astng
# from that
from astroid.scoped_nodes import Module
_orig_import_module = Module.import_module
def _new_import_module(self, modname, relative_only=False, level=None):
# Could be a static piece of gi.repository or whatever unrelated module,
# let that fall through
try:
return _orig_import_module(self, modname, relative_only, level)
except AstroidBuildingException:
# we only consider gi.repository submodules
if not modname.startswith('gi.repository.'):
if relative_only and level is None:
level = 0
modname = self.relative_to_absolute_name(modname, level)
if not modname.startswith('gi.repository.'):
raise
def _import_gi_module(modname):
# we only consider gi.repository submodules
if not modname.startswith('gi.repository.'):
raise AstroidBuildingException()
# build astroid representation unless we already tried so
if modname not in _inspected_modules:
modnames = [modname]
# GLib and GObject have some special case handling
# in pygobject that we need to cope with
optional_modnames = []
# GLib and GObject may have some special case handling
# in pygobject that we need to cope with. However at
# least as of pygobject3-3.13.91 the _glib module doesn't
# exist anymore, so we treat these modules as optional.
if modname == 'gi.repository.GLib':
modnames.append('gi._glib')
optional_modnames.append('gi._glib')
elif modname == 'gi.repository.GObject':
modnames.append('gi._gobject')
optional_modnames.append('gi._gobject')
try:
modcode = ''
for m in modnames:
__import__(m)
modcode += _gi_build_stub(sys.modules[m])
for m in itertools.chain(modnames, optional_modnames):
try:
__import__(m)
modcode += _gi_build_stub(sys.modules[m])
except ImportError:
if m not in optional_modnames:
raise
except ImportError:
astng = _inspected_modules[modname] = None
else:
......@@ -156,4 +150,6 @@ def _new_import_module(self, modname, relative_only=False, level=None):
raise AstroidBuildingException('Failed to import module %r' % modname)
return astng
Module.import_module = _new_import_module
MANAGER.register_failed_import_hook(_import_gi_module)
from astroid import MANAGER
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def mechanize_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def mechanize_transform():
return AstroidBuilder(MANAGER).string_build('''
class Browser(object):
def open(self, url, data=None, timeout=None):
......@@ -13,8 +13,6 @@ class Browser(object):
return None
''')
module.locals['Browser'] = fake.locals['Browser']
import py2stdlib
py2stdlib.MODULE_TRANSFORMS['mechanize'] = mechanize_transform
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
"""Astroid hooks for pytest."""
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pytest_transform():
return AstroidBuilder(MANAGER).string_build('''
try:
import _pytest.mark
import _pytest.recwarn
import _pytest.runner
import _pytest.python
except ImportError:
pass
else:
deprecated_call = _pytest.recwarn.deprecated_call
exit = _pytest.runner.exit
fail = _pytest.runner.fail
fixture = _pytest.python.fixture
importorskip = _pytest.runner.importorskip
mark = _pytest.mark.MarkGenerator()
raises = _pytest.python.raises
skip = _pytest.runner.skip
yield_fixture = _pytest.python.yield_fixture
''')
register_module_extender(MANAGER, 'pytest', pytest_transform)
register_module_extender(MANAGER, 'py.test', pytest_transform)
......@@ -5,21 +5,18 @@ Currently help understanding of :
* PyQT4.QtCore
"""
from astroid import MANAGER
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pyqt4_qtcore_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def pyqt4_qtcore_transform():
return AstroidBuilder(MANAGER).string_build('''
def SIGNAL(signal_name): pass
class QObject(object):
def emit(self, signal): pass
''')
for klass in ('QObject',):
module.locals[klass] = fake.locals[klass]
import py2stdlib
py2stdlib.MODULE_TRANSFORMS['PyQt4.QtCore'] = pyqt4_qtcore_transform
register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
......@@ -11,12 +11,11 @@ from textwrap import dedent
from astroid import (
MANAGER, AsStringRegexpPredicate,
UseInferenceDefault, inference_tip,
YES, InferenceError)
YES, InferenceError, register_module_extender)
from astroid import exceptions
from astroid import nodes
from astroid.builder import AstroidBuilder
MODULE_TRANSFORMS = {}
PY3K = sys.version_info > (3, 0)
PY33 = sys.version_info >= (3, 3)
......@@ -26,7 +25,7 @@ def infer_func_form(node, base_type, context=None, enum=False):
"""Specific inference function for namedtuple or Python 3 enum. """
def infer_first(node):
try:
value = node.infer(context=context).next()
value = next(node.infer(context=context))
if value is YES:
raise UseInferenceDefault()
else:
......@@ -90,39 +89,31 @@ def infer_func_form(node, base_type, context=None, enum=False):
# module specific transformation functions #####################################
def transform(module):
try:
tr = MODULE_TRANSFORMS[module.name]
except KeyError:
pass
else:
tr(module)
MANAGER.register_transform(nodes.Module, transform)
# module specific transformation functions #####################################
def hashlib_transform(module):
def hashlib_transform():
template = '''
class %s(object):
class %(name)s(object):
def __init__(self, value=''): pass
def digest(self):
return u''
return %(digest)s
def copy(self):
return self
def update(self, value): pass
def hexdigest(self):
return u''
return ''
@property
def name(self):
return %(name)r
'''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(template % hashfunc for hashfunc in algorithms)
classes = "".join(
template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'}
for hashfunc in algorithms)
return AstroidBuilder(MANAGER).string_build(classes)
fake = AstroidBuilder(MANAGER).string_build(classes)
for hashfunc in algorithms:
module.locals[hashfunc] = fake.locals[hashfunc]
def collections_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def collections_transform():
return AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict):
default_factory = None
......@@ -146,11 +137,9 @@ class deque(object):
''')
for klass in ('deque', 'defaultdict'):
module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def pkg_resources_transform():
return AstroidBuilder(MANAGER).string_build('''
def resource_exists(package_or_requirement, resource_name):
pass
......@@ -187,11 +176,8 @@ def cleanup_resources(force=False):
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def subprocess_transform(module):
def subprocess_transform():
if PY3K:
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
init = """
......@@ -217,7 +203,7 @@ def subprocess_transform(module):
wait_signature = 'def wait(self, timeout=None)'
else:
wait_signature = 'def wait(self)'
fake = AstroidBuilder(MANAGER).string_build('''
return AstroidBuilder(MANAGER).string_build('''
class Popen(object):
returncode = pid = 0
......@@ -241,18 +227,17 @@ class Popen(object):
'communicate': communicate,
'wait_signature': wait_signature})
for func_name, func in fake.locals.items():
module.locals[func_name] = func
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
# namedtuple support ###########################################################
def looks_like_namedtuple(node):
func = node.func
if type(func) is nodes.Getattr:
return func.attrname == 'namedtuple'
if type(func) is nodes.Name:
return func.name == 'namedtuple'
return False
def infer_named_tuple(node, context=None):
"""Specific inference function for namedtuple CallFunc node"""
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
......@@ -285,11 +270,12 @@ def infer_enum(node, context=None):
context=context, enum=True)[0]
return iter([class_node.instanciate_class()])
def infer_enum_class(node, context=None):
def infer_enum_class(node):
""" Specific inference for enums. """
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
for basename in node.basenames:
# TODO: doesn't handle subclasses yet.
# TODO: doesn't handle subclasses yet. This implementation
# is a hack to support enums.
if basename not in names:
continue
if node.root().name == 'enum':
......@@ -299,22 +285,26 @@ def infer_enum_class(node, context=None):
if any(not isinstance(value, nodes.AssName)
for value in values):
continue
parent = values[0].parent
real_value = parent.value
stmt = values[0].statement()
if isinstance(stmt.targets[0], nodes.Tuple):
targets = stmt.targets[0].itered()
else:
targets = stmt.targets
new_targets = []
for target in parent.targets:
for target in targets:
# Replace all the assignments with our mocked class.
classdef = dedent('''
class %(name)s(object):
@property
def value(self):
return %(value)s
# Not the best return.
return None
@property
def name(self):
return %(name)r
%(name)s = %(value)s
''' % {'name': target.name,
'value': real_value.as_string()})
''' % {'name': target.name})
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
fake.parent = target.parent
for method in node.mymethods():
......@@ -324,8 +314,13 @@ def infer_enum_class(node, context=None):
break
return node
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
looks_like_namedtuple)
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
AsStringRegexpPredicate('Enum', 'func'))
MANAGER.register_transform(nodes.Class, infer_enum_class)
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
register_module_extender(MANAGER, 'collections', collections_transform)
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
......@@ -44,7 +44,7 @@ if sys.version_info >= (3, 0):
def open_source_file(filename):
with open(filename, 'rb') as byte_stream:
encoding = detect_encoding(byte_stream.readline)[0]
stream = open(filename, 'rU', encoding=encoding)
stream = open(filename, 'r', newline=None, encoding=encoding)
try:
data = stream.read()
except UnicodeError: # wrong encoding
......@@ -115,23 +115,24 @@ class AstroidBuilder(InspectBuilder):
path is expected to be a python source file
"""
try:
_, encoding, data = open_source_file(path)
except IOError, exc:
stream, encoding, data = open_source_file(path)
except IOError as exc:
msg = 'Unable to load file %r (%s)' % (path, exc)
raise AstroidBuildingException(msg)
except SyntaxError, exc: # py3k encoding specification error
except SyntaxError as exc: # py3k encoding specification error
raise AstroidBuildingException(exc)
except LookupError, exc: # unknown encoding
except LookupError as exc: # unknown encoding
raise AstroidBuildingException(exc)
# get module name if necessary
if modname is None:
try:
modname = '.'.join(modpath_from_file(path))
except ImportError:
modname = splitext(basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
with stream:
# get module name if necessary
if modname is None:
try:
modname = '.'.join(modpath_from_file(path))
except ImportError:
modname = splitext(basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
def string_build(self, data, modname='', path=None):
"""build astroid from source code string and return rebuilded astroid"""
......@@ -159,7 +160,10 @@ class AstroidBuilder(InspectBuilder):
def _data_build(self, data, modname, path):
"""build tree node from data and add some informations"""
# this method could be wrapped with a pickle/cache function
node = parse(data + '\n')
try:
node = parse(data + '\n')
except TypeError as exc:
raise AstroidBuildingException(exc)
if path is not None:
node_file = abspath(path)
else:
......@@ -170,8 +174,7 @@ class AstroidBuilder(InspectBuilder):
else:
package = path and path.find('__init__.py') > -1 or False
rebuilder = TreeRebuilder(self._manager)
module = rebuilder.visit_module(node, modname, package)
module.file = module.path = node_file
module = rebuilder.visit_module(node, modname, node_file, package)
module._from_nodes = rebuilder._from_nodes
module._delayed_assattr = rebuilder._delayed_assattr
return module
......
......@@ -28,7 +28,7 @@ from astroid.manager import AstroidManager
from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
NotFoundError, UnresolvableName)
from astroid.bases import (YES, Instance, InferenceContext,
_infer_stmts, copy_context, path_wrapper,
_infer_stmts, path_wrapper,
raise_if_nothing_infered)
from astroid.protocols import (
_arguments_infer_argname,
......@@ -175,93 +175,89 @@ def infer_name(self, context=None):
if not stmts:
raise UnresolvableName(self.name)
context = context.clone()
context.lookupname = self.name
return _infer_stmts(stmts, context, frame)
return _infer_stmts(stmts, context, frame, self.name)
nodes.Name._infer = path_wrapper(infer_name)
nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
def infer_callfunc(self, context=None):
"""infer a CallFunc node by trying to guess what the function returns"""
callcontext = context.clone()
callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
callcontext.boundnode = None
if context is None:
context = InferenceContext()
for callee in self.func.infer(context):
if callee is YES:
yield callee
continue
try:
if hasattr(callee, 'infer_call_result'):
for infered in callee.infer_call_result(self, callcontext):
yield infered
except InferenceError:
## XXX log error ?
continue
with context.scope(
callcontext=CallContext(self.args, self.starargs, self.kwargs),
boundnode=None,
):
if callee is YES:
yield callee
continue
try:
if hasattr(callee, 'infer_call_result'):
for infered in callee.infer_call_result(self, context):
yield infered
except InferenceError:
## XXX log error ?
continue
nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
def infer_import(self, context=None, asname=True):
def infer_import(self, context=None, asname=True, lookupname=None):
"""infer an Import node: return the imported module/object"""
name = context.lookupname
if name is None:
if lookupname is None:
raise InferenceError()
if asname:
yield self.do_import_module(self.real_name(name))
yield self.do_import_module(self.real_name(lookupname))
else:
yield self.do_import_module(name)
yield self.do_import_module(lookupname)
nodes.Import._infer = path_wrapper(infer_import)
def infer_name_module(self, name):
context = InferenceContext()
context.lookupname = name
return self.infer(context, asname=False)
return self.infer(context, asname=False, lookupname=name)
nodes.Import.infer_name_module = infer_name_module
def infer_from(self, context=None, asname=True):
def infer_from(self, context=None, asname=True, lookupname=None):
"""infer a From nodes: return the imported module/object"""
name = context.lookupname
if name is None:
if lookupname is None:
raise InferenceError()
if asname:
name = self.real_name(name)
lookupname = self.real_name(lookupname)
module = self.do_import_module()
try:
context = copy_context(context)
context.lookupname = name
return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
return _infer_stmts(module.getattr(lookupname, ignore_locals=module is self.root()), context, lookupname=lookupname)
except NotFoundError:
raise InferenceError(name)
raise InferenceError(lookupname)
nodes.From._infer = path_wrapper(infer_from)
def infer_getattr(self, context=None):
"""infer a Getattr node by using getattr on the associated object"""
#context = context.clone()
if not context:
context = InferenceContext()
for owner in self.expr.infer(context):
if owner is YES:
yield owner
continue
try:
context.boundnode = owner
for obj in owner.igetattr(self.attrname, context):
yield obj
context.boundnode = None
with context.scope(boundnode=owner):
for obj in owner.igetattr(self.attrname, context):
yield obj
except (NotFoundError, InferenceError):
context.boundnode = None
pass
except AttributeError:
# XXX method / function
context.boundnode = None
pass
nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # won't work with a path wrapper
def infer_global(self, context=None):
if context.lookupname is None:
def infer_global(self, context=None, lookupname=None):
if lookupname is None:
raise InferenceError()
try:
return _infer_stmts(self.root().getattr(context.lookupname), context)
return _infer_stmts(self.root().getattr(lookupname), context)
except NotFoundError:
raise InferenceError()
nodes.Global._infer = path_wrapper(infer_global)
......@@ -269,12 +265,12 @@ nodes.Global._infer = path_wrapper(infer_global)
def infer_subscript(self, context=None):
"""infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
value = self.value.infer(context).next()
value = next(self.value.infer(context))
if value is YES:
yield YES
return
index = self.slice.infer(context).next()
index = next(self.slice.infer(context))
if index is YES:
yield YES
return
......@@ -287,6 +283,12 @@ def infer_subscript(self, context=None):
except (IndexError, TypeError):
yield YES
return
# Prevent inferring if the infered subscript
# is the same as the original subscripted object.
if self is assigned:
yield YES
return
for infered in assigned.infer(context):
yield infered
else:
......@@ -347,11 +349,10 @@ def infer_binop(self, context=None):
nodes.BinOp._infer = path_wrapper(infer_binop)
def infer_arguments(self, context=None):
name = context.lookupname
if name is None:
def infer_arguments(self, context=None, lookupname=None):
if lookupname is None:
raise InferenceError()
return _arguments_infer_argname(self, name, context)
return _arguments_infer_argname(self, lookupname, context)
nodes.Arguments._infer = infer_arguments
......
......@@ -19,29 +19,31 @@
possible by providing a class responsible for getting astroid representations
from various sources and using a cache of built modules)
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
import collections
import imp
import os
from os.path import dirname, join, isdir, exists
from warnings import warn
import zipimport
from logilab.common.configuration import OptionsProviderMixIn
from astroid.exceptions import AstroidBuildingException
from astroid.modutils import NoSourceFile, is_python_source, \
file_from_modpath, load_module_from_name, modpath_from_file, \
get_module_files, get_source_file, zipimport
from astroid import modutils
def astroid_wrapper(func, modname):
"""wrapper to give to AstroidManager.project_from_files"""
print 'parsing %s...' % modname
print('parsing %s...' % modname)
try:
return func(modname)
except AstroidBuildingException, exc:
print exc
except Exception, exc:
except AstroidBuildingException as exc:
print(exc)
except Exception as exc:
import traceback
traceback.print_exc()
......@@ -85,18 +87,21 @@ class AstroidManager(OptionsProviderMixIn):
# NOTE: cache entries are added by the [re]builder
self.astroid_cache = {}
self._mod_file_cache = {}
self.transforms = {}
self.transforms = collections.defaultdict(list)
self._failed_import_hooks = []
self.always_load_extensions = False
self.extension_package_whitelist = set()
def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
"""given a module name, return the astroid object"""
try:
filepath = get_source_file(filepath, include_no_ext=True)
filepath = modutils.get_source_file(filepath, include_no_ext=True)
source = True
except NoSourceFile:
except modutils.NoSourceFile:
pass
if modname is None:
try:
modname = '.'.join(modpath_from_file(filepath))
modname = '.'.join(modutils.modpath_from_file(filepath))
except ImportError:
modname = filepath
if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath:
......@@ -109,30 +114,56 @@ class AstroidManager(OptionsProviderMixIn):
raise AstroidBuildingException('unable to get astroid for file %s' %
filepath)
def _build_stub_module(self, modname):
from astroid.builder import AstroidBuilder
return AstroidBuilder(self).string_build('', modname)
def _can_load_extension(self, modname):
if self.always_load_extensions:
return True
if modutils.is_standard_module(modname):
return True
parts = modname.split('.')
return any(
'.'.join(parts[:x]) in self.extension_package_whitelist
for x in range(1, len(parts) + 1))
def ast_from_module_name(self, modname, context_file=None):
"""given a module name, return the astroid object"""
if modname in self.astroid_cache:
return self.astroid_cache[modname]
if modname == '__main__':
from astroid.builder import AstroidBuilder
return AstroidBuilder(self).string_build('', modname)
return self._build_stub_module(modname)
old_cwd = os.getcwd()
if context_file:
os.chdir(dirname(context_file))
try:
filepath = self.file_from_module_name(modname, context_file)
if filepath is not None and not is_python_source(filepath):
filepath, mp_type = self.file_from_module_name(modname, context_file)
if mp_type == modutils.PY_ZIPMODULE:
module = self.zip_import_data(filepath)
if module is not None:
return module
if filepath is None or not is_python_source(filepath):
elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION):
if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
return self._build_stub_module(modname)
try:
module = load_module_from_name(modname)
except Exception, ex:
module = modutils.load_module_from_name(modname)
except Exception as ex:
msg = 'Unable to load module %s (%s)' % (modname, ex)
raise AstroidBuildingException(msg)
return self.ast_from_module(module, modname)
elif mp_type == imp.PY_COMPILED:
raise AstroidBuildingException("Unable to load compiled module %s" % (modname,))
if filepath is None:
raise AstroidBuildingException("Unable to load module %s" % (modname,))
return self.ast_from_file(filepath, modname, fallback=False)
except AstroidBuildingException as e:
for hook in self._failed_import_hooks:
try:
return hook(modname)
except AstroidBuildingException:
pass
raise e
finally:
os.chdir(old_cwd)
......@@ -143,12 +174,12 @@ class AstroidManager(OptionsProviderMixIn):
builder = AstroidBuilder(self)
for ext in ('.zip', '.egg'):
try:
eggpath, resource = filepath.rsplit(ext + '/', 1)
eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
except ValueError:
continue
try:
importer = zipimport.zipimporter(eggpath + ext)
zmodname = resource.replace('/', '.')
zmodname = resource.replace(os.path.sep, '.')
if importer.is_package(resource):
zmodname = zmodname + '.__init__'
module = builder.string_build(importer.get_source(resource),
......@@ -163,9 +194,9 @@ class AstroidManager(OptionsProviderMixIn):
value = self._mod_file_cache[(modname, contextfile)]
except KeyError:
try:
value = file_from_modpath(modname.split('.'),
context_file=contextfile)
except ImportError, ex:
value = modutils.file_info_from_modpath(
modname.split('.'), context_file=contextfile)
except ImportError as ex:
msg = 'Unable to load module %s (%s)' % (modname, ex)
value = AstroidBuildingException(msg)
self._mod_file_cache[(modname, contextfile)] = value
......@@ -181,7 +212,7 @@ class AstroidManager(OptionsProviderMixIn):
try:
# some builtin modules don't have __file__ attribute
filepath = module.__file__
if is_python_source(filepath):
if modutils.is_python_source(filepath):
return self.ast_from_file(filepath, modname)
except AttributeError:
pass
......@@ -211,7 +242,7 @@ class AstroidManager(OptionsProviderMixIn):
except AttributeError:
raise AstroidBuildingException(
'Unable to get module for %s' % safe_repr(klass))
except Exception, ex:
except Exception as ex:
raise AstroidBuildingException(
'Unexpected error while retrieving module for %s: %s'
% (safe_repr(klass), ex))
......@@ -220,7 +251,7 @@ class AstroidManager(OptionsProviderMixIn):
except AttributeError:
raise AstroidBuildingException(
'Unable to get name for %s' % safe_repr(klass))
except Exception, ex:
except Exception as ex:
raise AstroidBuildingException(
'Unexpected error while retrieving name for %s: %s'
% (safe_repr(klass), ex))
......@@ -242,7 +273,7 @@ class AstroidManager(OptionsProviderMixIn):
project = Project(project_name)
for something in files:
if not exists(something):
fpath = file_from_modpath(something.split('.'))
fpath = modutils.file_from_modpath(something.split('.'))
elif isdir(something):
fpath = join(something, '__init__.py')
else:
......@@ -257,8 +288,8 @@ class AstroidManager(OptionsProviderMixIn):
# recurse in package except if __init__ was explicitly given
if astroid.package and something.find('__init__') == -1:
# recurse on others packages / modules if this is a package
for fpath in get_module_files(dirname(astroid.file),
black_list):
for fpath in modutils.get_module_files(dirname(astroid.file),
black_list):
astroid = func_wrapper(self.ast_from_file, fpath)
if astroid is None or astroid.name == base_name:
continue
......@@ -267,18 +298,28 @@ class AstroidManager(OptionsProviderMixIn):
def register_transform(self, node_class, transform, predicate=None):
"""Register `transform(node)` function to be applied on the given
Astroid's `node_class` if `predicate` is None or return a true value
Astroid's `node_class` if `predicate` is None or returns true
when called with the node as argument.
The transform function may return a value which is then used to
substitute the original node in the tree.
"""
self.transforms.setdefault(node_class, []).append((transform, predicate))
self.transforms[node_class].append((transform, predicate))
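# Illustrative usage sketch (transform and predicate are hypothetical; compare
# the MANAGER.register_transform() calls in the brain plugins above):
#
#     def add_doc_note(cls_node):
#         cls_node.doc = (cls_node.doc or '') + '\n(processed)'
#     MANAGER.register_transform(nodes.Class, add_doc_note,
#                                lambda node: node.name.endswith('Mixin'))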
def unregister_transform(self, node_class, transform, predicate=None):
"""Unregister the given transform."""
self.transforms[node_class].remove((transform, predicate))
def register_failed_import_hook(self, hook):
""""Registers a hook to resolve imports that cannot be found otherwise.
`hook` must be a function that accepts a single argument `modname` which
contains the name of the module or package that could not be imported.
If `hook` can resolve the import, must return a node of type `astroid.Module`,
otherwise, it must raise `AstroidBuildingException`.
"""
self._failed_import_hooks.append(hook)
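# Illustrative usage sketch (the module prefix is hypothetical; the gi brain
# plugin above registers _import_gi_module through this hook):
#
#     def resolve_fake_modules(modname):
#         if not modname.startswith('fake.'):
#             raise AstroidBuildingException()
#         return AstroidBuilder(MANAGER).string_build('', modname)
#     MANAGER.register_failed_import_hook(resolve_fake_modules)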
def transform(self, node):
"""Call matching transforms for the given node if any and return the
transformed node.
......@@ -308,6 +349,7 @@ class AstroidManager(OptionsProviderMixIn):
self.astroid_cache.setdefault(module.name, module)
def clear_cache(self):
# XXX clear transforms
self.astroid_cache.clear()
# force bootstrap again, else we may ends up with cache inconsistency
# between the manager and CONST_PROXY, making
......
......@@ -18,16 +18,18 @@
"""This module contains some mixins for the different nodes.
"""
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (AstroidBuildingException, InferenceError,
NotFoundError)
class BlockRangeMixIn(object):
"""override block range """
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self._blockstart_toline()
@cachedproperty
def blockstart_tolineno(self):
return self.lineno
def _elsed_block_range(self, lineno, orelse, last=None):
"""handle block line numbers range for try/finally, for, if and while
......@@ -105,7 +107,7 @@ class FromImportMixIn(FilterStmtsMixin):
return mymodule.import_module(modname, level=level)
except AstroidBuildingException:
raise InferenceError(modname)
except SyntaxError, ex:
except SyntaxError as ex:
raise InferenceError(str(ex))
def real_name(self, asname):
......
......@@ -20,8 +20,8 @@
:type PY_SOURCE_EXTS: tuple(str)
:var PY_SOURCE_EXTS: list of possible python source file extension
:type STD_LIB_DIR: str
:var STD_LIB_DIR: directory where standard modules are located
:type STD_LIB_DIRS: list of str
:var STD_LIB_DIRS: directories where standard modules are located
:type BUILTIN_MODULES: dict
:var BUILTIN_MODULES: dictionary with builtin module names as key
......@@ -30,28 +30,22 @@ from __future__ import with_statement
__docformat__ = "restructuredtext en"
import sys
import imp
import os
from os.path import splitext, join, abspath, isdir, dirname, exists
from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY
import sys
from distutils.sysconfig import get_python_lib
from distutils.errors import DistutilsPlatformError
import zipimport
try:
import zipimport
import pkg_resources
except ImportError:
zipimport = None
ZIPFILE = object()
pkg_resources = None
from logilab.common import _handle_blacklist
# Notes about STD_LIB_DIR
# Consider arch-specific installation for STD_LIB_DIR definition
# :mod:`distutils.sysconfig` contains too many hardcoded values to rely on
#
# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
PY_ZIPMODULE = object()
if sys.platform.startswith('win'):
PY_SOURCE_EXTS = ('py', 'pyw')
PY_COMPILED_EXTS = ('dll', 'pyd')
......@@ -59,12 +53,32 @@ else:
PY_SOURCE_EXTS = ('py',)
PY_COMPILED_EXTS = ('so',)
# Notes about STD_LIB_DIRS
# Consider arch-specific installation for STD_LIB_DIR definition
# :mod:`distutils.sysconfig` contains too many hardcoded values to rely on
#
# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
try:
STD_LIB_DIR = get_python_lib(standard_lib=1)
# The explicit prefix is to work around a patch in virtualenv that
# replaces the 'real' sys.prefix (i.e. the location of the binary)
# with the prefix from which the virtualenv was created. This throws
# off the detection logic for standard library modules, thus the
# workaround.
STD_LIB_DIRS = [
get_python_lib(standard_lib=True, prefix=sys.prefix),
get_python_lib(standard_lib=True)]
if os.name == 'nt':
STD_LIB_DIRS.append(os.path.join(sys.prefix, 'dlls'))
try:
# real_prefix is defined when running inside virtualenv.
STD_LIB_DIRS.append(os.path.join(sys.real_prefix, 'dlls'))
except AttributeError:
pass
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
# non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
STD_LIB_DIR = '//'
STD_LIB_DIRS = []
EXT_LIB_DIR = get_python_lib()
......@@ -77,6 +91,24 @@ class NoSourceFile(Exception):
source file for a precompiled file
"""
def _normalize_path(path):
return os.path.normcase(os.path.abspath(path))
_NORM_PATH_CACHE = {}
def _cache_normalize_path(path):
"""abspath with caching"""
# _module_file calls abspath on every path in sys.path every time it's
# called; on a larger codebase this easily adds up to half a second just
# assembling path components. This cache alleviates that.
try:
return _NORM_PATH_CACHE[path]
except KeyError:
if not path: # don't cache result for ''
return _normalize_path(path)
result = _NORM_PATH_CACHE[path] = _normalize_path(path)
return result
def load_module_from_name(dotted_name, path=None, use_sys=1):
"""Load a Python module from its name.
......@@ -142,14 +174,17 @@ def load_module_from_modpath(parts, path=None, use_sys=1):
# because it may have been indirectly loaded through a parent
module = sys.modules.get(curname)
if module is None:
mp_file, mp_filename, mp_desc = find_module(part, path)
module = load_module(curname, mp_file, mp_filename, mp_desc)
mp_file, mp_filename, mp_desc = imp.find_module(part, path)
module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
# mp_file still needs to be closed.
if mp_file:
mp_file.close()
if prevmodule:
setattr(prevmodule, part, module)
_file = getattr(module, '__file__', '')
if not _file and len(modpath) != len(parts):
raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]))
path = [dirname(_file)]
path = [os.path.dirname(_file)]
prevmodule = module
return module
......@@ -183,7 +218,7 @@ def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
def _check_init(path, mod_path):
"""check there are some __init__.py all along the way"""
for part in mod_path:
path = join(path, part)
path = os.path.join(path, part)
if not _has_init(path):
return False
return True
......@@ -209,18 +244,18 @@ def modpath_from_file(filename, extrapath=None):
:rtype: list(str)
:return: the corresponding split module name
"""
base = splitext(abspath(filename))[0]
base = os.path.splitext(os.path.abspath(filename))[0]
if extrapath is not None:
for path_ in extrapath:
path = _abspath(path_)
if path and base[:len(path)] == path:
path = os.path.abspath(path_)
if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path):
submodpath = [pkg for pkg in base[len(path):].split(os.sep)
if pkg]
if _check_init(path, submodpath[:-1]):
return extrapath[path_].split('.') + submodpath
for path in sys.path:
path = _abspath(path)
if path and base.startswith(path):
path = _cache_normalize_path(path)
if path and os.path.normcase(base).startswith(path):
modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
if _check_init(path, modpath[:-1]):
return modpath
......@@ -228,8 +263,10 @@ def modpath_from_file(filename, extrapath=None):
filename, ', \n'.join(sys.path)))
def file_from_modpath(modpath, path=None, context_file=None):
return file_info_from_modpath(modpath, path, context_file)[0]
def file_info_from_modpath(modpath, path=None, context_file=None):
"""given a mod path (i.e. splitted module / package name), return the
corresponding file, giving priority to source file over precompiled
file if it exists
......@@ -254,13 +291,13 @@ def file_from_modpath(modpath, path=None, context_file=None):
:raise ImportError: if there is no such module in the directory
:rtype: str or None
:rtype: (str or None, import type)
:return:
the path to the module's file or None if it's an integrated
builtin module such as 'sys'
"""
if context_file is not None:
context = dirname(context_file)
context = os.path.dirname(context_file)
else:
context = context_file
if modpath[0] == 'xml':
......@@ -271,11 +308,10 @@ def file_from_modpath(modpath, path=None, context_file=None):
return _file_from_modpath(modpath, path, context)
elif modpath == ['os', 'path']:
# FIXME: currently ignoring search_path...
return os.path.__file__
return os.path.__file__, imp.PY_SOURCE
return _file_from_modpath(modpath, path, context)
def get_module_part(dotted_name, context_file=None):
"""given a dotted name return the module part of the name :
......@@ -323,7 +359,7 @@ def get_module_part(dotted_name, context_file=None):
starti = 1
while parts[starti] == '': # for all further dots: change context
starti += 1
context_file = dirname(context_file)
context_file = os.path.dirname(context_file)
for i in range(starti, len(parts)):
try:
file_from_modpath(parts[starti:i+1], path=path,
......@@ -362,7 +398,7 @@ def get_module_files(src_directory, blacklist):
continue
for filename in filenames:
if _is_python_file(filename):
src = join(directory, filename)
src = os.path.join(directory, filename)
files.append(src)
return files
......@@ -381,12 +417,12 @@ def get_source_file(filename, include_no_ext=False):
:rtype: str
:return: the absolute path of the source file if it exists
"""
base, orig_ext = splitext(abspath(filename))
base, orig_ext = os.path.splitext(os.path.abspath(filename))
for ext in PY_SOURCE_EXTS:
source_path = '%s.%s' % (base, ext)
if exists(source_path):
if os.path.exists(source_path):
return source_path
if include_no_ext and not orig_ext and exists(base):
if include_no_ext and not orig_ext and os.path.exists(base):
return base
raise NoSourceFile(filename)
......@@ -396,10 +432,10 @@ def is_python_source(filename):
rtype: bool
return: True if the filename is a python source file
"""
return splitext(filename)[1][1:] in PY_SOURCE_EXTS
return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
def is_standard_module(modname, std_path=(STD_LIB_DIR,)):
def is_standard_module(modname, std_path=None):
"""try to guess if a module is a standard python module (by default,
see `std_path` parameter's description)
......@@ -427,11 +463,13 @@ def is_standard_module(modname, std_path=(STD_LIB_DIR,)):
# (sys and __builtin__ for instance)
if filename is None:
return True
filename = abspath(filename)
if filename.startswith(EXT_LIB_DIR):
filename = _normalize_path(filename)
if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
return False
if std_path is None:
std_path = STD_LIB_DIRS
for path in std_path:
if filename.startswith(_abspath(path)):
if filename.startswith(_cache_normalize_path(path)):
return True
return False
......@@ -452,12 +490,16 @@ def is_relative(modname, from_file):
:return:
true if the module has been imported relatively to `from_file`
"""
if not isdir(from_file):
from_file = dirname(from_file)
if not os.path.isdir(from_file):
from_file = os.path.dirname(from_file)
if from_file in sys.path:
return False
try:
find_module(modname.split('.')[0], [from_file])
stream, _, _ = imp.find_module(modname.split('.')[0], [from_file])
# Close the stream to avoid ResourceWarnings.
if stream:
stream.close()
return True
except ImportError:
return False
......@@ -480,17 +522,18 @@ def _file_from_modpath(modpath, path=None, context=None):
mtype, mp_filename = _module_file(modpath, path)
else:
mtype, mp_filename = _module_file(modpath, path)
if mtype == PY_COMPILED:
if mtype == imp.PY_COMPILED:
try:
return get_source_file(mp_filename)
return get_source_file(mp_filename), imp.PY_SOURCE
except NoSourceFile:
return mp_filename
elif mtype == C_BUILTIN:
return mp_filename, imp.PY_COMPILED
elif mtype == imp.C_BUILTIN:
# integrated builtin module
return None
elif mtype == PKG_DIRECTORY:
return None, imp.C_BUILTIN
elif mtype == imp.PKG_DIRECTORY:
mp_filename = _has_init(mp_filename)
return mp_filename
mtype = imp.PY_SOURCE
return mp_filename, mtype
def _search_zip(modpath, pic):
for filepath, importer in pic.items():
......@@ -499,28 +542,10 @@ def _search_zip(modpath, pic):
if not importer.find_module(os.path.sep.join(modpath)):
raise ImportError('No module named %s in %s/%s' % (
'.'.join(modpath[1:]), filepath, modpath))
return ZIPFILE, abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath
return PY_ZIPMODULE, os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), filepath
raise ImportError('No module named %s' % '.'.join(modpath))
def _abspath(path, _abspathcache={}): #pylint: disable=dangerous-default-value
"""abspath with caching"""
# _module_file calls abspath on every path in sys.path every time it's
# called; on a larger codebase this easily adds up to half a second just
# assembling path components. This cache alleviates that.
try:
return _abspathcache[path]
except KeyError:
if not path: # don't cache result for ''
return abspath(path)
_abspathcache[path] = abspath(path)
return _abspathcache[path]
try:
import pkg_resources
except ImportError:
pkg_resources = None
def _module_file(modpath, path=None):
"""get a module type / file path
......@@ -552,7 +577,10 @@ def _module_file(modpath, path=None):
except AttributeError:
checkeggs = False
# pkg_resources support (aka setuptools namespace packages)
if pkg_resources is not None and modpath[0] in pkg_resources._namespace_packages and len(modpath) > 1:
if (pkg_resources is not None
and modpath[0] in pkg_resources._namespace_packages
and modpath[0] in sys.modules
and len(modpath) > 1):
# setuptools has added into sys.modules a module object with proper
# __path__, get back information from there
module = sys.modules[modpath.pop(0)]
......@@ -570,16 +598,21 @@ def _module_file(modpath, path=None):
# >>> imp.find_module('posix')
# (None, None, ('', '', 6))
try:
_, mp_filename, mp_desc = find_module(modname, path)
stream, mp_filename, mp_desc = imp.find_module(modname, path)
except ImportError:
if checkeggs:
return _search_zip(modpath, pic)[:2]
raise
else:
# Don't forget to close the stream to avoid
# spurious ResourceWarnings.
if stream:
stream.close()
if checkeggs and mp_filename:
fullabspath = [_abspath(x) for x in _path]
fullabspath = [_cache_normalize_path(x) for x in _path]
try:
pathindex = fullabspath.index(dirname(abspath(mp_filename)))
pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename)))
emtype, emp_filename, zippath = _search_zip(modpath, pic)
if pathindex > _path.index(zippath):
# an egg takes priority
......@@ -593,22 +626,22 @@ def _module_file(modpath, path=None):
imported.append(modpath.pop(0))
mtype = mp_desc[2]
if modpath:
if mtype != PKG_DIRECTORY:
if mtype != imp.PKG_DIRECTORY:
raise ImportError('No module %s in %s' % ('.'.join(modpath),
'.'.join(imported)))
# XXX guess if package is using pkgutil.extend_path by looking for
# those keywords in the first four Kbytes
try:
with open(join(mp_filename, '__init__.py')) as stream:
with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream:
data = stream.read(4096)
except IOError:
path = [mp_filename]
else:
if 'pkgutil' in data and 'extend_path' in data:
if b'pkgutil' in data and b'extend_path' in data:
# extend_path is called, search sys.path for module/packages
# of this name see pkgutil.extend_path documentation
path = [join(p, *imported) for p in sys.path
if isdir(join(p, *imported))]
path = [os.path.join(p, *imported) for p in sys.path
if os.path.isdir(os.path.join(p, *imported))]
else:
path = [mp_filename]
return mtype, mp_filename
......@@ -628,8 +661,8 @@ def _has_init(directory):
"""if the given directory has a valid __init__ file, return its path,
else return None
"""
mod_or_pack = join(directory, '__init__')
mod_or_pack = os.path.join(directory, '__init__')
for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
if exists(mod_or_pack + '.' + ext):
if os.path.exists(mod_or_pack + '.' + ext):
return mod_or_pack + '.' + ext
return None
......@@ -20,6 +20,9 @@
import sys
import six
from logilab.common.decorators import cachedproperty
from astroid.exceptions import NoDefault
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
_infer_stmts, YES, BUILTINS)
......@@ -39,7 +42,7 @@ def unpack_infer(stmt, context=None):
yield infered_elt
return
# if infered is a final node, return it and stop
infered = stmt.infer(context).next()
infered = next(stmt.infer(context))
if infered is stmt:
yield infered
return
......@@ -127,8 +130,7 @@ class LookupMixIn(object):
the lookup method
"""
frame, stmts = self.lookup(name)
context = InferenceContext()
return _infer_stmts(stmts, context, frame)
return _infer_stmts(stmts, None, frame)
def _filter_stmts(self, stmts, frame, offset):
"""filter statements to remove ignorable statements.
......@@ -146,6 +148,20 @@ class LookupMixIn(object):
myframe = self.frame().parent.frame()
else:
myframe = self.frame()
# If the frame of this node is the same as the statement
# of this node, then the node is part of a class or
# a function definition and the frame of this node should be
# the upper frame, not the frame of the definition.
# For more information why this is important,
# see Pylint issue #295.
# For example, for 'b', the statement is the same
# as the frame / scope:
#
# def test(b=1):
# ...
if self.statement() is myframe and myframe.parent:
myframe = myframe.parent.frame()
if not myframe is frame or self is frame:
return stmts
mystmt = self.statement()
......@@ -289,6 +305,11 @@ class Arguments(NodeNG, AssignTypeMixin):
return name
return None
@cachedproperty
def fromlineno(self):
lineno = super(Arguments, self).fromlineno
return max(lineno, self.parent.fromlineno)
def format_args(self):
"""return arguments formatted as string"""
result = []
......@@ -475,7 +496,7 @@ class Const(NodeNG, Instance):
self.value = value
def getitem(self, index, context=None):
if isinstance(self.value, basestring):
if isinstance(self.value, six.string_types):
return Const(self.value[index])
raise TypeError('%r (value=%s)' % (self, self.value))
......@@ -483,7 +504,7 @@ class Const(NodeNG, Instance):
return False
def itered(self):
if isinstance(self.value, basestring):
if isinstance(self.value, six.string_types):
return self.value
raise TypeError()
......@@ -528,7 +549,7 @@ class Dict(NodeNG, Instance):
self.items = []
else:
self.items = [(const_factory(k), const_factory(v))
for k, v in items.iteritems()]
for k, v in items.items()]
def pytype(self):
return '%s.dict' % BUILTINS
......@@ -583,7 +604,8 @@ class ExceptHandler(Statement, AssignTypeMixin):
name = None
body = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
if self.name:
return self.name.tolineno
elif self.type:
......@@ -591,11 +613,6 @@ class ExceptHandler(Statement, AssignTypeMixin):
else:
return self.lineno
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self._blockstart_toline()
def catch(self, exceptions):
if self.type is None or exceptions is None:
return True
......@@ -626,7 +643,8 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement):
orelse = None
optional_assign = True
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.iter.tolineno
......@@ -661,7 +679,8 @@ class If(BlockRangeMixIn, Statement):
body = None
orelse = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno
def block_range(self, lineno):
......@@ -812,9 +831,6 @@ class TryExcept(BlockRangeMixIn, Statement):
def _infer_name(self, frame, name):
return name
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno):
"""handle block line numbers range for try/except statements"""
last = None
......@@ -834,9 +850,6 @@ class TryFinally(BlockRangeMixIn, Statement):
body = None
finalbody = None
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno):
"""handle block line numbers range for try/finally statements"""
child = self.body[0]
......@@ -880,7 +893,8 @@ class While(BlockRangeMixIn, Statement):
body = None
orelse = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno
def block_range(self, lineno):
......@@ -894,7 +908,8 @@ class With(BlockRangeMixIn, AssignTypeMixin, Statement):
items = None
body = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.items[-1][0].tolineno
def get_children(self):
......
......@@ -23,7 +23,7 @@ __doctype__ = "restructuredtext en"
from astroid.exceptions import InferenceError, NoDefault, NotFoundError
from astroid.node_classes import unpack_infer
from astroid.bases import copy_context, \
from astroid.bases import InferenceContext, \
raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
from astroid.nodes import const_factory
from astroid import nodes
......@@ -91,7 +91,7 @@ BIN_OP_IMPL = {'+': lambda a, b: a + b,
'<<': lambda a, b: a << b,
'>>': lambda a, b: a >> b,
}
for key, impl in BIN_OP_IMPL.items():
for key, impl in list(BIN_OP_IMPL.items()):
BIN_OP_IMPL[key+'='] = impl
def const_infer_binary_op(self, operator, other, context):
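Wrapping the dictionary's .items() in list() matters on Python 3, where dict.items() is a live view and inserting new keys while iterating over it raises a RuntimeError. A minimal illustration of the pattern (not astroid code):

ops = {'+': lambda a, b: a + b}
# take a snapshot of the current items, then the dict can safely grow
for key, impl in list(ops.items()):
    ops[key + '='] = impl
assert '+=' in ops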
......@@ -282,7 +282,8 @@ def _arguments_infer_argname(self, name, context):
# if there is a default value, yield it. And then yield YES to reflect
# we can't guess given argument value
try:
context = copy_context(context)
if context is None:
context = InferenceContext()
for infered in self.default_value(name).infer(context):
yield infered
yield YES
......@@ -294,13 +295,8 @@ def arguments_assigned_stmts(self, node, context, asspath=None):
if context.callcontext:
# reset call context/name
callcontext = context.callcontext
context = copy_context(context)
context.callcontext = None
for infered in callcontext.infer_argument(self.parent, node.name, context):
yield infered
return
for infered in _arguments_infer_argname(self, node.name, context):
yield infered
return callcontext.infer_argument(self.parent, node.name, context)
return _arguments_infer_argname(self, node.name, context)
nodes.Arguments.assigned_stmts = arguments_assigned_stmts
......
......@@ -25,6 +25,7 @@ import sys
from os.path import abspath
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
ismethoddescriptor, isclass, isbuiltin, ismodule)
import six
from astroid.node_classes import CONST_CLS
from astroid.nodes import (Module, Class, Const, const_factory, From,
......@@ -57,7 +58,10 @@ def attach_dummy_node(node, name, object=_marker):
enode.object = object
_attach_local_node(node, enode, name)
EmptyNode.has_underlying_object = lambda self: self.object is not _marker
def _has_underlying_object(self):
return hasattr(self, 'object') and self.object is not _marker
EmptyNode.has_underlying_object = _has_underlying_object
def attach_const_node(node, name, value):
"""create a Const node and register it in the locals of the given
......@@ -247,10 +251,11 @@ class InspectBuilder(object):
attach_dummy_node(node, name)
continue
if ismethod(member):
member = member.im_func
member = six.get_method_function(member)
if isfunction(member):
# verify this is not an imported function
filename = getattr(member.func_code, 'co_filename', None)
filename = getattr(six.get_function_code(member),
'co_filename', None)
if filename is None:
assert isinstance(member, object)
object_build_methoddescriptor(node, member, name)
......@@ -261,8 +266,6 @@ class InspectBuilder(object):
elif isbuiltin(member):
if (not _io_discrepancy(member) and
self.imported_member(node, member, name)):
#if obj is object:
# print 'skippp', obj, name, member
continue
object_build_methoddescriptor(node, member, name)
elif isclass(member):
......@@ -299,7 +302,7 @@ class InspectBuilder(object):
modname = getattr(member, '__module__', None)
except:
# XXX use logging
print 'unexpected error while building astroid from living object'
print('unexpected error while building astroid from living object')
import traceback
traceback.print_exc()
modname = None
......
......@@ -99,7 +99,6 @@ def _init_set_doc(node, newnode):
newnode.doc = None
try:
if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str):
newnode.tolineno = node.body[0].lineno
newnode.doc = node.body[0].value.s
node.body = node.body[1:]
......@@ -108,10 +107,8 @@ def _init_set_doc(node, newnode):
def _lineno_parent(oldnode, newnode, parent):
newnode.parent = parent
if hasattr(oldnode, 'lineno'):
newnode.lineno = oldnode.lineno
if hasattr(oldnode, 'col_offset'):
newnode.col_offset = oldnode.col_offset
newnode.lineno = oldnode.lineno
newnode.col_offset = oldnode.col_offset
def _set_infos(oldnode, newnode, parent):
newnode.parent = parent
......@@ -119,14 +116,12 @@ def _set_infos(oldnode, newnode, parent):
newnode.lineno = oldnode.lineno
if hasattr(oldnode, 'col_offset'):
newnode.col_offset = oldnode.col_offset
newnode.set_line_info(newnode.last_child()) # set_line_info accepts None
def _create_yield_node(node, parent, rebuilder, factory):
newnode = factory()
_lineno_parent(node, newnode, parent)
if node.value is not None:
newnode.value = rebuilder.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
......@@ -142,14 +137,14 @@ class TreeRebuilder(object):
self._visit_meths = {}
self._transform = manager.transform
def visit_module(self, node, modname, package):
def visit_module(self, node, modname, modpath, package):
"""visit a Module node by returning a fresh instance of it"""
newnode = new.Module(modname, None)
newnode.package = package
_lineno_parent(node, newnode, parent=None)
newnode.parent = None
_init_set_doc(node, newnode)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.set_line_info(newnode.last_child())
newnode.file = newnode.path = modpath
return self._transform(newnode)
def visit(self, node, parent):
......@@ -174,7 +169,7 @@ class TreeRebuilder(object):
def visit_arguments(self, node, parent):
"""visit a Arguments node by returning a fresh instance of it"""
newnode = new.Arguments()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
self.asscontext = "Ass"
newnode.args = [self.visit(child, newnode) for child in node.args]
self.asscontext = None
......@@ -210,7 +205,6 @@ class TreeRebuilder(object):
newnode.parent.set_local(vararg, newnode)
if kwarg:
newnode.parent.set_local(kwarg, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_assattr(self, node, parent):
......@@ -221,7 +215,6 @@ class TreeRebuilder(object):
newnode.expr = self.visit(node.expr, newnode)
self.asscontext = assc
self._delayed_assattr.append(newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_assert(self, node, parent):
......@@ -231,7 +224,6 @@ class TreeRebuilder(object):
newnode.test = self.visit(node.test, newnode)
if node.msg is not None:
newnode.fail = self.visit(node.msg, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_assign(self, node, parent):
......@@ -259,7 +251,6 @@ class TreeRebuilder(object):
meth.extra_decorators.append(newnode.value)
except (AttributeError, KeyError):
continue
newnode.set_line_info(newnode.last_child())
return newnode
def visit_assname(self, node, parent, node_name=None):
......@@ -279,7 +270,6 @@ class TreeRebuilder(object):
newnode.target = self.visit(node.target, newnode)
self.asscontext = None
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_backquote(self, node, parent):
......@@ -287,7 +277,6 @@ class TreeRebuilder(object):
newnode = new.Backquote()
_lineno_parent(node, newnode, parent)
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_binop(self, node, parent):
......@@ -297,7 +286,6 @@ class TreeRebuilder(object):
newnode.left = self.visit(node.left, newnode)
newnode.right = self.visit(node.right, newnode)
newnode.op = _BIN_OP_CLASSES[node.op.__class__]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_boolop(self, node, parent):
......@@ -306,7 +294,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.values = [self.visit(child, newnode) for child in node.values]
newnode.op = _BOOL_OP_CLASSES[node.op.__class__]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_break(self, node, parent):
......@@ -325,8 +312,8 @@ class TreeRebuilder(object):
newnode.starargs = self.visit(node.starargs, newnode)
if node.kwargs is not None:
newnode.kwargs = self.visit(node.kwargs, newnode)
newnode.args.extend(self.visit(child, newnode) for child in node.keywords)
newnode.set_line_info(newnode.last_child())
for child in node.keywords:
newnode.args.append(self.visit(child, newnode))
return newnode
def visit_class(self, node, parent):
......@@ -338,7 +325,6 @@ class TreeRebuilder(object):
newnode.body = [self.visit(child, newnode) for child in node.body]
if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6
newnode.decorators = self.visit_decorators(node, newnode)
newnode.set_line_info(newnode.last_child())
newnode.parent.frame().set_local(newnode.name, newnode)
return newnode
......@@ -361,19 +347,17 @@ class TreeRebuilder(object):
newnode.left = self.visit(node.left, newnode)
newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode))
for (op, expr) in zip(node.ops, node.comparators)]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_comprehension(self, node, parent):
"""visit a Comprehension node by returning a fresh instance of it"""
newnode = new.Comprehension()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
self.asscontext = "Ass"
newnode.target = self.visit(node.target, newnode)
self.asscontext = None
newnode.iter = self.visit(node.iter, newnode)
newnode.ifs = [self.visit(child, newnode) for child in node.ifs]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_decorators(self, node, parent):
......@@ -387,7 +371,6 @@ class TreeRebuilder(object):
else:
decorators = node.decorator_list
newnode.nodes = [self.visit(child, newnode) for child in decorators]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_delete(self, node, parent):
......@@ -397,7 +380,6 @@ class TreeRebuilder(object):
self.asscontext = "Del"
newnode.targets = [self.visit(child, newnode) for child in node.targets]
self.asscontext = None
newnode.set_line_info(newnode.last_child())
return newnode
def visit_dict(self, node, parent):
......@@ -406,7 +388,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.items = [(self.visit(key, newnode), self.visit(value, newnode))
for key, value in zip(node.keys, node.values)]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_dictcomp(self, node, parent):
......@@ -417,7 +398,6 @@ class TreeRebuilder(object):
newnode.value = self.visit(node.value, newnode)
newnode.generators = [self.visit(child, newnode)
for child in node.generators]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_discard(self, node, parent):
......@@ -425,7 +405,6 @@ class TreeRebuilder(object):
newnode = new.Discard()
_lineno_parent(node, newnode, parent)
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_ellipsis(self, node, parent):
......@@ -452,7 +431,6 @@ class TreeRebuilder(object):
newnode.name = self.visit(node.name, newnode)
self.asscontext = None
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_exec(self, node, parent):
......@@ -464,15 +442,13 @@ class TreeRebuilder(object):
newnode.globals = self.visit(node.globals, newnode)
if node.locals is not None:
newnode.locals = self.visit(node.locals, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_extslice(self, node, parent):
"""visit an ExtSlice node by returning a fresh instance of it"""
newnode = new.ExtSlice()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
newnode.dims = [self.visit(dim, newnode) for dim in node.dims]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_for(self, node, parent):
......@@ -485,7 +461,6 @@ class TreeRebuilder(object):
newnode.iter = self.visit(node.iter, newnode)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_from(self, node, parent):
......@@ -514,7 +489,6 @@ class TreeRebuilder(object):
newnode.decorators = self.visit_decorators(node, newnode)
if PY3K and node.returns:
newnode.returns = self.visit(node.returns, newnode)
newnode.set_line_info(newnode.last_child())
self._global_names.pop()
frame = newnode.parent.frame()
if isinstance(frame, new.Class):
......@@ -538,7 +512,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.elt = self.visit(node.elt, newnode)
newnode.generators = [self.visit(child, newnode) for child in node.generators]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_getattr(self, node, parent):
......@@ -558,7 +531,6 @@ class TreeRebuilder(object):
newnode.expr = self.visit(node.value, newnode)
self.asscontext = asscontext
newnode.attrname = node.attr
newnode.set_line_info(newnode.last_child())
return newnode
def visit_global(self, node, parent):
......@@ -577,7 +549,6 @@ class TreeRebuilder(object):
newnode.test = self.visit(node.test, newnode)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_ifexp(self, node, parent):
......@@ -587,7 +558,6 @@ class TreeRebuilder(object):
newnode.test = self.visit(node.test, newnode)
newnode.body = self.visit(node.body, newnode)
newnode.orelse = self.visit(node.orelse, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_import(self, node, parent):
......@@ -604,18 +574,16 @@ class TreeRebuilder(object):
def visit_index(self, node, parent):
"""visit a Index node by returning a fresh instance of it"""
newnode = new.Index()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_keyword(self, node, parent):
"""visit a Keyword node by returning a fresh instance of it"""
newnode = new.Keyword()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
newnode.arg = node.arg
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_lambda(self, node, parent):
......@@ -624,7 +592,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.args = self.visit(node.args, newnode)
newnode.body = self.visit(node.body, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_list(self, node, parent):
......@@ -632,7 +599,6 @@ class TreeRebuilder(object):
newnode = new.List()
_lineno_parent(node, newnode, parent)
newnode.elts = [self.visit(child, newnode) for child in node.elts]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_listcomp(self, node, parent):
......@@ -642,7 +608,6 @@ class TreeRebuilder(object):
newnode.elt = self.visit(node.elt, newnode)
newnode.generators = [self.visit(child, newnode)
for child in node.generators]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_name(self, node, parent):
......@@ -665,7 +630,6 @@ class TreeRebuilder(object):
# XXX REMOVE me :
if self.asscontext in ('Del', 'Ass'): # 'Aug' ??
self._save_assignment(newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_bytes(self, node, parent):
......@@ -700,7 +664,6 @@ class TreeRebuilder(object):
if node.dest is not None:
newnode.dest = self.visit(node.dest, newnode)
newnode.values = [self.visit(child, newnode) for child in node.values]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_raise(self, node, parent):
......@@ -713,7 +676,6 @@ class TreeRebuilder(object):
newnode.inst = self.visit(node.inst, newnode)
if node.tback is not None:
newnode.tback = self.visit(node.tback, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_return(self, node, parent):
......@@ -722,7 +684,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
if node.value is not None:
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_set(self, node, parent):
......@@ -730,7 +691,6 @@ class TreeRebuilder(object):
newnode = new.Set()
_lineno_parent(node, newnode, parent)
newnode.elts = [self.visit(child, newnode) for child in node.elts]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_setcomp(self, node, parent):
......@@ -740,20 +700,18 @@ class TreeRebuilder(object):
newnode.elt = self.visit(node.elt, newnode)
newnode.generators = [self.visit(child, newnode)
for child in node.generators]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_slice(self, node, parent):
"""visit a Slice node by returning a fresh instance of it"""
newnode = new.Slice()
_lineno_parent(node, newnode, parent)
newnode.parent = parent
if node.lower is not None:
newnode.lower = self.visit(node.lower, newnode)
if node.upper is not None:
newnode.upper = self.visit(node.upper, newnode)
if node.step is not None:
newnode.step = self.visit(node.step, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_subscript(self, node, parent):
......@@ -764,7 +722,6 @@ class TreeRebuilder(object):
newnode.value = self.visit(node.value, newnode)
newnode.slice = self.visit(node.slice, newnode)
self.asscontext = subcontext
newnode.set_line_info(newnode.last_child())
return newnode
def visit_tryexcept(self, node, parent):
......@@ -774,7 +731,6 @@ class TreeRebuilder(object):
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_tryfinally(self, node, parent):
......@@ -783,7 +739,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_tuple(self, node, parent):
......@@ -791,7 +746,6 @@ class TreeRebuilder(object):
newnode = new.Tuple()
_lineno_parent(node, newnode, parent)
newnode.elts = [self.visit(child, newnode) for child in node.elts]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_unaryop(self, node, parent):
......@@ -800,7 +754,6 @@ class TreeRebuilder(object):
_lineno_parent(node, newnode, parent)
newnode.operand = self.visit(node.operand, newnode)
newnode.op = _UNARY_OP_CLASSES[node.op.__class__]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_while(self, node, parent):
......@@ -810,7 +763,6 @@ class TreeRebuilder(object):
newnode.test = self.visit(node.test, newnode)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_with(self, node, parent):
......@@ -825,7 +777,6 @@ class TreeRebuilder(object):
self.asscontext = None
newnode.items = [(expr, vars)]
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_yield(self, node, parent):
......@@ -867,7 +818,6 @@ class TreeRebuilder3k(TreeRebuilder):
if node.name is not None:
newnode.name = self.visit_assname(node, newnode, node.name)
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_nonlocal(self, node, parent):
......@@ -885,7 +835,6 @@ class TreeRebuilder3k(TreeRebuilder):
newnode.exc = self.visit(node.exc, newnode)
if node.cause is not None:
newnode.cause = self.visit(node.cause, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_starred(self, node, parent):
......@@ -893,7 +842,6 @@ class TreeRebuilder3k(TreeRebuilder):
newnode = new.Starred()
_lineno_parent(node, newnode, parent)
newnode.value = self.visit(node.value, newnode)
newnode.set_line_info(newnode.last_child())
return newnode
def visit_try(self, node, parent):
......@@ -908,7 +856,6 @@ class TreeRebuilder3k(TreeRebuilder):
excnode.body = [self.visit(child, excnode) for child in node.body]
excnode.handlers = [self.visit(child, excnode) for child in node.handlers]
excnode.orelse = [self.visit(child, excnode) for child in node.orelse]
excnode.set_line_info(excnode.last_child())
newnode.body = [excnode]
else:
newnode.body = [self.visit(child, newnode) for child in node.body]
......@@ -918,7 +865,6 @@ class TreeRebuilder3k(TreeRebuilder):
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_with(self, node, parent):
......@@ -940,7 +886,6 @@ class TreeRebuilder3k(TreeRebuilder):
newnode.items = [visit_child(child)
for child in node.items]
newnode.body = [self.visit(child, newnode) for child in node.body]
newnode.set_line_info(newnode.last_child())
return newnode
def visit_yieldfrom(self, node, parent):
......
......@@ -30,6 +30,7 @@ try:
except ImportError:
from cStringIO import StringIO as BytesIO
import six
from logilab.common.compat import builtins
from logilab.common.decorators import cached, cachedproperty
......@@ -39,7 +40,7 @@ from astroid.node_classes import Const, DelName, DelAttr, \
Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \
LookupMixIn, const_factory as cf, unpack_infer, Name, CallFunc
from astroid.bases import NodeNG, InferenceContext, Instance,\
YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \
YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, \
BUILTINS
from astroid.mixins import FilterStmtsMixin
from astroid.bases import Statement
......@@ -191,7 +192,7 @@ class LocalsDictNodeNG(LookupMixIn, NodeNG):
"""method from the `dict` interface returning a tuple containing
locally defined names
"""
return self.locals.keys()
return list(self.locals.keys())
def values(self):
"""method from the `dict` interface returning a tuple containing
......@@ -204,7 +205,7 @@ class LocalsDictNodeNG(LookupMixIn, NodeNG):
containing each locally defined name with its associated node,
which is an instance of `Function` or `Class`
"""
return zip(self.keys(), self.values())
return list(zip(self.keys(), self.values()))
def __contains__(self, name):
......@@ -256,7 +257,7 @@ class Module(LocalsDictNodeNG):
self.body = []
self.future_imports = set()
@property
@cachedproperty
def file_stream(self):
if self.file_bytes is not None:
return BytesIO(self.file_bytes)
......@@ -311,10 +312,10 @@ class Module(LocalsDictNodeNG):
"""inferred getattr"""
# set lookup name since this is necessary to infer on import nodes for
# instance
context = copy_context(context)
context.lookupname = name
if not context:
context = InferenceContext()
try:
return _infer_stmts(self.getattr(name, context), context, frame=self)
return _infer_stmts(self.getattr(name, context), context, frame=self, lookupname=name)
except NotFoundError:
raise InferenceError(name)
......@@ -339,13 +340,17 @@ class Module(LocalsDictNodeNG):
return
if sys.version_info < (2, 8):
def absolute_import_activated(self):
@cachedproperty
def _absolute_import_activated(self):
for stmt in self.locals.get('absolute_import', ()):
if isinstance(stmt, From) and stmt.modname == '__future__':
return True
return False
else:
absolute_import_activated = lambda self: True
_absolute_import_activated = True
def absolute_import_activated(self):
return self._absolute_import_activated
def import_module(self, modname, relative_only=False, level=None):
"""import the given module considering self as context"""
......@@ -408,24 +413,43 @@ class Module(LocalsDictNodeNG):
#
# We separate the different steps of lookup in try/excepts
# to avoid catching too many Exceptions
# However, we can not analyse dynamically constructed __all__
default = [name for name in self.keys() if not name.startswith('_')]
try:
all = self['__all__']
except KeyError:
return [name for name in self.keys() if not name.startswith('_')]
return default
try:
explicit = all.assigned_stmts().next()
explicit = next(all.assigned_stmts())
except InferenceError:
return [name for name in self.keys() if not name.startswith('_')]
return default
except AttributeError:
# not an assignment node
# XXX infer?
return [name for name in self.keys() if not name.startswith('_')]
return default
# Try our best to detect the exported name.
infered = []
try:
# should be a Tuple/List of constant string / 1 string not allowed
return [const.value for const in explicit.elts]
except AttributeError:
return [name for name in self.keys() if not name.startswith('_')]
explicit = next(explicit.infer())
except InferenceError:
return default
if not isinstance(explicit, (Tuple, List)):
return default
str_const = lambda node: (isinstance(node, Const) and
isinstance(node.value, six.string_types))
for node in explicit.elts:
if str_const(node):
infered.append(node.value)
else:
try:
infered_node = next(node.infer())
except InferenceError:
continue
if str_const(infered_node):
infered.append(infered_node.value)
return infered
class ComprehensionScope(LocalsDictNodeNG):
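The rewritten wildcard_import_names above stops assuming that __all__ holds only string constants: it now infers each element and keeps the ones that resolve to strings, falling back to the public-names default otherwise. The kind of module it has to cope with, sketched with made-up names:

# hypothetical module under analysis: only the names listed in __all__
# should be exported by `from module import *`
CONST = 42
_private = 'hidden'

def public_func():
    return CONST

_name = 'CONST'                      # not a string literal in __all__ itself...
__all__ = ['public_func', _name]     # ...so resolving it needs inference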
......@@ -488,7 +512,7 @@ def _infer_decorator_callchain(node):
while True:
if isinstance(current, CallFunc):
try:
current = current.func.infer().next()
current = next(current.func.infer())
except InferenceError:
return
elif isinstance(current, Function):
......@@ -498,7 +522,11 @@ def _infer_decorator_callchain(node):
# TODO: We don't handle multiple inference results right now,
# because there's no flow to reason when the return
# is what we are looking for, a static or a class method.
result = current.infer_call_result(current.parent).next()
result = next(current.infer_call_result(current.parent))
if current is result:
# This will lead to an infinite loop, where a decorator
# returns itself.
return
except (StopIteration, InferenceError):
return
if isinstance(result, (Function, CallFunc)):
......@@ -629,22 +657,25 @@ class Function(Statement, Lambda):
self.locals = {}
self.args = []
self.body = []
self.decorators = None
self.name = name
self.doc = doc
self.extra_decorators = []
self.instance_attrs = {}
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
# lineno is the line number of the first decorator, we want the def statement lineno
@cachedproperty
def fromlineno(self):
# lineno is the line number of the first decorator, we want the def
# statement lineno
lineno = self.lineno
if self.decorators is not None:
self.fromlineno += sum(node.tolineno - node.lineno + 1
lineno += sum(node.tolineno - node.lineno + 1
for node in self.decorators.nodes)
if self.args.fromlineno < self.fromlineno:
self.args.fromlineno = self.fromlineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self.args.tolineno
return lineno
@cachedproperty
def blockstart_tolineno(self):
return self.args.tolineno
def block_range(self, lineno):
"""return block line numbers.
......@@ -697,7 +728,7 @@ class Function(Statement, Lambda):
if self.decorators:
for node in self.decorators.nodes:
try:
infered = node.infer().next()
infered = next(node.infer())
except InferenceError:
continue
if infered and infered.qname() in ('abc.abstractproperty',
......@@ -718,17 +749,32 @@ class Function(Statement, Lambda):
def is_generator(self):
"""return true if this is a generator function"""
# XXX should be flagged, not computed
try:
return self.nodes_of_class((Yield, YieldFrom),
skip_klass=(Function, Lambda)).next()
except StopIteration:
return False
return next(self.nodes_of_class((Yield, YieldFrom),
skip_klass=(Function, Lambda)), False)
def infer_call_result(self, caller, context=None):
"""infer what a function is returning when called"""
if self.is_generator():
yield Generator()
return
# This is really a gigantic hack to work around metaclass generators
# that return transient class-generating functions. Pylint's AST structure
# cannot handle a base class object that is only used for calling __new__,
# but does not contribute to the inheritance structure itself. We inject
# a fake class into the hierarchy here for several well-known metaclass
# generators, and filter it out later.
if (self.name == 'with_metaclass' and
len(self.args.args) == 1 and
self.args.vararg is not None):
metaclass = next(caller.args[0].infer(context))
if isinstance(metaclass, Class):
c = Class('temporary_class', None)
c.hide = True
c.parent = self
c.bases = [next(b.infer(context)) for b in caller.args[1:]]
c._metaclass = metaclass
yield c
return
returns = self.nodes_of_class(Return, skip_klass=Function)
for returnnode in returns:
if returnnode.value is None:
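The with_metaclass special case above targets the compatibility helper popularised by six, which builds a throwaway intermediate class so that the real class is created with the desired metaclass on both Python 2 and 3. What the user code being accommodated typically looks like (illustrative only):

import abc
import six

class Base(six.with_metaclass(abc.ABCMeta, object)):
    # the transient class returned by with_metaclass is not a real ancestor;
    # astroid's stand-in is flagged with hide = True and filtered out later
    @abc.abstractmethod
    def run(self):
        pass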
......@@ -810,7 +856,6 @@ def _class_type(klass, ancestors=None):
klass._type = 'class'
return 'class'
ancestors.add(klass)
# print >> sys.stderr, '_class_type', repr(klass)
for base in klass.ancestors(recurs=False):
name = _class_type(base, ancestors)
if name != 'class':
......@@ -845,6 +890,8 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
blockstart_tolineno = None
_type = None
_metaclass_hack = False
hide = False
type = property(_class_type,
doc="class'type, possible values are 'class' | "
"'metaclass' | 'interface' | 'exception'")
......@@ -880,12 +927,12 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
doc="boolean indicating if it's a new style class"
"or not")
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno
if lastchild is not None:
self.tolineno = lastchild.tolineno
# else this is a class with only a docstring, then tolineno is (should be) already ok
@cachedproperty
def blockstart_tolineno(self):
if self.bases:
return self.bases[-1].tolineno
else:
return self.fromlineno
def block_range(self, lineno):
"""return block line numbers.
......@@ -905,24 +952,25 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
def callable(self):
return True
def _is_subtype_of(self, type_name):
def is_subtype_of(self, type_name, context=None):
if self.qname() == type_name:
return True
for anc in self.ancestors():
for anc in self.ancestors(context=context):
if anc.qname() == type_name:
return True
def infer_call_result(self, caller, context=None):
"""infer what a class is returning when called"""
if self._is_subtype_of('%s.type' % (BUILTINS,)) and len(caller.args) == 3:
name_node = caller.args[0].infer().next()
if isinstance(name_node, Const) and isinstance(name_node.value, basestring):
if self.is_subtype_of('%s.type' % (BUILTINS,), context) and len(caller.args) == 3:
name_node = next(caller.args[0].infer(context))
if (isinstance(name_node, Const) and
isinstance(name_node.value, six.string_types)):
name = name_node.value
else:
yield YES
return
result = Class(name, None)
bases = caller.args[1].infer().next()
bases = next(caller.args[1].infer(context))
if isinstance(bases, (Tuple, List)):
result.bases = bases.itered()
else:
......@@ -961,34 +1009,39 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
ancestors only
"""
# FIXME: should be possible to choose the resolution order
# XXX inference make infinite loops possible here (see BaseTransformer
# manipulation in the builder module for instance)
# FIXME: inference make infinite loops possible here
yielded = set([self])
if context is None:
context = InferenceContext()
if sys.version_info[0] >= 3:
if not self.bases and self.qname() != 'builtins.object':
yield builtin_lookup("object")[1][0]
return
for stmt in self.bases:
with context.restore_path():
try:
for baseobj in stmt.infer(context):
if not isinstance(baseobj, Class):
if isinstance(baseobj, Instance):
baseobj = baseobj._proxied
else:
# duh ?
continue
try:
for baseobj in stmt.infer(context):
if not isinstance(baseobj, Class):
if isinstance(baseobj, Instance):
baseobj = baseobj._proxied
else:
# duh ?
continue
if not baseobj.hide:
if baseobj in yielded:
continue # cf xxx above
yielded.add(baseobj)
yield baseobj
if recurs:
for grandpa in baseobj.ancestors(True, context):
if grandpa in yielded:
continue # cf xxx above
yielded.add(grandpa)
yield grandpa
except InferenceError:
# XXX log error ?
continue
if recurs:
for grandpa in baseobj.ancestors(recurs=True,
context=context):
if grandpa in yielded:
continue # cf xxx above
yielded.add(grandpa)
yield grandpa
except InferenceError:
# XXX log error ?
continue
def local_attr_ancestors(self, name, context=None):
"""return an iterator on astroid representation of parent classes
......@@ -1083,11 +1136,11 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
"""
# set lookup name since this is necessary to infer on import nodes for
# instance
context = copy_context(context)
context.lookupname = name
if not context:
context = InferenceContext()
try:
for infered in _infer_stmts(self.getattr(name, context), context,
frame=self):
frame=self, lookupname=name):
# yield YES object instead of descriptors when necessary
if not isinstance(infered, Const) and isinstance(infered, Instance):
try:
......@@ -1178,6 +1231,16 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
having a ``__metaclass__`` class attribute, or if there are
no explicit bases but there is a global ``__metaclass__`` variable.
"""
for base in self.bases:
try:
for baseobj in base.infer():
if isinstance(baseobj, Class) and baseobj.hide:
self._metaclass = baseobj._metaclass
self._metaclass_hack = True
break
except InferenceError:
pass
if self._metaclass:
# Expects this from Py3k TreeRebuilder
try:
......@@ -1202,7 +1265,7 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
return None
try:
infered = assignment.infer().next()
infered = next(assignment.infer())
except InferenceError:
return
if infered is YES: # don't expose this
......@@ -1224,6 +1287,9 @@ class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
break
return klass
def has_metaclass_hack(self):
return self._metaclass_hack
def _islots(self):
""" Return an iterator with the inferred slots. """
if '__slots__' not in self.locals:
......
......@@ -18,6 +18,7 @@
"""this module contains some utilities to navigate in the tree or to
extract information from it
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
......@@ -109,22 +110,22 @@ def _check_children(node):
for child in node.get_children():
ok = False
if child is None:
print "Hm, child of %s is None" % node
print("Hm, child of %s is None" % node)
continue
if not hasattr(child, 'parent'):
print " ERROR: %s has child %s %x with no parent" % (
node, child, id(child))
print(" ERROR: %s has child %s %x with no parent" % (
node, child, id(child)))
elif not child.parent:
print " ERROR: %s has child %s %x with parent %r" % (
node, child, id(child), child.parent)
print(" ERROR: %s has child %s %x with parent %r" % (
node, child, id(child), child.parent))
elif child.parent is not node:
print " ERROR: %s %x has child %s %x with wrong parent %s" % (
node, id(node), child, id(child), child.parent)
print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
node, id(node), child, id(child), child.parent))
else:
ok = True
if not ok:
print "lines;", node.lineno, child.lineno
print "of module", node.root(), node.root().name
print("lines;", node.lineno, child.lineno)
print("of module", node.root(), node.root().name)
raise AstroidBuildingException
_check_children(child)
......
URL: http://www.logilab.org/project/logilab-common
Version: 0.63.1
Version: 0.63.2
License: GPL
License File: LICENSE.txt
......
......@@ -25,7 +25,7 @@ modname = 'common'
subpackage_of = 'logilab'
subpackage_master = True
numversion = (0, 63, 0)
numversion = (0, 63, 2)
version = '.'.join([str(num) for num in numversion])
license = 'LGPL' # 2.1 or later
......@@ -43,6 +43,8 @@ include_dirs = [join('test', 'data')]
install_requires = [
'six >= 1.4.0',
]
test_require = ['pytz']
if sys.version_info < (2, 7):
install_requires.append('unittest2 >= 0.5.1')
if os.name == 'nt':
......
......@@ -33,15 +33,17 @@ Example::
help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille"))
def do_pionce(self):
print 'nap is good'
print('nap is good')
help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille"))
def do_ronfle(self):
print 'fuuuuuuuuuuuu rhhhhhrhrhrrh'
print('fuuuuuuuuuuuu rhhhhhrhrhrrh')
cl = BookShell()
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
from six.moves import builtins, input
......@@ -66,7 +68,7 @@ def init_readline(complete_method, histfile=None):
import atexit
atexit.register(readline.write_history_file, histfile)
except:
print 'readline is not available :-('
print('readline is not available :-(')
class Completer :
......@@ -157,10 +159,10 @@ class CLIHelper:
return self.commands.keys()
def _print_help(self, cmd, syntax, explanation):
print _('Command %s') % cmd
print _('Syntax: %s') % syntax
print '\t', explanation
print
print(_('Command %s') % cmd)
print(_('Syntax: %s') % syntax)
print('\t', explanation)
print()
# predefined commands #####################################################
......@@ -170,20 +172,20 @@ class CLIHelper:
if command in self._command_help:
self._print_help(*self._command_help[command])
elif command is None or command not in self._topics:
print _("Use help <topic> or help <command>.")
print _("Available topics are:")
print(_("Use help <topic> or help <command>."))
print(_("Available topics are:"))
topics = sorted(self._topics.keys())
for topic in topics:
print '\t', topic
print
print _("Available commands are:")
print('\t', topic)
print()
print(_("Available commands are:"))
commands = self.commands.keys()
commands.sort()
for command in commands:
print '\t', command[len(self.CMD_PREFIX):]
print('\t', command[len(self.CMD_PREFIX):])
else:
print _('Available commands about %s:') % command
print(_('Available commands about %s:') % command)
print
for command_help_method in self._topics[command]:
try:
......@@ -194,8 +196,8 @@ class CLIHelper:
except:
import traceback
traceback.print_exc()
print 'ERROR in help method %s'% (
command_help_method.__name__)
print('ERROR in help method %s'% (
command_help_method.__name__))
help_do_help = ("help", "help [topic|command]",
_("print help message for the given topic/command or \
......
......@@ -409,21 +409,20 @@ def rest_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using as ReST formatted output"""
encoding = _get_encoding(encoding, stream)
if section:
print >> stream, '%s\n%s' % (section, "'"*len(section))
print('%s\n%s' % (section, "'"*len(section)), file=stream)
if doc:
print >> stream, _encode(normalize_text(doc, line_len=79, indent=''),
encoding)
print >> stream
print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
print(file=stream)
for optname, optdict, value in options:
help = optdict.get('help')
print >> stream, ':%s:' % optname
print(':%s:' % optname, file=stream)
if help:
help = normalize_text(help, line_len=79, indent=' ')
print >> stream, _encode(help, encoding)
print(_encode(help, encoding), file=stream)
if value:
value = _encode(format_option_value(optdict, value), encoding)
print >> stream, ''
print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``")
print(file=stream)
print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
# Options Manager ##############################################################
......
......@@ -51,7 +51,7 @@ def setugid(user):
os.environ['HOME'] = passwd.pw_dir
def daemonize(pidfile=None, uid=None, umask=077):
def daemonize(pidfile=None, uid=None, umask=0o77):
"""daemonize a Unix process. Set paranoid umask by default.
Return 1 in the original process, 2 in the first fork, and None for the
......@@ -71,9 +71,6 @@ def daemonize(pidfile=None, uid=None, umask=077):
return 2
# move to the root to avoit mount pb
os.chdir('/')
# set umask if specified
if umask is not None:
os.umask(umask)
# redirect standard descriptors
null = os.open('/dev/null', os.O_RDWR)
for i in range(3):
......@@ -95,7 +92,9 @@ def daemonize(pidfile=None, uid=None, umask=077):
f = file(pidfile, 'w')
f.write(str(os.getpid()))
f.close()
os.chmod(pidfile, 0644)
# set umask if specified
if umask is not None:
os.umask(umask)
# change process uid
if uid:
setugid(uid)
......
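The umask=077 -> umask=0o77 change is needed because Python 3 dropped the bare leading-zero octal notation; 0o77 is the spelling accepted by both interpreters. For instance:

import os

previous = os.umask(0o77)   # 0o77 == 63; writing 077 is a SyntaxError on Python 3
os.umask(previous)          # restore the process's original umask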
......@@ -314,7 +314,7 @@ def ustrftime(somedate, fmt='%Y-%m-%d'):
def utcdatetime(dt):
if dt.tzinfo is None:
return dt
return datetime(*dt.utctimetuple()[:7])
return (dt.replace(tzinfo=None) - dt.utcoffset())
def utctime(dt):
if dt.tzinfo is None:
......
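The new utcdatetime body converts an aware datetime to a naive UTC one by dropping the tzinfo and subtracting the UTC offset; unlike the old utctimetuple() round-trip it also keeps the microseconds. A quick standard-library check of the arithmetic:

from datetime import datetime, timedelta, tzinfo

class FixedOffset(tzinfo):
    # +02:00, just for the example
    def utcoffset(self, dt):
        return timedelta(hours=2)
    def dst(self, dt):
        return timedelta(0)

aware = datetime(2014, 11, 30, 14, 30, 0, 123456, tzinfo=FixedOffset())
naive_utc = aware.replace(tzinfo=None) - aware.utcoffset()
assert naive_utc == datetime(2014, 11, 30, 12, 30, 0, 123456)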
......@@ -30,6 +30,7 @@ Usage:
http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html
page says code is "available as is without any warranty or support".
"""
from __future__ import print_function
import struct
import os, os.path
......@@ -79,7 +80,7 @@ class Dbase:
def open(self, db_name):
filesize = os.path.getsize(db_name)
if filesize <= 68:
raise IOError, 'The file is not large enough to be a dbf file'
raise IOError('The file is not large enough to be a dbf file')
self.fdb = open(db_name, 'rb')
......@@ -152,7 +153,7 @@ class Dbase:
This function accept record number from 0 to N-1
"""
if rec_no < 0 or rec_no > self.num_records:
raise Exception, 'Unable to extract data outside the range'
raise Exception('Unable to extract data outside the range')
offset = self.header['Record Size'] * rec_no
data = self.db_data[offset:offset+self.row_len]
......@@ -227,4 +228,4 @@ def readDbf(filename):
if __name__=='__main__':
rec = readDbf('dbf/sptable.dbf')
for line in rec:
print '%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip())
print('%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()))
......@@ -125,11 +125,12 @@ class DeprecationManager(object):
return self.class_deprecated(version)(old_name, (new_class,), clsdict)
except (NameError, TypeError):
# old-style class
warn = self.warn
class DeprecatedClass(new_class):
"""FIXME: There might be a better way to handle old/new-style class
"""
def __init__(self, *args, **kwargs):
self.warn(version, message, stacklevel=3)
warn(version, message, stacklevel=3)
new_class.__init__(self, *args, **kwargs)
return DeprecatedClass
......
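The warn = self.warn line added above fixes a name-capture bug: inside the nested DeprecatedClass.__init__, self is the instance being constructed, not the DeprecationManager, so self.warn(...) would not resolve; capturing the bound method in a local first lets the inner class reach it through the closure. The same pattern in isolation (hypothetical names):

def make_deprecated(manager, new_class, message):
    warn = manager.warn                      # bound method of the *manager*
    class Deprecated(new_class):
        def __init__(self, *args, **kwargs):
            warn(message)                    # resolved via the closure, not self
            new_class.__init__(self, *args, **kwargs)
    return Deprecated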
......@@ -29,6 +29,8 @@ Example:
With mymod.build that defines two functions run and add_options
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
from warnings import warn
......@@ -55,9 +57,9 @@ class OptionParser(optparse.OptionParser):
def print_main_help(self):
optparse.OptionParser.print_help(self)
print '\ncommands:'
print('\ncommands:')
for cmdname, (_, help) in self._commands.items():
print '% 10s - %s' % (cmdname, help)
print('% 10s - %s' % (cmdname, help))
def parse_command(self, args):
if len(args) == 0:
......@@ -78,7 +80,7 @@ class OptionParser(optparse.OptionParser):
# optparse inserts self.description between usage and options help
self.description = help
if isinstance(mod_or_f, str):
exec 'from %s import run, add_options' % mod_or_f
exec('from %s import run, add_options' % mod_or_f)
else:
run, add_options = mod_or_f
add_options(self)
......
......@@ -119,12 +119,14 @@ from time import time, clock
import warnings
import types
from inspect import isgeneratorfunction, isclass
from contextlib import contextmanager
from logilab.common.fileutils import abspath_listdir
from logilab.common import textutils
from logilab.common import testlib, STD_BLACKLIST
# use the same unittest module as testlib
from logilab.common.testlib import unittest, start_interactive_mode
from logilab.common.deprecation import deprecated
import doctest
import unittest as unittest_legacy
......@@ -145,28 +147,41 @@ except ImportError:
CONF_FILE = 'pytestconf.py'
## coverage hacks, do not read this, do not read this, do not read this
## coverage pausing tools
@contextmanager
def replace_trace(trace=None):
"""A context manager that temporary replaces the trace function"""
oldtrace = sys.gettrace()
sys.settrace(trace)
try:
yield
finally:
# specific hack to work around a bug in pycoverage, see
# https://bitbucket.org/ned/coveragepy/issue/123
if (oldtrace is not None and not callable(oldtrace) and
hasattr(oldtrace, 'pytrace')):
oldtrace = oldtrace.pytrace
sys.settrace(oldtrace)
def pause_trace():
"""A context manager that temporary pauses any tracing"""
return replace_trace()
# hey, but this is an aspect, right ?!!!
class TraceController(object):
nesting = 0
ctx_stack = []
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def pause_tracing(cls):
if not cls.nesting:
cls.tracefunc = staticmethod(getattr(sys, '__settrace__', sys.settrace))
cls.oldtracer = getattr(sys, '__tracer__', None)
sys.__notrace__ = True
cls.tracefunc(None)
cls.nesting += 1
pause_tracing = classmethod(pause_tracing)
cls.ctx_stack.append(pause_trace())
cls.ctx_stack[-1].__enter__()
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def resume_tracing(cls):
cls.nesting -= 1
assert cls.nesting >= 0
if not cls.nesting:
cls.tracefunc(cls.oldtracer)
delattr(sys, '__notrace__')
resume_tracing = classmethod(resume_tracing)
cls.ctx_stack.pop().__exit__(None, None, None)
pause_tracing = TraceController.pause_tracing
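replace_trace/pause_trace turn the old manual pause/resume bookkeeping into context managers, so the coverage trace function is restored even when the guarded code raises. Intended usage, roughly:

import sys

with pause_trace():
    # tracing is switched off inside the block...
    assert sys.gettrace() is None
# ...and the previous trace function, if any, is reinstated on exit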
......@@ -174,20 +189,18 @@ resume_tracing = TraceController.resume_tracing
def nocoverage(func):
"""Function decorator that pauses tracing functions"""
if hasattr(func, 'uncovered'):
return func
func.uncovered = True
def not_covered(*args, **kwargs):
pause_tracing()
try:
with pause_trace():
return func(*args, **kwargs)
finally:
resume_tracing()
not_covered.uncovered = True
return not_covered
## end of coverage hacks
## end of coverage pausing tools
TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$")
......@@ -1082,8 +1095,14 @@ class NonStrictTestLoader(unittest.TestLoader):
testCaseClass)
return [testname for testname in testnames if not is_skipped(testname)]
# The 2 functions below are modified versions of the TestSuite.run method
# that is provided with unittest2 for python 2.6, in unittest2/suite.py
# It is used to monkeypatch the original implementation to support
# extra runcondition and options arguments (see in testlib.py)
def _ts_run(self, result, runcondition=None, options=None):
self._wrapped_run(result,runcondition=runcondition, options=options)
self._wrapped_run(result, runcondition=runcondition, options=options)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
......@@ -1097,10 +1116,17 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
# --- modifications to deal with _wrapped_run ---
# original code is:
#
# if not debug:
# test(result)
# else:
# test.debug()
if hasattr(test, '_wrapped_run'):
try:
test._wrapped_run(result, debug, runcondition=runcondition, options=options)
......@@ -1113,6 +1139,25 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
test(result)
else:
test.debug()
# --- end of modifications to deal with _wrapped_run ---
return result
if sys.version_info >= (2, 7):
# The function below implements a modified version of the
# TestSuite.run method that is provided with python 2.7, in
# unittest/suite.py
def _ts_run(self, result, debug=False, runcondition=None, options=None):
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
self._wrapped_run(result, debug, runcondition, options)
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
def enable_dbc(*args):
......
......@@ -42,13 +42,13 @@ def layout_title(layout):
"""
for child in layout.children:
if isinstance(child, Title):
return ' '.join([node.data for node in get_nodes(child, Text)])
return u' '.join([node.data for node in get_nodes(child, Text)])
def build_summary(layout, level=1):
"""make a summary for the report, including X level"""
assert level > 0
level -= 1
summary = List(klass='summary')
summary = List(klass=u'summary')
for child in layout.children:
if not isinstance(child, Section):
continue
......@@ -57,7 +57,7 @@ def build_summary(layout, level=1):
continue
if not child.id:
child.id = label.replace(' ', '-')
node = Link('#'+child.id, label=label or child.id)
node = Link(u'#'+child.id, label=label or child.id)
# FIXME: Three following lines produce not very compliant
# docbook: there are some useless <para><para>. They might be
# replaced by the three commented lines but this then produces
......@@ -99,7 +99,7 @@ class BaseWriter(object):
for child in getattr(layout, 'children', ()):
child.accept(self)
def writeln(self, string=''):
def writeln(self, string=u''):
"""write a line in the output buffer"""
self.write(string + linesep)
......@@ -132,7 +132,7 @@ class BaseWriter(object):
result[-1].append(cell)
# fill missing cells
while len(result[-1]) < cols:
result[-1].append('')
result[-1].append(u'')
return result
def compute_content(self, layout):
......@@ -147,7 +147,7 @@ class BaseWriter(object):
stream.write(data)
except UnicodeEncodeError:
stream.write(data.encode(self.encoding))
def writeln(data=''):
def writeln(data=u''):
try:
stream.write(data+linesep)
except UnicodeEncodeError:
......
......@@ -27,8 +27,8 @@ from logilab.common.textutils import linesep
from logilab.common.ureports import BaseWriter
TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
BULLETS = ['*', '-']
TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^']
BULLETS = [u'*', u'-']
class TextWriter(BaseWriter):
"""format layouts as text
......@@ -48,13 +48,13 @@ class TextWriter(BaseWriter):
if self.pending_urls:
self.writeln()
for label, url in self.pending_urls:
self.writeln('.. _`%s`: %s' % (label, url))
self.writeln(u'.. _`%s`: %s' % (label, url))
self.pending_urls = []
self.section -= 1
self.writeln()
def visit_title(self, layout):
title = ''.join(list(self.compute_content(layout)))
title = u''.join(list(self.compute_content(layout)))
self.writeln(title)
try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title))
......@@ -88,19 +88,19 @@ class TextWriter(BaseWriter):
def default_table(self, layout, table_content, cols_width):
"""format a table"""
cols_width = [size+1 for size in cols_width]
format_strings = ' '.join(['%%-%ss'] * len(cols_width))
format_strings = u' '.join([u'%%-%ss'] * len(cols_width))
format_strings = format_strings % tuple(cols_width)
format_strings = format_strings.split(' ')
table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n'
headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n'
# FIXME: layout.cheaders
self.write(table_linesep)
for i in range(len(table_content)):
self.write('|')
self.write(u'|')
line = table_content[i]
for j in range(len(line)):
self.write(format_strings[j] % line[j])
self.write('|')
self.write(u'|')
if i == 0 and layout.rheaders:
self.write(headsep)
else:
......@@ -109,7 +109,7 @@ class TextWriter(BaseWriter):
def field_table(self, layout, table_content, cols_width):
"""special case for field table"""
assert layout.cols == 2
format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0])
format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0])
for field, value in table_content:
self.write(format_string % (field, value))
......@@ -120,14 +120,14 @@ class TextWriter(BaseWriter):
indent = ' ' * self.list_level
self.list_level += 1
for child in layout.children:
self.write('%s%s%s ' % (linesep, indent, bullet))
self.write(u'%s%s%s ' % (linesep, indent, bullet))
child.accept(self)
self.list_level -= 1
def visit_link(self, layout):
"""add a hyperlink"""
if layout.label != layout.url:
self.write('`%s`_' % layout.label)
self.write(u'`%s`_' % layout.label)
self.pending_urls.append( (layout.label, layout.url) )
else:
self.write(layout.url)
......@@ -135,11 +135,11 @@ class TextWriter(BaseWriter):
def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;)
"""
self.writeln('::\n')
self.writeln(u'::\n')
for line in layout.data.splitlines():
self.writeln(' ' + line)
self.writeln(u' ' + line)
self.writeln()
def visit_text(self, layout):
"""add some text"""
self.write(layout.data)
self.write(u'%s' % layout.data)
......
from __future__ import print_function
import logging
import urllib2
......@@ -84,4 +86,4 @@ if __name__ == '__main__':
# test with url sys.argv[1]
h = HTTPGssapiAuthHandler()
response = urllib2.build_opener(h, ch).open(sys.argv[1])
print '\nresponse: %s\n--------------\n' % response.code, response.info()
print('\nresponse: %s\n--------------\n' % response.code, response.info())
......
URL: http://www.pylint.org/
Version: 1.3.1
Version: 1.4.0
License: GPL
License File: LICENSE.txt
......@@ -7,4 +7,4 @@ Description:
This directory contains the pylint module.
Local Modifications:
None
- applied upstream fix https://bitbucket.org/logilab/pylint/commits/5df347467ee0
......@@ -15,6 +15,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
from .__pkginfo__ import version as __version__
def run_pylint():
"""run pylint"""
from pylint.lint import Run
......
......@@ -15,18 +15,14 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""pylint packaging information"""
import sys
from __future__ import absolute_import
modname = distname = 'pylint'
numversion = (1, 3, 1)
numversion = (1, 4, 0)
version = '.'.join([str(num) for num in numversion])
if sys.version_info < (2, 6):
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1',
'StringFormat']
else:
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1']
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.2', 'six']
license = 'GPL'
description = "python code static checker"
......
......@@ -30,7 +30,9 @@ Base id of standard checkers (used in msg and report ids):
12: logging
13: string_format
14: string_constant
15-50: not yet used: reserved for future internal checkers.
15: stdlib
16: python3
17-50: not yet used: reserved for future internal checkers.
51-99: perhaps used: reserved for external checkers
The raw_metrics checker has no number associated since it doesn't emit any
......@@ -46,6 +48,8 @@ from logilab.common.configuration import OptionsProviderMixIn
from pylint.reporters import diff_string
from pylint.utils import register_plugins
from pylint.interfaces import UNDEFINED
def table_lines_from_stats(stats, old_stats, columns):
"""get values listed in <columns> from <stats> and <old_stats>,
......@@ -55,7 +59,7 @@ def table_lines_from_stats(stats, old_stats, columns):
lines = []
for m_type in columns:
new = stats[m_type]
format = str
format = str # pylint: disable=redefined-builtin
if isinstance(new, float):
format = lambda num: '%.3f' % num
old = old_stats.get(m_type)
......@@ -80,6 +84,8 @@ class BaseChecker(OptionsProviderMixIn):
msgs = {}
# reports issued by this checker
reports = ()
# mark this checker as enabled or not.
enabled = True
def __init__(self, linter=None):
"""checker instances should have the linter as argument
......@@ -90,9 +96,9 @@ class BaseChecker(OptionsProviderMixIn):
OptionsProviderMixIn.__init__(self)
self.linter = linter
def add_message(self, msg_id, line=None, node=None, args=None):
def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
"""add a message of a given type"""
self.linter.add_message(msg_id, line, node, args)
self.linter.add_message(msg_id, line, node, args, confidence)
# dummy methods implementing the IChecker interface
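add_message now forwards an explicit confidence value to the linter, which pylint 1.4 uses to let users filter reports by how certain the checker was. Inside a checker this looks roughly as follows; 'my-message-id' and the two predicate helpers are placeholders, while HIGH and INFERENCE come from pylint.interfaces as imported later in this diff:

from pylint.interfaces import HIGH, INFERENCE

def visit_callfunc(self, node):
    if looks_definitely_wrong(node):            # hypothetical helper
        self.add_message('my-message-id', node=node, confidence=HIGH)
    elif inferred_to_be_wrong(node):            # hypothetical helper
        self.add_message('my-message-id', node=node, confidence=INFERENCE)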
......@@ -103,31 +109,6 @@ class BaseChecker(OptionsProviderMixIn):
"""called after visiting project (i.e set of modules)"""
class BaseRawChecker(BaseChecker):
"""base class for raw checkers"""
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readline method
"""
warnings.warn("Modules that need access to the tokens should "
"use the ITokenChecker interface.",
DeprecationWarning)
stream = node.file_stream
stream.seek(0) # XXX may be removed with astroid > 0.23
if sys.version_info <= (3, 0):
self.process_tokens(tokenize.generate_tokens(stream.readline))
else:
self.process_tokens(tokenize.tokenize(stream.readline))
def process_tokens(self, tokens):
"""should be overridden by subclasses"""
raise NotImplementedError()
class BaseTokenChecker(BaseChecker):
"""Base class for checkers that want to have access to the token stream."""
......
......@@ -16,13 +16,21 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""basic checker for Python code"""
import collections
import itertools
import sys
import astroid
import re
import six
from six.moves import zip # pylint: disable=redefined-builtin
from logilab.common.ureports import Table
from astroid import are_exclusive, InferenceError
import astroid
import astroid.bases
from astroid import are_exclusive, InferenceError
from pylint.interfaces import IAstroidChecker
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
from pylint.utils import EmptyReport
from pylint.reporters import diff_string
from pylint.checkers import BaseChecker
......@@ -34,12 +42,12 @@ from pylint.checkers.utils import (
overrides_a_method,
safe_infer,
get_argument_from_call,
has_known_bases,
NoSuchArgumentError,
is_import_error,
)
import re
# regex for class/function/variable/constant name
CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
......@@ -54,16 +62,42 @@ REVERSED_METHODS = (('__getitem__', '__len__'),
PY33 = sys.version_info >= (3, 3)
PY3K = sys.version_info >= (3, 0)
BAD_FUNCTIONS = ['map', 'filter', 'apply']
BAD_FUNCTIONS = ['map', 'filter']
if sys.version_info < (3, 0):
BAD_FUNCTIONS.append('input')
BAD_FUNCTIONS.append('file')
# Name categories that are always consistent with all naming conventions.
EXEMPT_NAME_CATEGORIES = set(('exempt', 'ignore'))
# A mapping from builtin-qname -> symbol, to be used when generating messages
# about dangerous default values as arguments
DEFAULT_ARGUMENT_SYMBOLS = dict(
zip(['.'.join([astroid.bases.BUILTINS, x]) for x in ('set', 'dict', 'list')],
['set()', '{}', '[]'])
)
del re
def _redefines_import(node):
""" Detect that the given node (AssName) is inside an
exception handler and redefines an import from the tryexcept body.
Returns True if the node redefines an import, False otherwise.
"""
current = node
while current and not isinstance(current.parent, astroid.ExceptHandler):
current = current.parent
if not current or not is_import_error(current.parent):
return False
try_block = current.parent.parent
for import_node in try_block.nodes_of_class((astroid.From, astroid.Import)):
for name, alias in import_node.names:
if alias:
if alias == node.name:
return True
elif name == node.name:
return True
return False
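As an aside, the idiom this helper recognizes is the usual optional-import fallback; the module name below is only illustrative:

    try:
        import simplejson as json
    except ImportError:
        json = None    # redefines the imported name inside the handler

The NameChecker changes further down use this so that the `json = None` assignment is no longer reported as an invalid constant name.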
def in_loop(node):
"""return True if the node is inside a kind of for loop"""
parent = node.parent
......@@ -94,6 +128,7 @@ def _loop_exits_early(loop):
for child in loop.body:
if isinstance(child, loop_nodes):
# break statement may be in orelse of child loop.
# pylint: disable=superfluous-parens
for orelse in (child.orelse or ()):
for _ in orelse.nodes_of_class(astroid.Break, skip_klass=loop_nodes):
return True
......@@ -102,6 +137,13 @@ def _loop_exits_early(loop):
return True
return False
def _is_multi_naming_match(match, node_type, confidence):
return (match is not None and
match.lastgroup is not None and
match.lastgroup not in EXEMPT_NAME_CATEGORIES
and (node_type != 'method' or confidence != INFERENCE_FAILURE))
if sys.version_info < (3, 0):
PROPERTY_CLASSES = set(('__builtin__.property', 'abc.abstractproperty'))
else:
......@@ -144,18 +186,19 @@ def decorated_with_abc(func):
if func.decorators:
for node in func.decorators.nodes:
try:
infered = node.infer().next()
infered = next(node.infer())
except InferenceError:
continue
if infered and infered.qname() in ABC_METHODS:
return True
def has_abstract_methods(node):
""" Determine if the given `node` has
"""
Determine if the given `node` has
abstract methods, defined with `abc` module.
"""
return any(decorated_with_abc(meth)
for meth in node.mymethods())
for meth in node.methods())
def report_by_type_stats(sect, stats, old_stats):
"""make a report of
......@@ -258,8 +301,7 @@ class BasicErrorChecker(_BasicChecker):
'E0110': ('Abstract class with abstract methods instantiated',
'abstract-class-instantiated',
'Used when an abstract class with `abc.ABCMeta` as metaclass '
'has abstract methods and is instantiated.',
{'minversion': (3, 0)}),
'has abstract methods and is instantiated.'),
'W0120': ('Else clause on loop without a break statement',
'useless-else-on-loop',
'Loops should only have an else clause if they can exit early '
......@@ -349,24 +391,23 @@ class BasicErrorChecker(_BasicChecker):
abc.ABCMeta as metaclass.
"""
try:
infered = node.func.infer().next()
infered = next(node.func.infer())
except astroid.InferenceError:
return
if not isinstance(infered, astroid.Class):
return
# __init__ was called
metaclass = infered.metaclass()
abstract_methods = has_abstract_methods(infered)
if metaclass is None:
# Python 3.4 has `abc.ABC`, which won't be detected
# by ClassNode.metaclass()
for ancestor in infered.ancestors():
if (ancestor.qname() == 'abc.ABC' and
has_abstract_methods(infered)):
if ancestor.qname() == 'abc.ABC' and abstract_methods:
self.add_message('abstract-class-instantiated', node=node)
break
return
if (metaclass.qname() == 'abc.ABCMeta' and
has_abstract_methods(infered)):
if metaclass.qname() == 'abc.ABCMeta' and abstract_methods:
self.add_message('abstract-class-instantiated', node=node)
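By way of illustration, with the minversion restriction removed the warning now fires on Python 2 as well, and the abc.ABC special case covers hypothetical Python 3.4+ code like this (which would also fail with a TypeError at runtime):

    import abc

    class Loader(abc.ABC):
        @abc.abstractmethod
        def load(self):
            pass

    Loader()    # abstract-class-instantiated

The same applies to classes declaring abc.ABCMeta as their metaclass directly.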
def _check_else_on_loop(self, node):
......@@ -446,8 +487,9 @@ functions, methods
'times.'),
'W0122': ('Use of exec',
'exec-used',
'Used when you use the "exec" statement (function for Python 3), to discourage its '
'usage. That doesn\'t mean you can not use it !'),
'Used when you use the "exec" statement (function for Python '
'3), to discourage its usage. That doesn\'t '
'mean you can not use it !'),
'W0123': ('Use of eval',
'eval-used',
'Used when you use the "eval" function, to discourage its '
......@@ -476,12 +518,6 @@ functions, methods
'A call of assert on a tuple will always evaluate to true if '
'the tuple is not empty, and will always evaluate to false if '
'it is.'),
'W0121': ('Use raise ErrorClass(args) instead of raise ErrorClass, args.',
'old-raise-syntax',
"Used when the alternate raise syntax 'raise foo, bar' is used "
"instead of 'raise foo(bar)'.",
{'maxversion': (3, 0)}),
'C0121': ('Missing required attribute "%s"', # W0103
'missing-module-attribute',
'Used when an attribute required for modules is missing.'),
......@@ -523,6 +559,7 @@ functions, methods
self._tryfinallys = []
self.stats = self.linter.add_stats(module=0, function=0,
method=0, class_=0)
@check_messages('missing-module-attribute')
def visit_module(self, node):
"""check module name, docstring and required arguments
......@@ -532,7 +569,7 @@ functions, methods
if attr not in node:
self.add_message('missing-module-attribute', node=node, args=attr)
def visit_class(self, node):
def visit_class(self, node): # pylint: disable=unused-argument
"""check module name, docstring and redefinition
increment branch counter
"""
......@@ -544,7 +581,7 @@ functions, methods
"""check for various kind of statements without effect"""
expr = node.value
if isinstance(expr, astroid.Const) and isinstance(expr.value,
basestring):
six.string_types):
# treat string statement in a separated message
# Handle PEP-257 attribute docstrings.
# An attribute docstring is defined as being a string right after
......@@ -621,13 +658,13 @@ functions, methods
# ordinary_args[i].name == call.args[i].name.
if len(ordinary_args) != len(call.args):
return
for i in xrange(len(ordinary_args)):
for i in range(len(ordinary_args)):
if not isinstance(call.args[i], astroid.Name):
return
if node.args.args[i].name != call.args[i].name:
return
if (isinstance(node.body.func, astroid.Getattr) and
isinstance(node.body.func.expr, astroid.CallFunc)):
isinstance(node.body.func.expr, astroid.CallFunc)):
# Chained call, the intermediate call might
# return something else (but we don't check that, yet).
return
......@@ -642,18 +679,26 @@ functions, methods
# check for dangerous default values as arguments
for default in node.args.defaults:
try:
value = default.infer().next()
value = next(default.infer())
except astroid.InferenceError:
continue
builtins = astroid.bases.BUILTINS
if (isinstance(value, astroid.Instance) and
value.qname() in ['.'.join([builtins, x]) for x in ('set', 'dict', 'list')]):
value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
if value is default:
msg = default.as_string()
msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
elif type(value) is astroid.Instance:
msg = '%s (%s)' % (default.as_string(), value.qname())
if isinstance(default, astroid.CallFunc):
# this argument is direct call to list() or dict() etc
msg = '%s() (%s)' % (value.name, value.qname())
else:
# this argument is a variable from somewhere else which turns
# out to be a list or dict
msg = '%s (%s)' % (default.as_string(), value.qname())
else:
msg = '%s (%s)' % (default.as_string(), value.as_string())
# this argument is a name
msg = '%s (%s)' % (default.as_string(),
DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
self.add_message('dangerous-default-value', node=node, args=(msg,))
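A rough illustration of what the reworked message covers (function names are invented); the new code distinguishes a literal or direct constructor call from a name that merely infers to a mutable builtin, and words the hint accordingly:

    def remember(key, value, cache={}):     # dangerous-default-value: literal {}
        cache[key] = value
        return cache

    def tag(item, seen=set()):              # dangerous-default-value: direct set() call
        seen.add(item)
        return seen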
@check_messages('unreachable', 'lost-exception')
......@@ -686,16 +731,12 @@ functions, methods
# 2 - Is it inside final body of a try...finally bloc ?
self._check_not_in_finally(node, 'break', (astroid.For, astroid.While,))
@check_messages('unreachable', 'old-raise-syntax')
@check_messages('unreachable')
def visit_raise(self, node):
"""check if the node has a right sibling (if so, that's some unreachable
code)
"""
self._check_unreachable(node)
if sys.version_info >= (3, 0):
return
if node.exc is not None and node.inst is not None and node.tback is None:
self.add_message('old-raise-syntax', node=node)
@check_messages('exec-used')
def visit_exec(self, node):
......@@ -758,7 +799,7 @@ functions, methods
"""update try...finally flag"""
self._tryfinallys.append(node)
def leave_tryfinally(self, node):
def leave_tryfinally(self, node): # pylint: disable=unused-argument
"""update try...finally flag"""
self._tryfinallys.pop()
......@@ -796,11 +837,11 @@ functions, methods
if argument is astroid.YES:
return
if argument is None:
# nothing was infered
# try to see if we have iter()
# Nothing was infered.
# Try to see if we have iter().
if isinstance(node.args[0], astroid.CallFunc):
try:
func = node.args[0].func.infer().next()
func = next(node.args[0].func.infer())
except InferenceError:
return
if (getattr(func, 'name', None) == 'iter' and
......@@ -828,9 +869,9 @@ functions, methods
else:
break
else:
# check if it is a .deque. It doesn't seem that
# Check if it is a .deque. It doesn't seem that
# we can retrieve special methods
# from C implemented constructs
# from C implemented constructs.
if argument._proxied.qname().endswith(".deque"):
return
self.add_message('bad-reversed-sequence', node=node)
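For context, a sketch of what this part of the check accepts and rejects (assuming the usual builtins):

    from collections import deque

    reversed([3, 1, 2])           # fine: a real sequence
    reversed(deque([1, 2, 3]))    # fine: deque is special-cased above
    reversed(iter([1, 2, 3]))     # bad-reversed-sequence (and a TypeError at runtime)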
......@@ -853,7 +894,7 @@ _NAME_TYPES = {
def _create_naming_options():
name_options = []
for name_type, (rgx, human_readable_name) in _NAME_TYPES.iteritems():
for name_type, (rgx, human_readable_name) in six.iteritems(_NAME_TYPES):
name_type = name_type.replace('_', '-')
name_options.append((
'%s-rgx' % (name_type,),
......@@ -907,6 +948,7 @@ class NameChecker(_BasicChecker):
_BasicChecker.__init__(self, linter)
self._name_category = {}
self._name_group = {}
self._bad_names = {}
def open(self):
self.stats = self.linter.add_stats(badname_module=0,
......@@ -924,11 +966,30 @@ class NameChecker(_BasicChecker):
@check_messages('blacklisted-name', 'invalid-name')
def visit_module(self, node):
self._check_name('module', node.name.split('.')[-1], node)
self._bad_names = {}
def leave_module(self, node): # pylint: disable=unused-argument
for all_groups in six.itervalues(self._bad_names):
if len(all_groups) < 2:
continue
groups = collections.defaultdict(list)
min_warnings = sys.maxsize
for group in six.itervalues(all_groups):
groups[len(group)].append(group)
min_warnings = min(len(group), min_warnings)
if len(groups[min_warnings]) > 1:
by_line = sorted(groups[min_warnings],
key=lambda group: min(warning[0].lineno for warning in group))
warnings = itertools.chain(*by_line[1:])
else:
warnings = groups[min_warnings][0]
for args in warnings:
self._raise_name_warning(*args)
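Loosely, the effect is: when a name regex defines several named groups, offenders are buffered per group and, at module close, only the minority spelling is reported. With a hypothetical option such as

    method-rgx=(?P<snake>[a-z_][a-z0-9_]{2,30}$)|(?P<camel>_?[a-z][a-zA-Z0-9]{2,30}$)

a class written mostly in snake_case gets invalid-name only on the odd camelCase method:

    class Api(object):
        def fetch_user(self):      # snake group, majority: accepted
            pass
        def fetch_token(self):     # snake group
            pass
        def fetchSecret(self):     # camel group, minority: invalid-name
            pass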
@check_messages('blacklisted-name', 'invalid-name')
def visit_class(self, node):
self._check_name('class', node.name, node)
for attr, anodes in node.instance_attrs.iteritems():
for attr, anodes in six.iteritems(node.instance_attrs):
if not list(node.instance_attr_ancestors(attr)):
self._check_name('attr', attr, anodes[0])
......@@ -936,10 +997,15 @@ class NameChecker(_BasicChecker):
def visit_function(self, node):
# Do not emit any warnings if the method is just an implementation
# of a base class method.
if node.is_method() and overrides_a_method(node.parent.frame(), node.name):
return
confidence = HIGH
if node.is_method():
if overrides_a_method(node.parent.frame(), node.name):
return
confidence = (INFERENCE if has_known_bases(node.parent.frame())
else INFERENCE_FAILURE)
self._check_name(_determine_function_name_type(node),
node.name, node)
node.name, node, confidence)
# Check argument names
args = node.args.args
if args is not None:
......@@ -962,13 +1028,17 @@ class NameChecker(_BasicChecker):
if isinstance(safe_infer(ass_type.value), astroid.Class):
self._check_name('class', node.name, node)
else:
self._check_name('const', node.name, node)
if not _redefines_import(node):
# Don't emit if the name redefines an import
# in an ImportError except handler.
self._check_name('const', node.name, node)
elif isinstance(ass_type, astroid.ExceptHandler):
self._check_name('variable', node.name, node)
elif isinstance(frame, astroid.Function):
# global introduced variable aren't in the function locals
if node.name in frame and node.name not in frame.argnames():
self._check_name('variable', node.name, node)
if not _redefines_import(node):
self._check_name('variable', node.name, node)
elif isinstance(frame, astroid.Class):
if not list(frame.local_attr_ancestors(node.name)):
self._check_name('class_attribute', node.name, node)
......@@ -984,12 +1054,16 @@ class NameChecker(_BasicChecker):
def _find_name_group(self, node_type):
return self._name_group.get(node_type, node_type)
def _is_multi_naming_match(self, match):
return (match is not None and
match.lastgroup is not None and
match.lastgroup not in EXEMPT_NAME_CATEGORIES)
def _raise_name_warning(self, node, node_type, name, confidence):
type_label = _NAME_TYPES[node_type][1]
hint = ''
if self.config.include_naming_hint:
hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
self.add_message('invalid-name', node=node, args=(type_label, name, hint),
confidence=confidence)
self.stats['badname_' + node_type] += 1
def _check_name(self, node_type, name, node):
def _check_name(self, node_type, name, node, confidence=HIGH):
"""check for a name using the type's regexp"""
if is_inside_except(node):
clobbering, _ = clobber_in_except(node)
......@@ -1004,20 +1078,14 @@ class NameChecker(_BasicChecker):
regexp = getattr(self.config, node_type + '_rgx')
match = regexp.match(name)
if self._is_multi_naming_match(match):
if _is_multi_naming_match(match, node_type, confidence):
name_group = self._find_name_group(node_type)
if name_group not in self._name_category:
self._name_category[name_group] = match.lastgroup
elif self._name_category[name_group] != match.lastgroup:
match = None
bad_name_group = self._bad_names.setdefault(name_group, {})
warnings = bad_name_group.setdefault(match.lastgroup, [])
warnings.append((node, node_type, name, confidence))
if match is None:
type_label = _NAME_TYPES[node_type][1]
hint = ''
if self.config.include_naming_hint:
hint = ' (hint: %s)' % (getattr(self.config, node_type + '_name_hint'))
self.add_message('invalid-name', node=node, args=(type_label, name, hint))
self.stats['badname_' + node_type] += 1
self._raise_name_warning(node, node_type, name, confidence)
class DocStringChecker(_BasicChecker):
......@@ -1061,12 +1129,15 @@ class DocStringChecker(_BasicChecker):
def visit_class(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
self._check_docstring('class', node)
@check_messages('missing-docstring', 'empty-docstring')
def visit_function(self, node):
if self.config.no_docstring_rgx.match(node.name) is None:
ftype = node.is_method() and 'method' or 'function'
if isinstance(node.parent.frame(), astroid.Class):
overridden = False
confidence = (INFERENCE if has_known_bases(node.parent.frame())
else INFERENCE_FAILURE)
# check if node is from a method overridden by its ancestor
for ancestor in node.parent.frame().ancestors():
if node.name in ancestor and \
......@@ -1074,11 +1145,13 @@ class DocStringChecker(_BasicChecker):
overridden = True
break
self._check_docstring(ftype, node,
report_missing=not overridden)
report_missing=not overridden,
confidence=confidence)
else:
self._check_docstring(ftype, node)
def _check_docstring(self, node_type, node, report_missing=True):
def _check_docstring(self, node_type, node, report_missing=True,
confidence=HIGH):
"""check the node has a non empty docstring"""
docstring = node.doc
if docstring is None:
......@@ -1094,20 +1167,22 @@ class DocStringChecker(_BasicChecker):
return
self.stats['undocumented_'+node_type] += 1
if (node.body and isinstance(node.body[0], astroid.Discard) and
isinstance(node.body[0].value, astroid.CallFunc)):
isinstance(node.body[0].value, astroid.CallFunc)):
# Most likely a string with a format call. Let's see.
func = safe_infer(node.body[0].value.func)
if (isinstance(func, astroid.BoundMethod)
and isinstance(func.bound, astroid.Instance)):
and isinstance(func.bound, astroid.Instance)):
# Strings in Python 3, others in Python 2.
if PY3K and func.bound.name == 'str':
return
elif func.bound.name in ('str', 'unicode', 'bytes'):
return
self.add_message('missing-docstring', node=node, args=(node_type,))
self.add_message('missing-docstring', node=node, args=(node_type,),
confidence=confidence)
elif not docstring.strip():
self.stats['undocumented_'+node_type] += 1
self.add_message('empty-docstring', node=node, args=(node_type,))
self.add_message('empty-docstring', node=node, args=(node_type,),
confidence=confidence)
class PassChecker(_BasicChecker):
......
......@@ -18,16 +18,20 @@
from __future__ import generators
import sys
from collections import defaultdict
import astroid
from astroid import YES, Instance, are_exclusive, AssAttr, Class
from astroid.bases import Generator
from astroid.bases import Generator, BUILTINS
from astroid.inference import InferenceContext
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
PYMETHODS, overrides_a_method, check_messages, is_attr_private,
is_attr_protected, node_frame_class, safe_infer)
is_attr_protected, node_frame_class, safe_infer, is_builtin_object,
decorated_with_property)
import six
if sys.version_info >= (3, 0):
NEXT_METHOD = '__next__'
......@@ -35,6 +39,32 @@ else:
NEXT_METHOD = 'next'
ITER_METHODS = ('__iter__', '__getitem__')
def _called_in_methods(func, klass, methods):
""" Check if the func was called in any of the given methods,
belonging to the *klass*. Returns True if so, False otherwise.
"""
if not isinstance(func, astroid.Function):
return False
for method in methods:
try:
infered = klass.getattr(method)
except astroid.NotFoundError:
continue
for infer_method in infered:
for callfunc in infer_method.nodes_of_class(astroid.CallFunc):
try:
bound = next(callfunc.func.infer())
except (astroid.InferenceError, StopIteration):
continue
if not isinstance(bound, astroid.BoundMethod):
continue
func_obj = bound._proxied
if isinstance(func_obj, astroid.UnboundMethod):
func_obj = func_obj._proxied
if func_obj.name == func.name:
return True
return False
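The practical effect, sketched with invented names, is that attributes set in a helper which is itself invoked from a defining method no longer trip attribute-defined-outside-init:

    class Config(object):
        def __init__(self):
            self._load_defaults()

        def _load_defaults(self):
            self.timeout = 30    # exempt: _load_defaults is called from __init__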
def class_is_abstract(node):
"""return true if the given class node should be considered as an abstract
class
......@@ -45,11 +75,39 @@ def class_is_abstract(node):
return True
return False
def _is_attribute_property(name, klass):
""" Check if the given attribute *name* is a property
in the given *klass*.
It will look for `property` calls or for functions
with the given name, decorated by `property` or `property`
subclasses.
Returns ``True`` if the name is a property in the given klass,
``False`` otherwise.
"""
try:
attributes = klass.getattr(name)
except astroid.NotFoundError:
return False
property_name = "{0}.property".format(BUILTINS)
for attr in attributes:
try:
infered = next(attr.infer())
except astroid.InferenceError:
continue
if (isinstance(infered, astroid.Function) and
decorated_with_property(infered)):
return True
if infered.pytype() == property_name:
return True
return False
MSGS = {
'F0202': ('Unable to check methods signature (%s / %s)',
'method-check-failed',
'Used when PyLint has been unable to check methods signature \
'Used when Pylint has been unable to check methods signature \
compatibility for an unexpected reason. Please report this kind \
if you don\'t make sense of it.'),
......@@ -136,7 +194,7 @@ MSGS = {
),
'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224
'unresolved-interface',
'Used when a PyLint as failed to find interfaces implemented by \
'Used when Pylint has failed to find interfaces implemented by \
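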
a class'),
......@@ -172,7 +230,11 @@ MSGS = {
'E0238': ('Invalid __slots__ object',
'invalid-slots',
'Used when an invalid __slots__ is found in class. '
'Only a string, an iterable or a sequence is permitted.')
'Only a string, an iterable or a sequence is permitted.'),
'E0239': ('Inheriting %r, which is not a class.',
'inherit-non-class',
'Used when a class inherits from something which is not a '
'class.'),
}
......@@ -234,7 +296,16 @@ a class method.'}
'help' : 'List of valid names for the first argument in \
a metaclass class method.'}
),
)
('exclude-protected',
{
'default': (
# namedtuple public API.
'_asdict', '_fields', '_replace', '_source', '_make'),
'type': 'csv',
'metavar': '<protected access exclusions>',
'help': ('List of member names, which should be excluded '
'from the protected access warning.')}
))
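An illustration of the default exclusions, which exist so that the documented namedtuple helpers do not raise protected-access:

    import collections

    Point = collections.namedtuple('Point', 'x y')
    origin = Point(0, 0)
    moved = origin._replace(x=1)     # no protected-access: '_replace' is excluded by default
    names = origin._fields           # likewise '_fields'

The option is a plain csv list, so projects can append their own tolerated members.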
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
......@@ -245,7 +316,7 @@ a metaclass class method.'}
def visit_class(self, node):
"""init visit variable _accessed and check interfaces
"""
self._accessed.append({})
self._accessed.append(defaultdict(list))
self._check_bases_classes(node)
self._check_interfaces(node)
# if not an interface, exception, metaclass
......@@ -255,8 +326,27 @@ a metaclass class method.'}
except astroid.NotFoundError:
self.add_message('no-init', args=node, node=node)
self._check_slots(node)
self._check_proper_bases(node)
@check_messages('inherit-non-class')
def _check_proper_bases(self, node):
"""
Detect that a class inherits something which is not
a class or a type.
"""
for base in node.bases:
ancestor = safe_infer(base)
if ancestor in (YES, None):
continue
if (isinstance(ancestor, astroid.Instance) and
ancestor.is_subtype_of('%s.type' % (BUILTINS,))):
continue
if not isinstance(ancestor, astroid.Class):
self.add_message('inherit-non-class',
args=base.as_string(), node=node)
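A minimal example of what the new message targets (it would also fail at runtime with a TypeError, which is exactly what the checker warns about in advance):

    VERSION = 42

    class Broken(VERSION):    # inherit-non-class: 42 is not a class
        pass

Instances that infer to a subtype of type (e.g. the result of a metaclass call) are deliberately let through by the isinstance/is_subtype_of branch above.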
@check_messages('access-member-before-definition', 'attribute-defined-outside-init')
@check_messages('access-member-before-definition',
'attribute-defined-outside-init')
def leave_class(self, cnode):
"""close a class node:
check that instance attributes are defined in __init__ and check
......@@ -271,7 +361,7 @@ a metaclass class method.'}
return
defining_methods = self.config.defining_attr_methods
current_module = cnode.root()
for attr, nodes in cnode.instance_attrs.iteritems():
for attr, nodes in six.iteritems(cnode.instance_attrs):
# skip nodes which are not in the current module and it may screw up
# the output, while it's not worth it
nodes = [n for n in nodes if not
......@@ -301,6 +391,12 @@ a metaclass class method.'}
except astroid.NotFoundError:
for node in nodes:
if node.frame().name not in defining_methods:
# If the attribute was set by a callfunc in any
# of the defining methods, then don't emit
# the warning.
if _called_in_methods(node.frame(), cnode,
defining_methods):
continue
self.add_message('attribute-defined-outside-init',
args=attr, node=node)
......@@ -348,7 +444,7 @@ a metaclass class method.'}
and overridden_frame.type == 'method'):
overridden_frame = overridden_frame.parent.frame()
if (isinstance(overridden_frame, Class)
and klass._is_subtype_of(overridden_frame.qname())):
and klass.is_subtype_of(overridden_frame.qname())):
args = (overridden.root().name, overridden.fromlineno)
self.add_message('method-hidden', args=args, node=node)
except astroid.NotFoundError:
......@@ -466,7 +562,7 @@ a metaclass class method.'}
attrname = node.attrname
# Check self
if self.is_first_attr(node):
self._accessed[-1].setdefault(attrname, []).append(node)
self._accessed[-1][attrname].append(node)
return
if not self.linter.is_message_enabled('protected-access'):
return
......@@ -475,7 +571,7 @@ a metaclass class method.'}
def visit_assattr(self, node):
if isinstance(node.ass_type(), astroid.AugAssign) and self.is_first_attr(node):
self._accessed[-1].setdefault(node.attrname, []).append(node)
self._accessed[-1][node.attrname].append(node)
self._check_in_slots(node)
def _check_in_slots(self, node):
......@@ -500,6 +596,10 @@ a metaclass class method.'}
# If we have a '__dict__' in slots, then
# assigning any name is valid.
if not any(slot.value == '__dict__' for slot in slots):
if _is_attribute_property(node.attrname, klass):
# Properties circumvent the slots mechanism,
# so we should not emit a warning for them.
return
self.add_message('assigning-non-slot',
args=(node.attrname, ), node=node)
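For illustration, the property exemption covers the common pattern of exposing a slot through a property (names invented):

    class Point(object):
        __slots__ = ('_x',)

        def __init__(self, x):
            self.x = x            # no assigning-non-slot: 'x' is a property below

        @property
        def x(self):
            return self._x

        @x.setter
        def x(self, value):
            self._x = value       # the actual slot write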
......@@ -526,7 +626,8 @@ a metaclass class method.'}
'''
attrname = node.attrname
if is_attr_protected(attrname):
if (is_attr_protected(attrname) and
attrname not in self.config.exclude_protected):
klass = node_frame_class(node)
......@@ -549,6 +650,23 @@ a metaclass class method.'}
# We are in a class, one remaining valid cases, Klass._attr inside
# Klass
if not (callee == klass.name or callee in klass.basenames):
# Detect property assignments in the body of the class.
# This is acceptable:
#
# class A:
# b = property(lambda: self._b)
stmt = node.parent.statement()
try:
if (isinstance(stmt, astroid.Assign) and
(stmt in klass.body or klass.parent_of(stmt)) and
isinstance(stmt.value, astroid.CallFunc) and
isinstance(stmt.value.func, astroid.Name) and
stmt.value.func.name == 'property' and
is_builtin_object(next(stmt.value.func.infer(), None))):
return
except astroid.InferenceError:
pass
self.add_message('protected-access', node=node, args=attrname)
def visit_name(self, node):
......@@ -562,7 +680,7 @@ a metaclass class method.'}
def _check_accessed_members(self, node, accessed):
"""check that accessed members are defined"""
# XXX refactor, probably much simpler now that E0201 is in type checker
for attr, nodes in accessed.iteritems():
for attr, nodes in six.iteritems(accessed):
# deactivate "except doesn't do anything", that's expected
# pylint: disable=W0704
try:
......@@ -574,7 +692,7 @@ a metaclass class method.'}
pass
# is it an instance attribute of a parent class ?
try:
node.instance_attr_ancestors(attr).next()
next(node.instance_attr_ancestors(attr))
# yes, stop here
continue
except StopIteration:
......@@ -606,7 +724,8 @@ a metaclass class method.'}
lno = defstmt.fromlineno
for _node in nodes:
if _node.frame() is frame and _node.fromlineno < lno \
and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')):
and not are_exclusive(_node.statement(), defstmt,
('AttributeError', 'Exception', 'BaseException')):
self.add_message('access-member-before-definition',
node=_node, args=(attr, lno))
......@@ -649,7 +768,8 @@ a metaclass class method.'}
else:
self._check_first_arg_config(
first,
self.config.valid_classmethod_first_arg, node, 'bad-mcs-method-argument',
self.config.valid_classmethod_first_arg, node,
'bad-mcs-method-argument',
node.name)
# regular class
else:
......@@ -657,7 +777,8 @@ a metaclass class method.'}
if node.type == 'classmethod':
self._check_first_arg_config(
first,
self.config.valid_classmethod_first_arg, node, 'bad-classmethod-argument',
self.config.valid_classmethod_first_arg, node,
'bad-classmethod-argument',
node.name)
# regular method without self as argument
elif first != 'self':
......@@ -719,7 +840,8 @@ a metaclass class method.'}
try:
method = node_method(node, name)
except astroid.NotFoundError:
self.add_message('missing-interface-method', args=(name, iface.name),
self.add_message('missing-interface-method',
args=(name, iface.name),
node=node)
continue
# ignore inherited methods
......@@ -762,17 +884,29 @@ a metaclass class method.'}
expr.expr.func.name == 'super':
return
try:
klass = expr.expr.infer().next()
klass = next(expr.expr.infer())
if klass is YES:
continue
# The infered klass can be super(), which was
# assigned to a variable and the `__init__` was called later.
#
# base = super()
# base.__init__(...)
if (isinstance(klass, astroid.Instance) and
isinstance(klass._proxied, astroid.Class) and
is_builtin_object(klass._proxied) and
klass._proxied.name == 'super'):
return
try:
del not_called_yet[klass]
except KeyError:
if klass not in to_call:
self.add_message('non-parent-init-called', node=expr, args=klass.name)
self.add_message('non-parent-init-called',
node=expr, args=klass.name)
except astroid.InferenceError:
continue
for klass, method in not_called_yet.iteritems():
for klass, method in six.iteritems(not_called_yet):
if klass.name == 'object' or method.parent.name == 'object':
continue
self.add_message('super-init-not-called', args=klass.name, node=node)
......@@ -784,7 +918,8 @@ a metaclass class method.'}
"""
if not (isinstance(method1, astroid.Function)
and isinstance(refmethod, astroid.Function)):
self.add_message('method-check-failed', args=(method1, refmethod), node=method1)
self.add_message('method-check-failed',
args=(method1, refmethod), node=method1)
return
# don't care about functions with unknown argument (builtins)
if method1.args.args is None or refmethod.args.args is None:
......@@ -813,7 +948,7 @@ def _ancestors_to_call(klass_node, method='__init__'):
to_call = {}
for base_node in klass_node.ancestors(recurs=False):
try:
to_call[base_node] = base_node.igetattr(method).next()
to_call[base_node] = next(base_node.igetattr(method))
except astroid.InferenceError:
continue
return to_call
......
......@@ -15,14 +15,15 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""check for signs of poor design"""
import re
from collections import defaultdict
from astroid import Function, If, InferenceError
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
import re
# regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile('_.*')
......@@ -174,7 +175,7 @@ class MisdesignChecker(BaseChecker):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branches = []
self._branches = defaultdict(int)
self._used_abstracts = {}
self._used_ifaces = {}
self._abstracts = []
......@@ -200,7 +201,6 @@ class MisdesignChecker(BaseChecker):
def visit_class(self, node):
"""check size of inheritance hierarchy and number of instance attributes
"""
self._inc_branch()
# Is the total inheritance hierarchy is 7 or less?
nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents:
......@@ -241,12 +241,9 @@ class MisdesignChecker(BaseChecker):
def leave_class(self, node):
"""check number of public methods"""
nb_public_methods = 0
special_methods = set()
for method in node.methods():
for method in node.mymethods():
if not method.name.startswith('_'):
nb_public_methods += 1
if method.name.startswith("__"):
special_methods.add(method.name)
# Does the class contain less than 20 public methods ?
if nb_public_methods > self.config.max_public_methods:
self.add_message('too-many-public-methods', node=node,
......@@ -257,20 +254,19 @@ class MisdesignChecker(BaseChecker):
return
# Does the class contain more than 5 public methods ?
if nb_public_methods < self.config.min_public_methods:
self.add_message('R0903', node=node,
self.add_message('too-few-public-methods', node=node,
args=(nb_public_methods,
self.config.min_public_methods))
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals', 'too-many-statements')
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def visit_function(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
self._inc_branch()
# init branch and returns counters
self._returns.append(0)
self._branches.append(0)
# check number of arguments
args = node.args.args
if args is not None:
......@@ -291,7 +287,9 @@ class MisdesignChecker(BaseChecker):
# init statements counter
self._stmts = 1
@check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements')
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def leave_function(self, node):
"""most of the work is done here on close:
checks for max returns, branch, return in __init__
......@@ -300,7 +298,7 @@ class MisdesignChecker(BaseChecker):
if returns > self.config.max_returns:
self.add_message('too-many-return-statements', node=node,
args=(returns, self.config.max_returns))
branches = self._branches.pop()
branches = self._branches[node]
if branches > self.config.max_branches:
self.add_message('too-many-branches', node=node,
args=(branches, self.config.max_branches))
......@@ -327,12 +325,12 @@ class MisdesignChecker(BaseChecker):
branches = len(node.handlers)
if node.orelse:
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
self._stmts += branches
def visit_tryfinally(self, _):
def visit_tryfinally(self, node):
"""increments the branches counter"""
self._inc_branch(2)
self._inc_branch(node, 2)
self._stmts += 2
def visit_if(self, node):
......@@ -342,7 +340,7 @@ class MisdesignChecker(BaseChecker):
if node.orelse and (len(node.orelse) > 1 or
not isinstance(node.orelse[0], If)):
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
self._stmts += branches
def visit_while(self, node):
......@@ -350,15 +348,13 @@ class MisdesignChecker(BaseChecker):
branches = 1
if node.orelse:
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
visit_for = visit_while
def _inc_branch(self, branchesnum=1):
def _inc_branch(self, node, branchesnum=1):
"""increments the branches counter"""
branches = self._branches
for i in xrange(len(branches)):
branches[i] += branchesnum
self._branches[node.scope()] += branchesnum
# FIXME: make a nice report...
......
......@@ -19,29 +19,45 @@ import sys
from logilab.common.compat import builtins
BUILTINS_NAME = builtins.__name__
import astroid
from astroid import YES, Instance, unpack_infer
from astroid import YES, Instance, unpack_infer, List, Tuple
from pylint.checkers import BaseChecker
from pylint.checkers.utils import is_empty, is_raising, check_messages
from pylint.interfaces import IAstroidChecker
from pylint.checkers.utils import (
is_empty, is_raising,
check_messages, inherit_from_std_ex,
EXCEPTIONS_MODULE, has_known_bases)
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
def infer_bases(klass):
""" Fully infer the bases of the klass node.
This doesn't use .ancestors(), because we need
the non-inferable nodes (YES nodes),
which can't be retrieved from .ancestors()
def _annotated_unpack_infer(stmt, context=None):
"""
Recursively generate nodes inferred by the given statement.
If the inferred value is a list or a tuple, recurse on the elements.
Returns an iterator which yields tuples in the format
('original node', 'infered node').
"""
for base in klass.bases:
try:
inferit = base.infer().next()
except astroid.InferenceError:
continue
if inferit is YES:
yield inferit
# TODO: the same code as unpack_infer, except for the annotated
# return. We need this type of annotation only here and
# there is no point in complicating the API for unpack_infer.
# If the need arises, this behaviour can be promoted to unpack_infer
# as well.
if isinstance(stmt, (List, Tuple)):
for elt in stmt.elts:
for infered_elt in unpack_infer(elt, context):
yield elt, infered_elt
return
# if infered is a final node, return it and stop
infered = next(stmt.infer(context))
if infered is stmt:
yield stmt, infered
return
# else, infer recursivly, except YES object that should be returned as is
for infered in stmt.infer(context):
if infered is YES:
yield stmt, infered
else:
for base in infer_bases(inferit):
yield base
for inf_inf in unpack_infer(infered, context):
yield stmt, inf_inf
PY3K = sys.version_info >= (3, 0)
OVERGENERAL_EXCEPTIONS = ('Exception',)
......@@ -52,7 +68,7 @@ MSGS = {
'Used when except clauses are not in the correct order (from the '
'more specific to the more generic). If you don\'t fix the order, '
'some exceptions may not be catched by the most specific handler.'),
'E0702': ('Raising %s while only classes, instances or string are allowed',
'E0702': ('Raising %s while only classes or instances are allowed',
'raising-bad-type',
'Used when something which is neither a class, an instance or a \
string is raised (i.e. a `TypeError` will be raised).'),
......@@ -75,10 +91,6 @@ MSGS = {
'catching-non-exception',
'Used when a class which doesn\'t inherit from \
BaseException is used as an exception in an except clause.'),
'W0701': ('Raising a string exception',
'raising-string',
'Used when a string exception is raised.'),
'W0702': ('No exception type(s) specified',
'bare-except',
'Used when an except clause doesn\'t specify exceptions type to \
......@@ -101,25 +113,9 @@ MSGS = {
'Used when the exception to catch is of the form \
"except A or B:". If intending to catch multiple, \
rewrite as "except (A, B):"'),
'W0712': ('Implicit unpacking of exceptions is not supported in Python 3',
'unpacking-in-except',
'Python3 will not allow implicit unpacking of exceptions in except '
'clauses. '
'See http://www.python.org/dev/peps/pep-3110/',
{'maxversion': (3, 0)}),
'W0713': ('Indexing exceptions will not work on Python 3',
'indexing-exception',
'Indexing exceptions will not work on Python 3. Use '
'`exception.args[index]` instead.',
{'maxversion': (3, 0)}),
}
if sys.version_info < (3, 0):
EXCEPTIONS_MODULE = "exceptions"
else:
EXCEPTIONS_MODULE = "builtins"
class ExceptionsChecker(BaseChecker):
"""checks for
* excepts without exception filter
......@@ -140,8 +136,9 @@ class ExceptionsChecker(BaseChecker):
),
)
@check_messages('raising-string', 'nonstandard-exception', 'raising-bad-type',
'raising-non-exception', 'notimplemented-raised', 'bad-exception-context')
@check_messages('nonstandard-exception',
'raising-bad-type', 'raising-non-exception',
'notimplemented-raised', 'bad-exception-context')
def visit_raise(self, node):
"""visit raise possibly inferring value"""
# ignore empty raise
......@@ -149,7 +146,7 @@ class ExceptionsChecker(BaseChecker):
return
if PY3K and node.cause:
try:
cause = node.cause.infer().next()
cause = next(node.cause.infer())
except astroid.InferenceError:
pass
else:
......@@ -168,7 +165,7 @@ class ExceptionsChecker(BaseChecker):
return
else:
try:
value = unpack_infer(expr).next()
value = next(unpack_infer(expr))
except astroid.InferenceError:
return
self._check_raise_value(node, value)
......@@ -180,7 +177,8 @@ class ExceptionsChecker(BaseChecker):
if isinstance(expr, astroid.Const):
value = expr.value
if isinstance(value, str):
self.add_message('raising-string', node=node)
# raising-string will be emitted from python3 porting checker.
pass
else:
self.add_message('raising-bad-type', node=node,
args=value.__class__.__name__)
......@@ -194,8 +192,6 @@ class ExceptionsChecker(BaseChecker):
isinstance(expr.func, astroid.Name) and
expr.func.name == 'NotImplemented')):
self.add_message('notimplemented-raised', node=node)
elif isinstance(expr, astroid.BinOp) and expr.op == '%':
self.add_message('raising-string', node=node)
elif isinstance(expr, (Instance, astroid.Class)):
if isinstance(expr, Instance):
expr = expr._proxied
......@@ -205,31 +201,15 @@ class ExceptionsChecker(BaseChecker):
if expr.newstyle:
self.add_message('raising-non-exception', node=node)
else:
self.add_message('nonstandard-exception', node=node)
self.add_message(
'nonstandard-exception', node=node,
confidence=INFERENCE if has_known_bases(expr) else INFERENCE_FAILURE)
else:
value_found = False
else:
value_found = False
return value_found
@check_messages('unpacking-in-except')
def visit_excepthandler(self, node):
"""Visit an except handler block and check for exception unpacking."""
if isinstance(node.name, (astroid.Tuple, astroid.List)):
self.add_message('unpacking-in-except', node=node)
@check_messages('indexing-exception')
def visit_subscript(self, node):
""" Look for indexing exceptions. """
try:
for infered in node.value.infer():
if not isinstance(infered, astroid.Instance):
continue
if inherit_from_std_ex(infered):
self.add_message('indexing-exception', node=node)
except astroid.InferenceError:
return
@check_messages('bare-except', 'broad-except', 'pointless-except',
'binary-op-exception', 'bad-except-order',
'catching-non-exception')
......@@ -237,10 +217,11 @@ class ExceptionsChecker(BaseChecker):
"""check for empty except"""
exceptions_classes = []
nb_handlers = len(node.handlers)
for index, handler in enumerate(node.handlers):
for index, handler in enumerate(node.handlers):
# single except doing nothing but "pass" without else clause
if is_empty(handler.body) and not node.orelse:
self.add_message('pointless-except', node=handler.type or handler.body[0])
self.add_message('pointless-except',
node=handler.type or handler.body[0])
if handler.type is None:
if not is_raising(handler.body):
self.add_message('bare-except', node=handler)
......@@ -251,56 +232,64 @@ class ExceptionsChecker(BaseChecker):
self.add_message('bad-except-order', node=node, args=msg)
elif isinstance(handler.type, astroid.BoolOp):
self.add_message('binary-op-exception', node=handler, args=handler.type.op)
self.add_message('binary-op-exception',
node=handler, args=handler.type.op)
else:
try:
excs = list(unpack_infer(handler.type))
excs = list(_annotated_unpack_infer(handler.type))
except astroid.InferenceError:
continue
for exc in excs:
# XXX skip other non class nodes
if exc is YES or not isinstance(exc, astroid.Class):
for part, exc in excs:
if exc is YES:
continue
if isinstance(exc, astroid.Instance) and inherit_from_std_ex(exc):
exc = exc._proxied
if not isinstance(exc, astroid.Class):
# Don't emit the warning if the infered stmt
# is None, but the exception handler is something else,
# maybe it was redefined.
if (isinstance(exc, astroid.Const) and
exc.value is None):
if ((isinstance(handler.type, astroid.Const) and
handler.type.value is None) or
handler.type.parent_of(exc)):
# If the exception handler catches None or
# the exception component, which is None, is
# defined by the entire exception handler, then
# emit a warning.
self.add_message('catching-non-exception',
node=handler.type,
args=(part.as_string(), ))
else:
self.add_message('catching-non-exception',
node=handler.type,
args=(part.as_string(), ))
continue
exc_ancestors = [anc for anc in exc.ancestors()
if isinstance(anc, astroid.Class)]
for previous_exc in exceptions_classes:
if previous_exc in exc_ancestors:
msg = '%s is an ancestor class of %s' % (
previous_exc.name, exc.name)
self.add_message('bad-except-order', node=handler.type, args=msg)
self.add_message('bad-except-order',
node=handler.type, args=msg)
if (exc.name in self.config.overgeneral_exceptions
and exc.root().name == EXCEPTIONS_MODULE
and not is_raising(handler.body)):
self.add_message('broad-except', args=exc.name, node=handler.type)
self.add_message('broad-except',
args=exc.name, node=handler.type)
if (not inherit_from_std_ex(exc) and
exc.root().name != BUILTINS_NAME):
# try to see if the exception is based on a C based
# exception, by infering all the base classes and
# looking for inference errors
bases = infer_bases(exc)
fully_infered = all(inferit is not YES
for inferit in bases)
if fully_infered:
if has_known_bases(exc):
self.add_message('catching-non-exception',
node=handler.type,
args=(exc.name, ))
exceptions_classes += excs
exceptions_classes += [exc for _, exc in excs]
def inherit_from_std_ex(node):
"""return true if the given class node is subclass of
exceptions.Exception
"""
if node.name in ('Exception', 'BaseException') \
and node.root().name == EXCEPTIONS_MODULE:
return True
for parent in node.ancestors(recurs=False):
if inherit_from_std_ex(parent):
return True
return False
def register(linter):
"""required method to auto register this checker"""
linter.register_checker(ExceptionsChecker(linter))
......@@ -24,9 +24,10 @@ Some parts of the process_token method is based from The Tab Nanny std module.
import keyword
import sys
import tokenize
from functools import reduce # pylint: disable=redefined-builtin
if not hasattr(tokenize, 'NL'):
raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
import six
from six.moves import zip, map, filter # pylint: disable=redefined-builtin
from astroid import nodes
......@@ -65,7 +66,7 @@ MSGS = {
'C0301': ('Line too long (%s/%s)',
'line-too-long',
'Used when a line is longer than a given number of characters.'),
'C0302': ('Too many lines in module (%s)', # was W0302
'C0302': ('Too many lines in module (%s/%s)', # was W0302
'too-many-lines',
'Used when a module has too much lines, reducing its readability.'
),
......@@ -105,22 +106,18 @@ MSGS = {
{'old_names': [('C0323', 'no-space-after-operator'),
('C0324', 'no-space-after-comma'),
('C0322', 'no-space-before-operator')]}),
'W0331': ('Use of the <> operator',
'old-ne-operator',
'Used when the deprecated "<>" operator is used instead '
'of "!=".',
{'maxversion': (3, 0)}),
'W0332': ('Use of "l" as long integer identifier',
'lowercase-l-suffix',
'Used when a lower case "l" is used to mark a long integer. You '
'should use a upper case "L" since the letter "l" looks too much '
'like the digit "1"',
{'maxversion': (3, 0)}),
'W0333': ('Use of the `` operator',
'backtick',
'Used when the deprecated "``" (backtick) operator is used '
'instead of the str() function.',
{'scope': WarningScope.NODE, 'maxversion': (3, 0)}),
'C0327': ('Mixed line endings LF and CRLF',
'mixed-line-endings',
'Used when there are mixed (LF and CRLF) newline signs in a file.'),
'C0328': ('Unexpected line ending format. There is \'%s\' while it should be \'%s\'.',
'unexpected-line-ending-format',
'Used when there is different newline than expected.'),
}
......@@ -336,7 +333,8 @@ class ContinuedLineState(object):
# current indent level
paren_align = self._cont_stack[-1].valid_outdent_offsets
next_align = self._cont_stack[-1].valid_continuation_offsets.copy()
next_align[next_align.keys()[0] + self._continuation_size] = True
next_align_keys = list(next_align.keys())
next_align[next_align_keys[0] + self._continuation_size] = True
# Note that the continuation of
# d = {
# 'a': 'b'
......@@ -401,7 +399,6 @@ class FormatChecker(BaseTokenChecker):
* unauthorized constructions
* strict indentation
* line length
* use of <> instead of !=
"""
__implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
......@@ -413,7 +410,7 @@ class FormatChecker(BaseTokenChecker):
# configuration options
# for available dict keys/values see the optik parser 'add_option' method
options = (('max-line-length',
{'default' : 80, 'type' : "int", 'metavar' : '<int>',
{'default' : 100, 'type' : "int", 'metavar' : '<int>',
'help' : 'Maximum number of characters on a single line.'}),
('ignore-long-lines',
{'type': 'regexp', 'metavar': '<regexp>',
......@@ -442,6 +439,10 @@ class FormatChecker(BaseTokenChecker):
{'type': 'int', 'metavar': '<int>', 'default': 4,
'help': 'Number of spaces of indent required inside a hanging '
' or continued line.'}),
('expected-line-ending-format',
{'type': 'choice', 'metavar': '<empty or LF or CRLF>', 'default': '',
'choices': ['', 'LF', 'CRLF'],
'help': 'Expected format of line ending, e.g. empty (any line ending), LF or CRLF.'}),
)
def __init__(self, linter=None):
......@@ -496,7 +497,7 @@ class FormatChecker(BaseTokenChecker):
keyword_token = tokens[start][1]
line_num = tokens[start][2][0]
for i in xrange(start, len(tokens) - 1):
for i in range(start, len(tokens) - 1):
token = tokens[i]
# If we hit a newline, then assume any parens were for continuation.
......@@ -622,13 +623,13 @@ class FormatChecker(BaseTokenChecker):
return 'No', 'allowed'
def _name_construct(token):
if tokens[i][1] == ',':
if token[1] == ',':
return 'comma'
elif tokens[i][1] == ':':
elif token[1] == ':':
return ':'
elif tokens[i][1] in '()[]{}':
elif token[1] in '()[]{}':
return 'bracket'
elif tokens[i][1] in ('<', '>', '<=', '>=', '!=', '=='):
elif token[1] in ('<', '>', '<=', '>=', '!=', '=='):
return 'comparison'
else:
if self._inside_brackets('('):
......@@ -637,7 +638,8 @@ class FormatChecker(BaseTokenChecker):
return 'assignment'
good_space = [True, True]
pairs = [(tokens[i-1], tokens[i]), (tokens[i], tokens[i+1])]
token = tokens[i]
pairs = [(tokens[i-1], token), (token, tokens[i+1])]
for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
......@@ -658,19 +660,15 @@ class FormatChecker(BaseTokenChecker):
if not ok:
warnings.append((policy, position))
for policy, position in warnings:
construct = _name_construct(tokens[i])
construct = _name_construct(token)
count, state = _policy_string(policy)
self.add_message('bad-whitespace', line=tokens[i][2][0],
self.add_message('bad-whitespace', line=token[2][0],
args=(count, state, position, construct,
_underline_token(tokens[i])))
_underline_token(token)))
def _inside_brackets(self, left):
return self._bracket_stack[-1] == left
def _handle_old_ne_operator(self, tokens, i):
if tokens[i][1] == '<>':
self.add_message('old-ne-operator', line=tokens[i][2][0])
def _prepare_token_dispatcher(self):
raw = [
(_KEYWORD_TOKENS,
......@@ -690,7 +688,6 @@ class FormatChecker(BaseTokenChecker):
(['lambda'], self._open_lambda),
(['<>'], self._handle_old_ne_operator),
]
dispatch = {}
......@@ -715,6 +712,7 @@ class FormatChecker(BaseTokenChecker):
self._lines = {}
self._visited_lines = {}
token_handlers = self._prepare_token_dispatcher()
self._last_line_ending = None
self._current_line = ContinuedLineState(tokens, self.config)
for idx, (tok_type, token, start, _, line) in enumerate(tokens):
......@@ -737,6 +735,7 @@ class FormatChecker(BaseTokenChecker):
check_equal = True
self._process_retained_warnings(TokenWrapper(tokens), idx)
self._current_line.next_logical_line()
self._check_line_ending(token, line_num)
elif tok_type == tokenize.INDENT:
check_equal = False
self.check_indent_level(token, indents[-1]+1, line_num)
......@@ -776,14 +775,39 @@ class FormatChecker(BaseTokenChecker):
line_num -= 1 # to be ok with "wc -l"
if line_num > self.config.max_module_lines:
self.add_message('too-many-lines', args=line_num, line=1)
# Get the line where the too-many-lines (or its message id)
# was disabled or default to 1.
symbol = self.linter.msgs_store.check_message_id('too-many-lines')
names = (symbol.msgid, 'too-many-lines')
line = next(filter(None,
map(self.linter._pragma_lineno.get, names)), 1)
self.add_message('too-many-lines',
args=(line_num, self.config.max_module_lines),
line=line)
def _check_line_ending(self, line_ending, line_num):
# check if line endings are mixed
if self._last_line_ending is not None:
if line_ending != self._last_line_ending:
self.add_message('mixed-line-endings', line=line_num)
self._last_line_ending = line_ending
# check if line ending is as expected
expected = self.config.expected_line_ending_format
if expected:
line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "") # reduce multiple \n\n\n\n to one \n
line_ending = 'LF' if line_ending == '\n' else 'CRLF'
if line_ending != expected:
self.add_message('unexpected-line-ending-format', args=(line_ending, expected), line=line_num)
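A sketch of the intended configuration, using the option name and choices declared above (the rc section name is assumed to match the format checker):

    [FORMAT]
    expected-line-ending-format=LF

With that set, every CRLF-terminated line is reported as unexpected-line-ending-format, and a file that alternates endings additionally gets mixed-line-endings on each line whose ending differs from the previous one.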
def _process_retained_warnings(self, tokens, current_pos):
single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ':')
for indent_pos, state, offsets in self._current_line.retained_warnings:
block_type = offsets[tokens.start_col(indent_pos)]
hints = dict((k, v) for k, v in offsets.iteritems()
hints = dict((k, v) for k, v in six.iteritems(offsets)
if v != block_type)
if single_line_block_stmt and block_type == WITH_BODY:
self._add_continuation_message(state, hints, tokens, indent_pos)
......@@ -857,7 +881,7 @@ class FormatChecker(BaseTokenChecker):
tolineno = node.tolineno
assert tolineno, node
lines = []
for line in xrange(line, tolineno + 1):
for line in range(line, tolineno + 1):
self._visited_lines[line] = 1
try:
lines.append(self._lines[line].rstrip())
......@@ -881,10 +905,6 @@ class FormatChecker(BaseTokenChecker):
self.add_message('multiple-statements', node=node)
self._visited_lines[line] = 2
@check_messages('backtick')
def visit_backquote(self, node):
self.add_message('backtick', node=node)
def check_lines(self, lines, i):
"""check lines have less than a maximum number of characters
"""
......
......@@ -16,6 +16,10 @@
"""imports checkers for Python code"""
import sys
from collections import defaultdict
import six
from six.moves import map # pylint: disable=redefined-builtin
from logilab.common.graph import get_cycles, DotBackend
from logilab.common.ureports import VerbatimText, Paragraph
......@@ -27,8 +31,16 @@ from astroid.modutils import get_module_part, is_standard_module
from pylint.interfaces import IAstroidChecker
from pylint.utils import EmptyReport
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from pylint.checkers.utils import check_messages, is_import_error
def _except_import_error(node):
"""
Check if the try-except node has an ImportError handler.
Return True if an ImportError handler was infered, False otherwise.
"""
if not isinstance(node, astroid.TryExcept):
return
return any(map(is_import_error, node.handlers))
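Concretely, combined with the get_imported_module change below, the classic guarded import no longer produces a spurious import-error when the optional module is absent (the module name is only an example):

    try:
        import cjson
    except ImportError:
        cjson = None    # handler detected, so import-error is not emitted for 'cjson'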
def get_first_import(node, context, name, base, level):
"""return the node where [base.]<name> is imported or None if not found
......@@ -98,14 +110,14 @@ def dependencies_graph(filename, dep_info):
done = {}
printer = DotBackend(filename[:-4], rankdir='LR')
printer.emit('URL="." node[shape="box"]')
for modname, dependencies in sorted(dep_info.iteritems()):
for modname, dependencies in sorted(six.iteritems(dep_info)):
done[modname] = 1
printer.emit_node(modname)
for modname in dependencies:
if modname not in done:
done[modname] = 1
printer.emit_node(modname)
for depmodname, dependencies in sorted(dep_info.iteritems()):
for depmodname, dependencies in sorted(six.iteritems(dep_info)):
for modname in dependencies:
printer.emit_edge(modname, depmodname)
printer.generate(filename)
......@@ -220,20 +232,21 @@ given file (report RP0402 must not be disabled)'}
self.linter.add_stats(dependencies={})
self.linter.add_stats(cycles=[])
self.stats = self.linter.stats
self.import_graph = {}
self.import_graph = defaultdict(set)
def close(self):
"""called before visiting project (i.e set of modules)"""
# don't try to compute cycles if the associated message is disabled
if self.linter.is_message_enabled('cyclic-import'):
for cycle in get_cycles(self.import_graph):
vertices = list(self.import_graph)
for cycle in get_cycles(self.import_graph, vertices=vertices):
self.add_message('cyclic-import', args=' -> '.join(cycle))
def visit_import(self, node):
"""triggered when an import statement is seen"""
modnode = node.root()
for name, _ in node.names:
importedmodnode = self.get_imported_module(modnode, node, name)
importedmodnode = self.get_imported_module(node, name)
if importedmodnode is None:
continue
self._check_relative_import(modnode, node, importedmodnode, name)
......@@ -260,7 +273,7 @@ given file (report RP0402 must not be disabled)'}
if name == '*':
self.add_message('wildcard-import', args=basename, node=node)
modnode = node.root()
importedmodnode = self.get_imported_module(modnode, node, basename)
importedmodnode = self.get_imported_module(node, basename)
if importedmodnode is None:
return
self._check_relative_import(modnode, node, importedmodnode, basename)
......@@ -270,15 +283,16 @@ given file (report RP0402 must not be disabled)'}
self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
self._check_reimport(node, name, basename, node.level)
def get_imported_module(self, modnode, importnode, modname):
def get_imported_module(self, importnode, modname):
try:
return importnode.do_import_module(modname)
except astroid.InferenceError, ex:
except astroid.InferenceError as ex:
if str(ex) != modname:
args = '%r (%s)' % (modname, ex)
else:
args = repr(modname)
self.add_message("import-error", args=args, node=importnode)
if not _except_import_error(importnode.parent):
self.add_message("import-error", args=args, node=importnode)
def _check_relative_import(self, modnode, importnode, importedmodnode,
importedasname):
......@@ -295,7 +309,8 @@ given file (report RP0402 must not be disabled)'}
return False
if importedmodnode.name != importedasname:
# this must be a relative import...
self.add_message('relative-import', args=(importedasname, importedmodnode.name),
self.add_message('relative-import',
args=(importedasname, importedmodnode.name),
node=importnode)
def _add_imported_module(self, node, importedmodname):
......@@ -315,8 +330,8 @@ given file (report RP0402 must not be disabled)'}
if not context_name in importedmodnames:
importedmodnames.add(context_name)
# update import graph
mgraph = self.import_graph.setdefault(context_name, set())
if not importedmodname in mgraph:
mgraph = self.import_graph[context_name]
if importedmodname not in mgraph:
mgraph.add(importedmodname)
def _check_deprecated_module(self, node, mod_path):
......@@ -343,7 +358,7 @@ given file (report RP0402 must not be disabled)'}
def report_external_dependencies(self, sect, _, dummy):
"""return a verbatim layout for displaying dependencies"""
dep_info = make_tree_defs(self._external_dependencies_info().iteritems())
dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
if not dep_info:
raise EmptyReport()
tree_str = repr_tree_defs(dep_info)
......@@ -375,7 +390,7 @@ given file (report RP0402 must not be disabled)'}
if self.__ext_dep_info is None:
package = self.linter.current_name
self.__ext_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems():
for importee, importers in six.iteritems(self.stats['dependencies']):
if not importee.startswith(package):
result[importee] = importers
return self.__ext_dep_info
......@@ -387,7 +402,7 @@ given file (report RP0402 must not be disabled)'}
if self.__int_dep_info is None:
package = self.linter.current_name
self.__int_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems():
for importee, importers in six.iteritems(self.stats['dependencies']):
if importee.startswith(package):
result[importee] = importers
return self.__int_dep_info
......
......@@ -20,6 +20,9 @@ from pylint import interfaces
from pylint.checkers import utils
from pylint.checkers.utils import check_messages
import six
MSGS = {
'W1201': ('Specify string format arguments as logging function parameters',
'logging-not-lazy',
......@@ -32,6 +35,14 @@ MSGS = {
'interpolation in those cases in which no message will be '
'logged. For more, see '
'http://www.python.org/dev/peps/pep-0282/.'),
'W1202': ('Use % formatting in logging functions but pass the % '
'parameters as arguments',
'logging-format-interpolation',
'Used when a logging statement has a call form of '
'"logging.<logging method>(format_string.format(format_args...))"'
'. Such calls should use % formatting instead, but leave '
'interpolation to the logging function by passing the parameters '
'as arguments.'),
'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
'logging-unsupported-format',
'Used when an unsupported format character is used in a logging\
......@@ -53,6 +64,27 @@ CHECKED_CONVENIENCE_FUNCTIONS = set([
'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn',
'warning'])
def is_method_call(callfunc_node, types=(), methods=()):
"""Determines if a CallFunc node represents a method call.
Args:
callfunc_node: The CallFunc AST node to check.
types: Optional sequence of caller type names to restrict check.
methods: Optional sequence of method names to restrict check.
Returns:
True, if the node represents a method call for the given type and
method names, False otherwise.
"""
if not isinstance(callfunc_node, astroid.CallFunc):
return False
func = utils.safe_infer(callfunc_node.func)
return (isinstance(func, astroid.BoundMethod)
and isinstance(func.bound, astroid.Instance)
and (func.bound.name in types if types else True)
and (func.name in methods if methods else True))
class LoggingChecker(checkers.BaseChecker):
"""Checks use of the logging module."""
......@@ -62,15 +94,15 @@ class LoggingChecker(checkers.BaseChecker):
msgs = MSGS
options = (('logging-modules',
{'default' : ('logging',),
'type' : 'csv',
'metavar' : '<comma separated list>',
'help' : 'Logging modules to check that the string format '
'arguments are in logging function parameter format'}
{'default': ('logging',),
'type': 'csv',
'metavar': '<comma separated list>',
'help': 'Logging modules to check that the string format '
'arguments are in logging function parameter format'}
),
)
def visit_module(self, unused_node):
def visit_module(self, node): # pylint: disable=unused-argument
"""Clears any state left in this checker from last module checked."""
# The code being checked can just as easily "import logging as foo",
# so it is necessary to process the imports and store in this field
......@@ -150,9 +182,20 @@ class LoggingChecker(checkers.BaseChecker):
if isinstance(node.args[format_pos], astroid.BinOp) and node.args[format_pos].op == '%':
self.add_message('logging-not-lazy', node=node)
elif isinstance(node.args[format_pos], astroid.CallFunc):
self._check_call_func(node.args[format_pos])
elif isinstance(node.args[format_pos], astroid.Const):
self._check_format_string(node, format_pos)
def _check_call_func(self, callfunc_node):
"""Checks that function call is not format_string.format().
Args:
callfunc_node: CallFunc AST node to be checked.
"""
if is_method_call(callfunc_node, ('str', 'unicode'), ('format',)):
self.add_message('logging-format-interpolation', node=callfunc_node)
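# Illustrative sketch, not from the upstream patch: the new W1202 message
# flags eager str.format() interpolation inside a logging call; passing the
# arguments through keeps the formatting lazy and is not reported.
import logging

user = 'alice'
logging.warning('hello {0}'.format(user))   # logging-format-interpolation
logging.warning('hello %s', user)           # preferred: lazy % interpolation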
def _check_format_string(self, node, format_arg):
"""Checks that format string tokens match the supplied arguments.
......@@ -166,7 +209,7 @@ class LoggingChecker(checkers.BaseChecker):
# don't check any further.
return
format_string = node.args[format_arg].value
if not isinstance(format_string, basestring):
if not isinstance(format_string, six.string_types):
# If the log format is constant non-string (e.g. logging.debug(5)),
# ensure there are no arguments.
required_num_args = 0
......@@ -178,7 +221,7 @@ class LoggingChecker(checkers.BaseChecker):
# Keyword checking on logging strings is complicated by
# special keywords - out of scope.
return
except utils.UnsupportedFormatCharacter, ex:
except utils.UnsupportedFormatCharacter as ex:
char = format_string[ex.index]
self.add_message('logging-unsupported-format', node=node,
args=(char, ord(char), ex.index))
......
......@@ -21,6 +21,7 @@ import re
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker
import six
MSGS = {
......@@ -72,8 +73,8 @@ class EncodingChecker(BaseChecker):
def _check_encoding(self, lineno, line, file_encoding):
try:
return unicode(line, file_encoding)
except UnicodeDecodeError, ex:
return six.text_type(line, file_encoding)
except UnicodeDecodeError as ex:
self.add_message('invalid-encoded-data', line=lineno,
args=(file_encoding, ex.args[2]))
......
......@@ -19,9 +19,13 @@ import sys
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from pylint.checkers.utils import (
check_messages,
has_known_bases,
node_frame_class,
)
MSGS = {
'E1001': ('Use of __slots__ on an old style class',
......@@ -43,7 +47,7 @@ MSGS = {
{'maxversion': (3, 0)}),
'W1001': ('Use of "property" on an old style class',
'property-on-old-class',
'Used when PyLint detect the use of the builtin "property" \
'Used when Pylint detects the use of the builtin "property" \
on an old style class while this is relying on new style \
classes features.',
{'maxversion': (3, 0)}),
......@@ -74,15 +78,21 @@ class NewStyleConflictChecker(BaseChecker):
@check_messages('slots-on-old-class', 'old-style-class')
def visit_class(self, node):
"""check __slots__ usage
""" Check __slots__ in old style classes and old
style class definition.
"""
if '__slots__' in node and not node.newstyle:
self.add_message('slots-on-old-class', node=node)
confidence = (INFERENCE if has_known_bases(node)
else INFERENCE_FAILURE)
self.add_message('slots-on-old-class', node=node,
confidence=confidence)
# The node type could be class, exception, metaclass, or
# interface. Presumably, the non-class-type nodes would always
# have an explicit base class anyway.
if not node.bases and node.type == 'class':
self.add_message('old-style-class', node=node)
if not node.bases and node.type == 'class' and not node.metaclass():
# We use confidence HIGH here because this message should only ever
# be emitted for classes at the root of the inheritance hierarchy.
self.add_message('old-style-class', node=node, confidence=HIGH)
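# Illustrative sketch, not from the upstream patch (class names invented):
# under Python 2 a class with no bases and no metaclass is old-style and is
# now reported with HIGH confidence; deriving from object avoids it.
class Legacy:            # old-style-class on Python 2
    pass

class Modern(object):    # new-style, not reported
    pass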
@check_messages('property-on-old-class')
def visit_callfunc(self, node):
......@@ -91,9 +101,12 @@ class NewStyleConflictChecker(BaseChecker):
if (isinstance(parent, astroid.Class) and
not parent.newstyle and
isinstance(node.func, astroid.Name)):
confidence = (INFERENCE if has_known_bases(parent)
else INFERENCE_FAILURE)
name = node.func.name
if name == 'property':
self.add_message('property-on-old-class', node=node)
self.add_message('property-on-old-class', node=node,
confidence=confidence)
@check_messages('super-on-old-class', 'bad-super-call', 'missing-super-argument')
def visit_function(self, node):
......@@ -103,6 +116,9 @@ class NewStyleConflictChecker(BaseChecker):
return
klass = node.parent.frame()
for stmt in node.nodes_of_class(astroid.CallFunc):
if node_frame_class(stmt) != node_frame_class(node):
# Don't look down in other scopes.
continue
expr = stmt.func
if not isinstance(expr, astroid.Getattr):
continue
......@@ -111,9 +127,12 @@ class NewStyleConflictChecker(BaseChecker):
if isinstance(call, astroid.CallFunc) and \
isinstance(call.func, astroid.Name) and \
call.func.name == 'super':
confidence = (INFERENCE if has_known_bases(klass)
else INFERENCE_FAILURE)
if not klass.newstyle:
# super should not be used on an old style class
self.add_message('super-on-old-class', node=node)
self.add_message('super-on-old-class', node=node,
confidence=confidence)
else:
# super first arg should be the class
if not call.args and sys.version_info[0] == 3:
......@@ -121,13 +140,14 @@ class NewStyleConflictChecker(BaseChecker):
continue
try:
supcls = (call.args and call.args[0].infer().next()
supcls = (call.args and next(call.args[0].infer())
or None)
except astroid.InferenceError:
continue
if supcls is None:
self.add_message('missing-super-argument', node=call)
self.add_message('missing-super-argument', node=call,
confidence=confidence)
continue
if klass is not supcls:
......@@ -143,7 +163,8 @@ class NewStyleConflictChecker(BaseChecker):
if name is not None:
self.add_message('bad-super-call',
node=call,
args=(name, ))
args=(name, ),
confidence=confidence)
def register(linter):
......
# Copyright 2014 Google Inc.
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Check Python 2 code for Python 2/3 source-compatible issues."""
from __future__ import absolute_import
import re
import tokenize
import astroid
from pylint import checkers, interfaces
from pylint.utils import WarningScope
from pylint.checkers import utils
_ZERO = re.compile("^0+$")
def _is_old_octal(literal):
if _ZERO.match(literal):
return False
if re.match('0\d+', literal):
try:
int(literal, 8)
except ValueError:
return False
return True
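# Illustrative sketch, not from the upstream patch: the helper above only
# matches a leading zero followed by further digits that parse as octal, e.g.
#     _is_old_octal('0644')  -> True          (old-octal-literal is reported)
#     _is_old_octal('0o644') -> None (falsy)  (already the Python 3 spelling)
#     _is_old_octal('0')     -> False         (a plain zero is fine)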
def _check_dict_node(node):
inferred_types = set()
try:
inferred = node.infer()
for inferred_node in inferred:
inferred_types.add(inferred_node)
except (astroid.InferenceError, astroid.UnresolvableName):
pass
return (not inferred_types
or any(isinstance(x, astroid.Dict) for x in inferred_types))
class Python3Checker(checkers.BaseChecker):
__implements__ = interfaces.IAstroidChecker
enabled = False
name = 'python3'
msgs = {
# Errors for what will syntactically break in Python 3, warnings for
# everything else.
'E1601': ('print statement used',
'print-statement',
'Used when a print statement is used '
'(`print` is a function in Python 3)',
{'maxversion': (3, 0)}),
'E1602': ('Parameter unpacking specified',
'parameter-unpacking',
'Used when parameter unpacking is specified for a function'
"(Python 3 doesn't allow it)",
{'maxversion': (3, 0)}),
'E1603': ('Implicit unpacking of exceptions is not supported '
'in Python 3',
'unpacking-in-except',
'Python3 will not allow implicit unpacking of '
'exceptions in except clauses. '
'See http://www.python.org/dev/peps/pep-3110/',
{'maxversion': (3, 0),
'old_names': [('W0712', 'unpacking-in-except')]}),
'E1604': ('Use raise ErrorClass(args) instead of '
'raise ErrorClass, args.',
'old-raise-syntax',
"Used when the alternate raise syntax "
"'raise foo, bar' is used "
"instead of 'raise foo(bar)'.",
{'maxversion': (3, 0),
'old_names': [('W0121', 'old-raise-syntax')]}),
'E1605': ('Use of the `` operator',
'backtick',
'Used when the deprecated "``" (backtick) operator is used '
'instead of the str() function.',
{'scope': WarningScope.NODE,
'maxversion': (3, 0),
'old_names': [('W0333', 'backtick')]}),
'W1601': ('apply built-in referenced',
'apply-builtin',
'Used when the apply built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1602': ('basestring built-in referenced',
'basestring-builtin',
'Used when the basestring built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1603': ('buffer built-in referenced',
'buffer-builtin',
'Used when the buffer built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1604': ('cmp built-in referenced',
'cmp-builtin',
'Used when the cmp built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1605': ('coerce built-in referenced',
'coerce-builtin',
'Used when the coerce built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1606': ('execfile built-in referenced',
'execfile-builtin',
'Used when the execfile built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1607': ('file built-in referenced',
'file-builtin',
'Used when the file built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1608': ('long built-in referenced',
'long-builtin',
'Used when the long built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1609': ('raw_input built-in referenced',
'raw_input-builtin',
'Used when the raw_input built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1610': ('reduce built-in referenced',
'reduce-builtin',
'Used when the reduce built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1611': ('StandardError built-in referenced',
'standarderror-builtin',
'Used when the StandardError built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1612': ('unicode built-in referenced',
'unicode-builtin',
'Used when the unicode built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1613': ('xrange built-in referenced',
'xrange-builtin',
'Used when the xrange built-in function is referenced '
'(missing from Python 3)',
{'maxversion': (3, 0)}),
'W1614': ('__coerce__ method defined',
'coerce-method',
'Used when a __coerce__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1615': ('__delslice__ method defined',
'delslice-method',
'Used when a __delslice__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1616': ('__getslice__ method defined',
'getslice-method',
'Used when a __getslice__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1617': ('__setslice__ method defined',
'setslice-method',
'Used when a __setslice__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1618': ('import missing `from __future__ import absolute_import`',
'no-absolute-import',
'Used when an import is not accompanied by '
'`from __future__ import absolute_import`'
' (default behaviour in Python 3)',
{'maxversion': (3, 0)}),
'W1619': ('division w/o __future__ statement',
'old-division',
'Used for non-floor division w/o a float literal or '
'``from __future__ import division``'
'(Python 3 returns a float for int division unconditionally)',
{'maxversion': (3, 0)}),
'W1620': ('Calling a dict.iter*() method',
'dict-iter-method',
'Used for calls to dict.iterkeys(), itervalues() or iteritems() '
'(Python 3 lacks these methods)',
{'maxversion': (3, 0)}),
'W1621': ('Calling a dict.view*() method',
'dict-view-method',
'Used for calls to dict.viewkeys(), viewvalues() or viewitems() '
'(Python 3 lacks these methods)',
{'maxversion': (3, 0)}),
'W1622': ('Called a next() method on an object',
'next-method-called',
"Used when an object's next() method is called "
'(Python 3 uses the next() built-in function)',
{'maxversion': (3, 0)}),
'W1623': ("Assigning to a class' __metaclass__ attribute",
'metaclass-assignment',
"Used when a metaclass is specified by assigning to __metaclass__ "
'(Python 3 specifies the metaclass as a class statement argument)',
{'maxversion': (3, 0)}),
'W1624': ('Indexing exceptions will not work on Python 3',
'indexing-exception',
'Indexing exceptions will not work on Python 3. Use '
'`exception.args[index]` instead.',
{'maxversion': (3, 0),
'old_names': [('W0713', 'indexing-exception')]}),
'W1625': ('Raising a string exception',
'raising-string',
'Used when a string exception is raised. This will not '
'work on Python 3.',
{'maxversion': (3, 0),
'old_names': [('W0701', 'raising-string')]}),
'W1626': ('reload built-in referenced',
'reload-builtin',
'Used when the reload built-in function is referenced '
'(missing from Python 3). You can use instead imp.reload '
'or importlib.reload.',
{'maxversion': (3, 0)}),
'W1627': ('__oct__ method defined',
'oct-method',
'Used when a __oct__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1628': ('__hex__ method defined',
'hex-method',
'Used when a __hex__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1629': ('__nonzero__ method defined',
'nonzero-method',
'Used when a __nonzero__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1630': ('__cmp__ method defined',
'cmp-method',
'Used when a __cmp__ method is defined '
'(method is not used by Python 3)',
{'maxversion': (3, 0)}),
'W1631': ('map is used as implicitly evaluated call',
'implicit-map-evaluation',
'Used when the map builtin is used as implicitly '
'evaluated call, as in "map(func, args)" on a single line. '
'This behaviour will not work in Python 3, where '
'map is a generator and must be evaluated. '
'Prefer a for-loop as alternative.',
{'maxversion': (3, 0)}),
}
_missing_builtins = frozenset([
'apply',
'basestring',
'buffer',
'cmp',
'coerce',
'execfile',
'file',
'long',
'raw_input',
'reduce',
'StandardError',
'unicode',
'xrange',
'reload',
])
_unused_magic_methods = frozenset([
'__coerce__',
'__delslice__',
'__getslice__',
'__setslice__',
'__oct__',
'__hex__',
'__nonzero__',
'__cmp__',
])
def __init__(self, *args, **kwargs):
self._future_division = False
self._future_absolute_import = False
super(Python3Checker, self).__init__(*args, **kwargs)
def visit_function(self, node):
if node.is_method() and node.name in self._unused_magic_methods:
method_name = node.name
if node.name.startswith('__'):
method_name = node.name[2:-2]
self.add_message(method_name + '-method', node=node)
@utils.check_messages('parameter-unpacking')
def visit_arguments(self, node):
for arg in node.args:
if isinstance(arg, astroid.Tuple):
self.add_message('parameter-unpacking', node=arg)
@utils.check_messages('implicit-map-evaluation')
def visit_discard(self, node):
if (isinstance(node.value, astroid.CallFunc) and
isinstance(node.value.func, astroid.Name) and
node.value.func.name == 'map'):
module = node.value.func.lookup('map')[0]
if getattr(module, 'name', None) == '__builtin__':
self.add_message('implicit-map-evaluation', node=node)
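# Illustrative sketch, not from the upstream patch (variable names invented):
# map() called only for its side effects is reported, because Python 3's
# map() is lazy and the calls would silently never run; an explicit loop is
# the portable form.
values = ['1', '2']
converted = []
map(converted.append, values)    # implicit-map-evaluation: result discarded
for value in values:             # portable alternative
    converted.append(value)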
def visit_name(self, node):
"""Detect when a built-in that is missing in Python 3 is referenced."""
found_node = node.lookup(node.name)[0]
if getattr(found_node, 'name', None) == '__builtin__':
if node.name in self._missing_builtins:
message = node.name.lower() + '-builtin'
self.add_message(message, node=node)
@utils.check_messages('print-statement')
def visit_print(self, node):
self.add_message('print-statement', node=node)
@utils.check_messages('no-absolute-import')
def visit_from(self, node):
if node.modname == '__future__':
for name, _ in node.names:
if name == 'division':
self._future_division = True
elif name == 'absolute_import':
self._future_absolute_import = True
elif not self._future_absolute_import:
self.add_message('no-absolute-import', node=node)
@utils.check_messages('no-absolute-import')
def visit_import(self, node):
if not self._future_absolute_import:
self.add_message('no-absolute-import', node=node)
@utils.check_messages('metaclass-assignment')
def visit_class(self, node):
if '__metaclass__' in node.locals:
self.add_message('metaclass-assignment', node=node)
@utils.check_messages('old-division')
def visit_binop(self, node):
if not self._future_division and node.op == '/':
for arg in (node.left, node.right):
if isinstance(arg, astroid.Const) and isinstance(arg.value, float):
break
else:
self.add_message('old-division', node=node)
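# Illustrative sketch, not from the upstream patch: '/' with no float literal
# and no `from __future__ import division` is reported, because Python 2
# truncates while Python 3 always returns a float.
print(7 / 2)     # old-division: 3 on Python 2, 3.5 on Python 3
print(7 / 2.0)   # not reported: the float literal makes the intent explicit
print(7 // 2)    # not reported: floor division is the same on both versions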
@utils.check_messages('next-method-called',
'dict-iter-method',
'dict-view-method')
def visit_callfunc(self, node):
if not isinstance(node.func, astroid.Getattr):
return
if any([node.args, node.starargs, node.kwargs]):
return
if node.func.attrname == 'next':
self.add_message('next-method-called', node=node)
else:
if _check_dict_node(node.func.expr):
if node.func.attrname in ('iterkeys', 'itervalues', 'iteritems'):
self.add_message('dict-iter-method', node=node)
elif node.func.attrname in ('viewkeys', 'viewvalues', 'viewitems'):
self.add_message('dict-view-method', node=node)
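# Illustrative sketch, not from the upstream patch: the Python 2-only dict
# iterator and view methods are reported when the receiver is inferred to be
# a dict; six.iteritems() (used throughout this patch) or plain .items() are
# the portable replacements.
counts = {'a': 1}
# counts.iteritems()    # dict-iter-method: missing on Python 3
# counts.viewkeys()     # dict-view-method: missing on Python 3
counts.items()          # portable spelling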
@utils.check_messages('indexing-exception')
def visit_subscript(self, node):
""" Look for indexing exceptions. """
try:
for infered in node.value.infer():
if not isinstance(infered, astroid.Instance):
continue
if utils.inherit_from_std_ex(infered):
self.add_message('indexing-exception', node=node)
except astroid.InferenceError:
return
@utils.check_messages('unpacking-in-except')
def visit_excepthandler(self, node):
"""Visit an except handler block and check for exception unpacking."""
if isinstance(node.name, (astroid.Tuple, astroid.List)):
self.add_message('unpacking-in-except', node=node)
@utils.check_messages('backtick')
def visit_backquote(self, node):
self.add_message('backtick', node=node)
@utils.check_messages('raising-string', 'old-raise-syntax')
def visit_raise(self, node):
"""Visit a raise statement and check for raising
strings or old-raise-syntax.
"""
if (node.exc is not None and
node.inst is not None and
node.tback is None):
self.add_message('old-raise-syntax', node=node)
# Ignore empty raise.
if node.exc is None:
return
expr = node.exc
if self._check_raise_value(node, expr):
return
else:
try:
value = next(astroid.unpack_infer(expr))
except astroid.InferenceError:
return
self._check_raise_value(node, value)
def _check_raise_value(self, node, expr):
if isinstance(expr, astroid.Const):
value = expr.value
if isinstance(value, str):
self.add_message('raising-string', node=node)
return True
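# Illustrative sketch, not from the upstream patch (Python 2 syntax, so shown
# only in comments): both legacy raise forms below are reported, and
# constructing the exception object is the portable spelling.
#     raise ValueError, 'bad value'      # old-raise-syntax
#     raise 'bad value'                  # raising-string
#     raise ValueError('bad value')      # portable, not reported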
class Python3TokenChecker(checkers.BaseTokenChecker):
__implements__ = interfaces.ITokenChecker
name = 'python3'
enabled = False
msgs = {
'E1606': ('Use of long suffix',
'long-suffix',
'Used when "l" or "L" is used to mark a long integer. '
'This will not work in Python 3, since `int` and `long` '
'types have merged.',
{'maxversion': (3, 0)}),
'E1607': ('Use of the <> operator',
'old-ne-operator',
'Used when the deprecated "<>" operator is used instead '
'of "!=". This is removed in Python 3.',
{'maxversion': (3, 0),
'old_names': [('W0331', 'old-ne-operator')]}),
'E1608': ('Use of old octal literal',
'old-octal-literal',
'Used when encountering the old octal syntax, '
'removed in Python 3. To use the new syntax, '
'prepend 0o on the number.',
{'maxversion': (3, 0)}),
}
def process_tokens(self, tokens):
for idx, (tok_type, token, start, _, _) in enumerate(tokens):
if tok_type == tokenize.NUMBER:
if token.lower().endswith('l'):
# This has a different semantic than lowercase-l-suffix.
self.add_message('long-suffix', line=start[0])
elif _is_old_octal(token):
self.add_message('old-octal-literal', line=start[0])
if tokens[idx][1] == '<>':
self.add_message('old-ne-operator', line=tokens[idx][2][0])
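# Illustrative sketch, not from the upstream patch (Python 2 syntax, so shown
# only in comments): these constructs disappear at the parser level in
# Python 3, which is why they are caught from the raw token stream.
#     size = 10L          # long-suffix: int and long are merged in Python 3
#     if a <> b: pass     # old-ne-operator: use != instead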
def register(linter):
linter.register_checker(Python3Checker(linter))
linter.register_checker(Python3TokenChecker(linter))
......@@ -16,14 +16,18 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""a similarities / code duplication command line tool and pylint checker
"""
from __future__ import print_function
import sys
from itertools import izip
from collections import defaultdict
from logilab.common.ureports import Table
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker, table_lines_from_stats
import six
from six.moves import zip
class Similar(object):
"""finds copy-pasted lines of code in a project"""
......@@ -58,9 +62,9 @@ class Similar(object):
def _compute_sims(self):
"""compute similarities in appended files"""
no_duplicates = {}
no_duplicates = defaultdict(list)
for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
duplicate = no_duplicates.setdefault(num, [])
duplicate = no_duplicates[num]
for couples in duplicate:
if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
couples.add((lineset1, idx1))
......@@ -69,7 +73,7 @@ class Similar(object):
else:
duplicate.append(set([(lineset1, idx1), (lineset2, idx2)]))
sims = []
for num, ensembles in no_duplicates.iteritems():
for num, ensembles in six.iteritems(no_duplicates):
for couples in ensembles:
sims.append((num, couples))
sims.sort()
......@@ -80,19 +84,19 @@ class Similar(object):
"""display computed similarities on stdout"""
nb_lignes_dupliquees = 0
for num, couples in sims:
print
print num, "similar lines in", len(couples), "files"
print()
print(num, "similar lines in", len(couples), "files")
couples = sorted(couples)
for lineset, idx in couples:
print "==%s:%s" % (lineset.name, idx)
print("==%s:%s" % (lineset.name, idx))
# pylint: disable=W0631
for line in lineset._real_lines[idx:idx+num]:
print " ", line.rstrip()
print(" ", line.rstrip())
nb_lignes_dupliquees += num * (len(couples)-1)
nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
print "TOTAL lines=%s duplicates=%s percent=%.2f" \
print("TOTAL lines=%s duplicates=%s percent=%.2f" \
% (nb_total_lignes, nb_lignes_dupliquees,
nb_lignes_dupliquees*100. / nb_total_lignes)
nb_lignes_dupliquees*100. / nb_total_lignes))
def _find_common(self, lineset1, lineset2):
"""find similarities in the two given linesets"""
......@@ -107,7 +111,7 @@ class Similar(object):
for index2 in find(lineset1[index1]):
non_blank = 0
for num, ((_, line1), (_, line2)) in enumerate(
izip(lines1(index1), lines2(index2))):
zip(lines1(index1), lines2(index2))):
if line1 != line2:
if non_blank > min_lines:
yield num, lineset1, index1, lineset2, index2
......@@ -207,10 +211,10 @@ class LineSet(object):
def _mk_index(self):
"""create the index for this set"""
index = {}
index = defaultdict(list)
for line_no, line in enumerate(self._stripped_lines):
if line:
index.setdefault(line, []).append(line_no)
index[line].append(line_no)
return index
......@@ -323,10 +327,10 @@ def register(linter):
def usage(status=0):
"""display command line usage information"""
print "finds copy pasted blocks in a set of files"
print
print 'Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...'
print("finds copy pasted blocks in a set of files")
print()
print('Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...')
sys.exit(status)
def Run(argv=None):
......
# Copyright 2014 Michal Nowikowski.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Checker for spelling errors in comments and docstrings.
"""
import sys
import tokenize
import string
import re
if sys.version_info[0] >= 3:
maketrans = str.maketrans
else:
maketrans = string.maketrans
from pylint.interfaces import ITokenChecker, IAstroidChecker
from pylint.checkers import BaseTokenChecker
from pylint.checkers.utils import check_messages
try:
import enchant
except ImportError:
enchant = None
if enchant is not None:
br = enchant.Broker()
dicts = br.list_dicts()
dict_choices = [''] + [d[0] for d in dicts]
dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
dicts = ", ".join(dicts)
instr = ""
else:
dicts = "none"
dict_choices = ['']
instr = " To make it working install python-enchant package."
table = maketrans("", "")
class SpellingChecker(BaseTokenChecker):
"""Check spelling in comments and docstrings"""
__implements__ = (ITokenChecker, IAstroidChecker)
name = 'spelling'
msgs = {
'C0401': ('Wrong spelling of a word \'%s\' in a comment:\n%s\n'
'%s\nDid you mean: \'%s\'?',
'wrong-spelling-in-comment',
'Used when a word in comment is not spelled correctly.'),
'C0402': ('Wrong spelling of a word \'%s\' in a docstring:\n%s\n'
'%s\nDid you mean: \'%s\'?',
'wrong-spelling-in-docstring',
'Used when a word in docstring is not spelled correctly.'),
}
options = (('spelling-dict',
{'default' : '', 'type' : 'choice', 'metavar' : '<dict name>',
'choices': dict_choices,
'help' : 'Spelling dictionary name. '
'Available dictionaries: %s.%s' % (dicts, instr)}),
('spelling-ignore-words',
{'default' : '',
'type' : 'string',
'metavar' : '<comma separated words>',
'help' : 'List of comma separated words that '
'should not be checked.'}),
('spelling-private-dict-file',
{'default' : '',
'type' : 'string',
'metavar' : '<path to file>',
'help' : 'A path to a file that contains private '
'dictionary; one word per line.'}),
('spelling-store-unknown-words',
{'default' : 'n', 'type' : 'yn', 'metavar' : '<y_or_n>',
'help' : 'Tells whether to store unknown words to '
'indicated private dictionary in '
'--spelling-private-dict-file option instead of '
'raising a message.'}),
)
def open(self):
self.initialized = False
self.private_dict_file = None
if enchant is None:
return
dict_name = self.config.spelling_dict
if not dict_name:
return
self.ignore_list = self.config.spelling_ignore_words.split(",")
# "param" appears in docstring in param description and
# "pylint" appears in comments in pylint pragmas.
self.ignore_list.extend(["param", "pylint"])
if self.config.spelling_private_dict_file:
self.spelling_dict = enchant.DictWithPWL(
dict_name, self.config.spelling_private_dict_file)
self.private_dict_file = open(
self.config.spelling_private_dict_file, "a")
else:
self.spelling_dict = enchant.Dict(dict_name)
if self.config.spelling_store_unknown_words:
self.unknown_words = set()
# Prepare regex for stripping punctuation signs from text.
# ' and _ are treated in a special way.
puncts = string.punctuation.replace("'", "").replace("_", "")
self.punctuation_regex = re.compile('[%s]' % re.escape(puncts))
self.initialized = True
def close(self):
if self.private_dict_file:
self.private_dict_file.close()
def _check_spelling(self, msgid, line, line_num):
line2 = line.strip()
# Replace ['afadf with afadf (but preserve don't)
line2 = re.sub("'([^a-zA-Z]|$)", " ", line2)
# Replace afadf'] with afadf (but preserve don't)
line2 = re.sub("([^a-zA-Z]|^)'", " ", line2)
# Replace punctuation signs with space e.g. and/or -> and or
line2 = self.punctuation_regex.sub(' ', line2)
words = []
for word in line2.split():
# Skip words with digits.
if len(re.findall(r"\d", word)) > 0:
continue
# Skip words with mixed big and small letters,
# they are probably class names.
if (len(re.findall("[A-Z]", word)) > 0 and
len(re.findall("[a-z]", word)) > 0 and
len(word) > 2):
continue
# Skip words with _ - they are probably function parameter names.
if word.count('_') > 0:
continue
words.append(word)
# Go through words and check them.
for word in words:
# Skip words from ignore list.
if word in self.ignore_list:
continue
orig_word = word
word = word.lower()
# Strip starting u' from unicode literals and r' from raw strings.
if (word.startswith("u'") or
word.startswith('u"') or
word.startswith("r'") or
word.startswith('r"')) and len(word) > 2:
word = word[2:]
# If it is a known word, then continue.
if self.spelling_dict.check(word):
continue
# Store word to private dict or raise a message.
if self.config.spelling_store_unknown_words:
if word not in self.unknown_words:
self.private_dict_file.write("%s\n" % word)
self.unknown_words.add(word)
else:
# Present up to 4 suggestions.
# TODO: add support for customising this.
suggestions = self.spelling_dict.suggest(word)[:4]
m = re.search(r"(\W|^)(%s)(\W|$)" % word, line.lower())
if m:
# Start position of second group in regex.
col = m.regs[2][0]
else:
col = line.lower().index(word)
indicator = (" " * col) + ("^" * len(word))
self.add_message(msgid, line=line_num,
args=(orig_word, line,
indicator,
"' or '".join(suggestions)))
def process_tokens(self, tokens):
if not self.initialized:
return
# Process tokens and look for comments.
for (tok_type, token, (start_row, _), _, _) in tokens:
if tok_type == tokenize.COMMENT:
self._check_spelling('wrong-spelling-in-comment',
token, start_row)
@check_messages('wrong-spelling-in-docstring')
def visit_module(self, node):
if not self.initialized:
return
self._check_docstring(node)
@check_messages('wrong-spelling-in-docstring')
def visit_class(self, node):
if not self.initialized:
return
self._check_docstring(node)
@check_messages('wrong-spelling-in-docstring')
def visit_function(self, node):
if not self.initialized:
return
self._check_docstring(node)
def _check_docstring(self, node):
"""check the node has any spelling errors"""
docstring = node.doc
if not docstring:
return
start_line = node.lineno + 1
# Go through lines of docstring
for idx, line in enumerate(docstring.splitlines()):
self._check_spelling('wrong-spelling-in-docstring',
line, start_line + idx)
def register(linter):
"""required method to auto register this checker """
linter.register_checker(SpellingChecker(linter))
......@@ -19,6 +19,7 @@ import re
import sys
import astroid
from astroid.bases import Instance
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
......@@ -31,15 +32,22 @@ if sys.version_info >= (3, 0):
else:
OPEN_MODULE = '__builtin__'
class OpenModeChecker(BaseChecker):
class StdlibChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
name = 'open_mode'
name = 'stdlib'
msgs = {
'W1501': ('"%s" is not a valid mode for open.',
'bad-open-mode',
'Python supports: r, w, a modes with b, +, and U options. '
'See http://docs.python.org/2/library/functions.html#open'),
'W1502': ('Using datetime.time in a boolean context.',
'boolean-datetime',
'Using datetime.time in a boolean context can hide '
'subtle bugs when the time they represent matches '
'midnight UTC. This behaviour was fixed in Python 3.5. '
'See http://bugs.python.org/issue13936 for reference.',
{'maxversion': (3, 5)}),
}
@utils.check_messages('bad-open-mode')
......@@ -51,6 +59,36 @@ class OpenModeChecker(BaseChecker):
if getattr(node.func, 'name', None) in ('open', 'file'):
self._check_open_mode(node)
@utils.check_messages('boolean-datetime')
def visit_unaryop(self, node):
if node.op == 'not':
self._check_datetime(node.operand)
@utils.check_messages('boolean-datetime')
def visit_if(self, node):
self._check_datetime(node.test)
@utils.check_messages('boolean-datetime')
def visit_ifexp(self, node):
self._check_datetime(node.test)
@utils.check_messages('boolean-datetime')
def visit_boolop(self, node):
for value in node.values:
self._check_datetime(value)
def _check_datetime(self, node):
""" Check that a datetime was infered.
If so, emit boolean-datetime warning.
"""
try:
infered = next(node.infer())
except astroid.InferenceError:
return
if (isinstance(infered, Instance) and
infered.qname() == 'datetime.time'):
self.add_message('boolean-datetime', node=node)
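# Illustrative sketch, not from the upstream patch: datetime.time(0, 0) is
# falsy before Python 3.5, so truth-testing a time can silently treat
# midnight as "no value"; comparing against None is the explicit form.
import datetime

start = datetime.time(0, 0)
if not start:         # boolean-datetime: False only because it is midnight
    pass
if start is None:     # explicit and unambiguous, not reported
    pass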
def _check_open_mode(self, node):
"""Check that the mode argument of an open or file call is valid."""
try:
......@@ -66,5 +104,5 @@ class OpenModeChecker(BaseChecker):
def register(linter):
"""required method to auto register this checker """
linter.register_checker(OpenModeChecker(linter))
linter.register_checker(StdlibChecker(linter))
......@@ -21,10 +21,7 @@
import sys
import tokenize
import string
try:
import numbers
except ImportError:
numbers = None
import numbers
import astroid
......@@ -33,6 +30,9 @@ from pylint.checkers import BaseChecker, BaseTokenChecker
from pylint.checkers import utils
from pylint.checkers.utils import check_messages
import six
_PY3K = sys.version_info[:2] >= (3, 0)
_PY27 = sys.version_info[:2] == (2, 7)
......@@ -145,8 +145,8 @@ def collect_string_fields(format_string):
"""
formatter = string.Formatter()
parseiterator = formatter.parse(format_string)
try:
parseiterator = formatter.parse(format_string)
for result in parseiterator:
if all(item is None for item in result[1:]):
# not a replacement format
......@@ -181,6 +181,7 @@ def parse_format_method_string(format_string):
if isinstance(keyname, numbers.Number):
# In Python 2 it will return long which will lead
# to different output between 2 and 3
manual_pos_arg.add(keyname)
keyname = int(keyname)
keys.append((keyname, list(fielditerator)))
else:
......@@ -233,13 +234,13 @@ class StringFormatChecker(BaseChecker):
args = node.right
if not (isinstance(left, astroid.Const)
and isinstance(left.value, basestring)):
and isinstance(left.value, six.string_types)):
return
format_string = left.value
try:
required_keys, required_num_args = \
utils.parse_format_string(format_string)
except utils.UnsupportedFormatCharacter, e:
except utils.UnsupportedFormatCharacter as e:
c = format_string[e.index]
self.add_message('bad-format-character',
node=node, args=(c, ord(c), e.index))
......@@ -262,7 +263,7 @@ class StringFormatChecker(BaseChecker):
for k, _ in args.items:
if isinstance(k, astroid.Const):
key = k.value
if isinstance(key, basestring):
if isinstance(key, six.string_types):
keys.add(key)
else:
self.add_message('bad-format-string-key',
......@@ -345,10 +346,17 @@ class StringMethodsChecker(BaseChecker):
# We do this because our inference engine can't properly handle
# redefinitions of the original string.
# For more details, see issue 287.
if not isinstance(node.func.expr, astroid.Const):
#
# Note that there may not be any left side at all, if the format method
# has been assigned to another variable. See issue 351. For example:
#
# fmt = 'some string {}'.format
# fmt('arg')
if (isinstance(node.func, astroid.Getattr)
and not isinstance(node.func.expr, astroid.Const)):
return
try:
strnode = func.bound.infer().next()
strnode = next(func.bound.infer())
except astroid.InferenceError:
return
if not isinstance(strnode, astroid.Const):
......@@ -366,10 +374,8 @@ class StringMethodsChecker(BaseChecker):
self.add_message('bad-format-string', node=node)
return
manual_fields = set(field[0] for field in fields
if isinstance(field[0], numbers.Number))
named_fields = set(field[0] for field in fields
if isinstance(field[0], basestring))
if isinstance(field[0], six.string_types))
if num_args and manual_pos:
self.add_message('format-combined-specification',
node=node)
......@@ -408,12 +414,7 @@ class StringMethodsChecker(BaseChecker):
# num_args can be 0 if manual_pos is not.
num_args = num_args or manual_pos
if positional > num_args:
# We can have two possibilities:
# * "{0} {1}".format(a, b)
# * "{} {} {}".format(a, b, c, d)
# We can check the manual keys for the first one.
if len(manual_fields) != positional:
self.add_message('too-many-format-args', node=node)
self.add_message('too-many-format-args', node=node)
elif positional < num_args:
self.add_message('too-few-format-args', node=node)
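# Illustrative sketch, not from the upstream patch: with the manual-field
# special case removed, any str.format() call whose positional arguments do
# not match the format string is reported directly:
#     '{} and {}'.format(1, 2, 3)    # too-many-format-args
#     '{0} and {1}'.format(1)        # too-few-format-args
#     '{0} and {1}'.format(1, 2)     # matches, not reported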
......@@ -444,7 +445,7 @@ class StringMethodsChecker(BaseChecker):
if argname in (astroid.YES, None):
continue
try:
argument = argname.infer().next()
argument = next(argname.infer())
except astroid.InferenceError:
continue
if not specifiers or argument is astroid.YES:
......@@ -452,12 +453,9 @@ class StringMethodsChecker(BaseChecker):
# use attribute / item access
continue
if argument.parent and isinstance(argument.parent, astroid.Arguments):
# Check to see if our argument is kwarg or vararg,
# and skip the check for this argument if so, because when inferring,
# astroid will return empty objects (dicts and tuples) and
# that can lead to false positives.
if argname.name in (argument.parent.kwarg, argument.parent.vararg):
continue
# Ignore any object coming from an argument,
# because we can't infer its value properly.
continue
previous = argument
parsed = []
for is_attribute, specifier in specifiers:
......@@ -501,7 +499,7 @@ class StringMethodsChecker(BaseChecker):
break
try:
previous = previous.infer().next()
previous = next(previous.infer())
except astroid.InferenceError:
# can't check further if we can't infer it
break
......@@ -540,17 +538,18 @@ class StringConstantChecker(BaseTokenChecker):
self._unicode_literals = 'unicode_literals' in module.future_imports
def process_tokens(self, tokens):
for (tok_type, token, (start_row, start_col), _, _) in tokens:
for (tok_type, token, (start_row, _), _, _) in tokens:
if tok_type == tokenize.STRING:
# 'token' is the whole un-parsed token; we can look at the start
# of it to see whether it's a raw or unicode string etc.
self.process_string_token(token, start_row, start_col)
self.process_string_token(token, start_row)
def process_string_token(self, token, start_row, start_col):
def process_string_token(self, token, start_row):
for i, c in enumerate(token):
if c in '\'\"':
quote_char = c
break
# pylint: disable=undefined-loop-variable
prefix = token[:i].lower() # markers like u, b, r.
after_prefix = token[i:]
if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
......@@ -559,18 +558,15 @@ class StringConstantChecker(BaseTokenChecker):
string_body = after_prefix[1:-1] # Chop off quotes
# No special checks on raw strings at the moment.
if 'r' not in prefix:
self.process_non_raw_string_token(prefix, string_body,
start_row, start_col)
self.process_non_raw_string_token(prefix, string_body, start_row)
def process_non_raw_string_token(self, prefix, string_body, start_row,
start_col):
def process_non_raw_string_token(self, prefix, string_body, start_row):
"""check for bad escapes in a non-raw string.
prefix: lowercase string of eg 'ur' string prefix markers.
string_body: the un-parsed body of the string, not including the quote
marks.
start_row: integer line number in the source.
start_col: integer column number in the source.
"""
# Walk through the string; if we see a backslash then escape the next
# character, and skip over it. If we see a non-escaped character,
......
......@@ -23,23 +23,21 @@ import astroid
from astroid import InferenceError, NotFoundError, YES, Instance
from astroid.bases import BUILTINS
from pylint.interfaces import IAstroidChecker
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
from pylint.checkers import BaseChecker
from pylint.checkers.utils import safe_infer, is_super, check_messages
from pylint.checkers.utils import (
safe_infer, is_super,
check_messages, decorated_with_property)
MSGS = {
'E1101': ('%s %r has no %r member',
'no-member',
'Used when a variable is accessed for an unexistent member.'),
'Used when a variable is accessed for a nonexistent member.',
{'old_names': [('E1103', 'maybe-no-member')]}),
'E1102': ('%s is not callable',
'not-callable',
'Used when an object being called has been inferred to a non \
callable object'),
'E1103': ('%s %r has no %r member (but some types could not be inferred)',
'maybe-no-member',
'Used when a variable is accessed for an unexistent member, but \
astroid was not able to interpret all possible types of this \
variable.'),
'E1111': ('Assigning to function call which doesn\'t return',
'assignment-from-no-return',
'Used when an assignment is done on a function call but the \
......@@ -56,11 +54,6 @@ MSGS = {
'too-many-function-args',
'Used when a function call passes too many positional \
arguments.'),
'E1122': ('Duplicate keyword argument %r in %s call',
'duplicate-keyword-arg',
'Used when a function call passes the same keyword argument \
multiple times.',
{'maxversion': (2, 6)}),
'E1123': ('Unexpected keyword argument %r in %s call',
'unexpected-keyword-arg',
'Used when a function call passes a keyword argument that \
......@@ -192,7 +185,7 @@ accessed. Python regular expressions are accepted.'}
def visit_delattr(self, node):
self.visit_getattr(node)
@check_messages('no-member', 'maybe-no-member')
@check_messages('no-member')
def visit_getattr(self, node):
"""check that the accessed attribute exists
......@@ -254,6 +247,20 @@ accessed. Python regular expressions are accepted.'}
# explicit skipping of module member access
if owner.root().name in self.config.ignored_modules:
continue
if isinstance(owner, astroid.Class):
# Look up in the metaclass only if the owner is itself
# a class.
# TODO: getattr doesn't return by default members
# from the metaclass, because handling various cases
# of methods accessible from the metaclass itself
# and/or subclasses only is too complicated for little to
# no benefit.
metaclass = owner.metaclass()
try:
if metaclass and metaclass.getattr(node.attrname):
continue
except NotFoundError:
pass
missingattr.add((owner, name))
continue
# stop on the first found
......@@ -270,13 +277,11 @@ accessed. Python regular expressions are accepted.'}
if actual in done:
continue
done.add(actual)
if inference_failure:
msgid = 'maybe-no-member'
else:
msgid = 'no-member'
self.add_message(msgid, node=node,
confidence = INFERENCE if not inference_failure else INFERENCE_FAILURE
self.add_message('no-member', node=node,
args=(owner.display_type(), name,
node.attrname))
node.attrname),
confidence=confidence)
@check_messages('assignment-from-no-return', 'assignment-from-none')
def visit_assign(self, node):
......@@ -333,43 +338,17 @@ accessed. Python regular expressions are accepted.'}
except astroid.NotFoundError:
return
stop_checking = False
for attr in attrs:
if attr is astroid.YES:
continue
if stop_checking:
break
if not isinstance(attr, astroid.Function):
continue
# Decorated, see if it is decorated with a property
if not attr.decorators:
continue
for decorator in attr.decorators.nodes:
if not isinstance(decorator, astroid.Name):
continue
try:
for infered in decorator.infer():
property_like = False
if isinstance(infered, astroid.Class):
if (infered.root().name == BUILTINS and
infered.name == 'property'):
property_like = True
else:
for ancestor in infered.ancestors():
if (ancestor.name == 'property' and
ancestor.root().name == BUILTINS):
property_like = True
break
if property_like:
self.add_message('not-callable', node=node,
args=node.func.as_string())
stop_checking = True
break
except InferenceError:
pass
if stop_checking:
break
if decorated_with_property(attr):
self.add_message('not-callable', node=node,
args=node.func.as_string())
break
@check_messages(*(list(MSGS.keys())))
def visit_callfunc(self, node):
......@@ -383,11 +362,7 @@ accessed. Python regular expressions are accepted.'}
num_positional_args = 0
for arg in node.args:
if isinstance(arg, astroid.Keyword):
keyword = arg.arg
if keyword in keyword_args:
self.add_message('duplicate-keyword-arg', node=node,
args=(keyword, 'function'))
keyword_args.add(keyword)
keyword_args.add(arg.arg)
else:
num_positional_args += 1
......@@ -549,7 +524,6 @@ accessed. Python regular expressions are accepted.'}
# slice or instances with __index__.
parent_type = safe_infer(node.parent.value)
if not isinstance(parent_type, (astroid.Class, astroid.Instance)):
return
......@@ -578,13 +552,10 @@ accessed. Python regular expressions are accepted.'}
if not isinstance(itemmethod, astroid.Function):
return
if itemmethod.root().name != BUILTINS:
return
if not itemmethod.parent:
return
if itemmethod.parent.name not in SEQUENCE_TYPES:
return
......@@ -595,7 +566,6 @@ accessed. Python regular expressions are accepted.'}
index_type = node
else:
index_type = safe_infer(node)
if index_type is None or index_type is astroid.YES:
return
......@@ -607,7 +577,6 @@ accessed. Python regular expressions are accepted.'}
elif isinstance(index_type, astroid.Instance):
if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'):
return
try:
index_type.getattr('__index__')
return
......@@ -625,7 +594,6 @@ accessed. Python regular expressions are accepted.'}
continue
index_type = safe_infer(index)
if index_type is None or index_type is astroid.YES:
continue
......
......@@ -30,6 +30,11 @@ BUILTINS_NAME = builtins.__name__
COMP_NODE_TYPES = astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GenExpr
PY3K = sys.version_info[0] == 3
if not PY3K:
EXCEPTIONS_MODULE = "exceptions"
else:
EXCEPTIONS_MODULE = "builtins"
class NoSuchArgumentError(Exception):
pass
......@@ -82,11 +87,11 @@ def safe_infer(node):
"""
try:
inferit = node.infer()
value = inferit.next()
value = next(inferit)
except astroid.InferenceError:
return
try:
inferit.next()
next(inferit)
return # None if there is ambiguity on the inferred node
except astroid.InferenceError:
return # there is some kind of ambiguity
......@@ -152,12 +157,12 @@ def is_defined_before(var_node):
if ass_node.name == varname:
return True
elif isinstance(_node, astroid.With):
for expr, vars in _node.items:
for expr, ids in _node.items:
if expr.parent_of(var_node):
break
if (vars and
isinstance(vars, astroid.AssName) and
vars.name == varname):
if (ids and
isinstance(ids, astroid.AssName) and
ids.name == varname):
return True
elif isinstance(_node, (astroid.Lambda, astroid.Function)):
if _node.args.is_argument(varname):
......@@ -412,10 +417,85 @@ def get_argument_from_call(callfunc_node, position=None, keyword=None):
try:
if position is not None and not isinstance(callfunc_node.args[position], astroid.Keyword):
return callfunc_node.args[position]
except IndexError, error:
except IndexError as error:
raise NoSuchArgumentError(error)
if keyword:
for arg in callfunc_node.args:
if isinstance(arg, astroid.Keyword) and arg.arg == keyword:
return arg.value
raise NoSuchArgumentError
def inherit_from_std_ex(node):
"""
Return true if the given class node is subclass of
exceptions.Exception.
"""
if node.name in ('Exception', 'BaseException') \
and node.root().name == EXCEPTIONS_MODULE:
return True
return any(inherit_from_std_ex(parent)
for parent in node.ancestors(recurs=False))
def is_import_error(handler):
"""
Check if the given exception handler catches
ImportError.
:param handler: A node, representing an ExceptHandler node.
:returns: True if the handler catches ImportError, False otherwise.
"""
names = None
if isinstance(handler.type, astroid.Tuple):
names = [name for name in handler.type.elts
if isinstance(name, astroid.Name)]
elif isinstance(handler.type, astroid.Name):
names = [handler.type]
else:
# Don't try to infer that.
return
for name in names:
try:
for infered in name.infer():
if (isinstance(infered, astroid.Class) and
inherit_from_std_ex(infered) and
infered.name == 'ImportError'):
return True
except astroid.InferenceError:
continue
def has_known_bases(klass):
"""Returns true if all base classes of a class could be inferred."""
try:
return klass._all_bases_known
except AttributeError:
pass
for base in klass.bases:
result = safe_infer(base)
# TODO: check for A->B->A->B pattern in class structure too?
if (not isinstance(result, astroid.Class) or
result is klass or
not has_known_bases(result)):
klass._all_bases_known = False
return False
klass._all_bases_known = True
return True
def decorated_with_property(node):
""" Detect if the given function node is decorated with a property. """
if not node.decorators:
return False
for decorator in node.decorators.nodes:
if not isinstance(decorator, astroid.Name):
continue
try:
for infered in decorator.infer():
if isinstance(infered, astroid.Class):
if (infered.root().name == BUILTINS_NAME and
infered.name == 'property'):
return True
for ancestor in infered.ancestors():
if (ancestor.name == 'property' and
ancestor.root().name == BUILTINS_NAME):
return True
except astroid.InferenceError:
pass
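# Illustrative sketch, not from the upstream patch (class name invented): the
# helper above lets the typecheck checker report a call made through a
# property, since the attribute access already yields the returned value
# rather than a bound method.
class Point(object):
    @property
    def norm(self):
        return 1.0

p = Point()
value = p.norm      # plain attribute access, nothing to call
# value = p.norm()  # not-callable: p.norm is already the float 1.0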
......@@ -17,22 +17,26 @@
"""
import os
import sys
import re
from copy import copy
import astroid
from astroid import are_exclusive, builtin_lookup, AstroidBuildingException
from astroid import are_exclusive, builtin_lookup
from astroid import modutils
from logilab.common.modutils import file_from_modpath
from pylint.interfaces import IAstroidChecker
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE, HIGH
from pylint.utils import get_global_option
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
PYMETHODS, is_ancestor_name, is_builtin,
is_defined_before, is_error, is_func_default, is_func_decorator,
assign_parent, check_messages, is_inside_except, clobber_in_except,
get_all_elements)
get_all_elements, has_known_bases)
import six
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
PY3K = sys.version_info >= (3, 0)
def in_for_else_branch(parent, stmt):
"""Returns True if stmt in inside the else branch for a parent For stmt."""
......@@ -42,7 +46,7 @@ def in_for_else_branch(parent, stmt):
def overridden_method(klass, name):
"""get overridden method if any"""
try:
parent = klass.local_attr_ancestors(name).next()
parent = next(klass.local_attr_ancestors(name))
except (StopIteration, KeyError):
return None
try:
......@@ -134,6 +138,55 @@ def _detect_global_scope(node, frame, defframe):
# and the definition of the first depends on the second.
return frame.lineno < defframe.lineno
def _fix_dot_imports(not_consumed):
""" Try to fix imports with multiple dots, by returning a dictionary
with the import names expanded. The function unflattens root imports,
like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
and 'xml.sax' respectively.
"""
# TODO: this should be improved in issue astroid #46
names = {}
for name, stmts in six.iteritems(not_consumed):
if any(isinstance(stmt, astroid.AssName)
and isinstance(stmt.ass_type(), astroid.AugAssign)
for stmt in stmts):
continue
for stmt in stmts:
if not isinstance(stmt, (astroid.From, astroid.Import)):
continue
for imports in stmt.names:
second_name = None
if imports[0] == "*":
# In case of wildcard imports,
# pick the name from inside the imported module.
second_name = name
else:
if imports[0].find(".") > -1 or name in imports:
# Most likely something like 'xml.etree',
# which will appear in the .locals as 'xml'.
# Only pick the name if it wasn't consumed.
second_name = imports[0]
if second_name and second_name not in names:
names[second_name] = stmt
return sorted(names.items(), key=lambda a: a[1].fromlineno)
def _find_frame_imports(name, frame):
"""
Detect imports in the frame, with the required
*name*. Such imports can be considered assignments.
Returns True if an import for the given name was found.
"""
imports = frame.nodes_of_class((astroid.Import, astroid.From))
for import_node in imports:
for import_name, import_alias in import_node.names:
# If the import uses an alias, check only that.
# Otherwise, check only the import name.
if import_alias:
if import_alias == name:
return True
elif import_name and import_name == name:
return True
MSGS = {
'E0601': ('Using variable %r before assignment',
......@@ -164,13 +217,13 @@ MSGS = {
'W0603': ('Using the global statement', # W0121
'global-statement',
'Used when you use the "global" statement to update a global \
variable. PyLint just try to discourage this \
variable. Pylint just try to discourage this \
usage. That doesn\'t mean you can not use it !'),
'W0604': ('Using the global statement at the module level', # W0103
'global-at-module-level',
'Used when you use the "global" statement at the module level \
since it has no effect'),
'W0611': ('Unused import %s',
'W0611': ('Unused %s',
'unused-import',
'Used when an imported module or variable is not used.'),
'W0612': ('Unused variable %r',
......@@ -250,6 +303,13 @@ variables (i.e. expectedly not used).'}),
'help' : 'List of additional names supposed to be defined in \
builtins. Remember that you should avoid to define new builtins when possible.'
}),
("callbacks",
{'default' : ('cb_', '_cb'), 'type' : 'csv',
'metavar' : '<callbacks>',
'help' : 'List of strings which can identify a callback '
'function by name. A callback name must start or '
'end with one of those strings.'}
)
)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
......@@ -261,12 +321,14 @@ builtins. Remember that you should avoid to define new builtins when possible.'
checks that globals don't override builtins
"""
self._to_consume = [(copy(node.locals), {}, 'module')]
for name, stmts in node.locals.iteritems():
for name, stmts in six.iteritems(node.locals):
if is_builtin(name) and not is_inside_except(stmts[0]):
# do not print Redefining builtin for additional builtins
self.add_message('redefined-builtin', args=name, node=stmts[0])
@check_messages('unused-import', 'unused-wildcard-import', 'redefined-builtin', 'undefined-all-variable', 'invalid-all-object')
@check_messages('unused-import', 'unused-wildcard-import',
'redefined-builtin', 'undefined-all-variable',
'invalid-all-object')
def leave_module(self, node):
"""leave module: check globals
"""
......@@ -274,17 +336,18 @@ builtins. Remember that you should avoid to define new builtins when possible.'
not_consumed = self._to_consume.pop()[0]
# attempt to check for __all__ if defined
if '__all__' in node.locals:
assigned = node.igetattr('__all__').next()
assigned = next(node.igetattr('__all__'))
if assigned is not astroid.YES:
for elt in getattr(assigned, 'elts', ()):
try:
elt_name = elt.infer().next()
elt_name = next(elt.infer())
except astroid.InferenceError:
continue
if not isinstance(elt_name, astroid.Const) \
or not isinstance(elt_name.value, basestring):
self.add_message('invalid-all-object', args=elt.as_string(), node=elt)
or not isinstance(elt_name.value, six.string_types):
self.add_message('invalid-all-object',
args=elt.as_string(), node=elt)
continue
elt_name = elt_name.value
# If elt is in not_consumed, remove it from not_consumed
......@@ -301,7 +364,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
if os.path.basename(basename) == '__init__':
name = node.name + "." + elt_name
try:
file_from_modpath(name.split("."))
modutils.file_from_modpath(name.split("."))
except ImportError:
self.add_message('undefined-all-variable',
args=elt_name,
......@@ -314,19 +377,52 @@ builtins. Remember that you should avoid to define new builtins when possible.'
# don't check unused imports in __init__ files
if not self.config.init_import and node.package:
return
for name, stmts in not_consumed.iteritems():
if any(isinstance(stmt, astroid.AssName)
and isinstance(stmt.ass_type(), astroid.AugAssign)
for stmt in stmts):
continue
stmt = stmts[0]
if isinstance(stmt, astroid.Import):
self.add_message('unused-import', args=name, node=stmt)
elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
if stmt.names[0][0] == '*':
self.add_message('unused-wildcard-import', args=name, node=stmt)
else:
self.add_message('unused-import', args=name, node=stmt)
self._check_imports(not_consumed)
def _check_imports(self, not_consumed):
local_names = _fix_dot_imports(not_consumed)
checked = set()
for name, stmt in local_names:
for imports in stmt.names:
real_name = imported_name = imports[0]
if imported_name == "*":
real_name = name
as_name = imports[1]
if real_name in checked:
continue
if name not in (real_name, as_name):
continue
checked.add(real_name)
if (isinstance(stmt, astroid.Import) or
(isinstance(stmt, astroid.From) and
not stmt.modname)):
if (isinstance(stmt, astroid.From) and
SPECIAL_OBJ.search(imported_name)):
# Filter special objects (__doc__, __all__) etc.,
# because they can be imported for exporting.
continue
if as_name is None:
msg = "import %s" % imported_name
else:
msg = "%s imported as %s" % (imported_name, as_name)
self.add_message('unused-import', args=msg, node=stmt)
elif isinstance(stmt, astroid.From) and stmt.modname != '__future__':
if SPECIAL_OBJ.search(imported_name):
# Filter special objects (__doc__, __all__) etc.,
# because they can be imported for exporting.
continue
if imported_name == '*':
self.add_message('unused-wildcard-import',
args=name, node=stmt)
else:
if as_name is None:
msg = "%s imported from %s" % (imported_name, stmt.modname)
else:
fields = (imported_name, stmt.modname, as_name)
msg = "%s imported from %s as %s" % fields
self.add_message('unused-import', args=msg, node=stmt)
del self._to_consume
def visit_class(self, node):
......@@ -418,6 +514,10 @@ builtins. Remember that you should avoid to define new builtins when possible.'
klass = node.parent.frame()
if is_method and (klass.type == 'interface' or node.is_abstract()):
return
if is_method and isinstance(klass, astroid.Class):
confidence = INFERENCE if has_known_bases(klass) else INFERENCE_FAILURE
else:
confidence = HIGH
authorized_rgx = self.config.dummy_variables_rgx
called_overridden = False
argnames = node.argnames()
......@@ -428,7 +528,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
for nonlocal_stmt in node.nodes_of_class(astroid.Nonlocal):
nonlocal_names.update(set(nonlocal_stmt.names))
for name, stmts in not_consumed.iteritems():
for name, stmts in six.iteritems(not_consumed):
# ignore some special names specified by user configuration
if authorized_rgx.match(name):
continue
......@@ -468,10 +568,12 @@ builtins. Remember that you should avoid to define new builtins when possible.'
continue
if node.name in PYMETHODS and node.name not in ('__init__', '__new__'):
continue
# don't check callback arguments XXX should be configurable
if node.name.startswith('cb_') or node.name.endswith('_cb'):
# don't check callback arguments
if any(node.name.startswith(cb) or node.name.endswith(cb)
for cb in self.config.callbacks):
continue
self.add_message('unused-argument', args=name, node=stmt)
self.add_message('unused-argument', args=name, node=stmt,
confidence=confidence)
else:
if stmt.parent and isinstance(stmt.parent, astroid.Assign):
if name in nonlocal_names:
......@@ -503,25 +605,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
# same scope level assignment
break
else:
# global but no assignment
# Detect imports in the current frame, with the required
# name. Such imports can be considered assignments.
imports = frame.nodes_of_class((astroid.Import, astroid.From))
for import_node in imports:
found = False
for import_name, import_alias in import_node.names:
# If the import uses an alias, check only that.
# Otherwise, check only the import name.
if import_alias:
if import_alias == name:
found = True
break
elif import_name and import_name == name:
found = True
break
if found:
break
else:
if not _find_frame_imports(name, frame):
self.add_message('global-variable-not-assigned',
args=name, node=node)
default_message = False
......@@ -541,7 +625,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
if default_message:
self.add_message('global-statement', node=node)
def _check_late_binding_closure(self, node, assignment_node, scope_type):
def _check_late_binding_closure(self, node, assignment_node):
def _is_direct_lambda_call():
return (isinstance(node_scope.parent, astroid.CallFunc)
and node_scope.parent.func is node_scope)
......@@ -651,18 +735,34 @@ builtins. Remember that you should avoid to define new builtins when possible.'
base_scope_type == 'comprehension' and i == start_index-1):
# Detect if we are in a local class scope, as an assignment.
# For example, the following is fair game.
#
# class A:
# b = 1
# c = lambda b=b: b * b
class_assignment = (isinstance(frame, astroid.Class) and
name in frame.locals)
if not class_assignment:
#
# class B:
# tp = 1
# def func(self, arg: tp):
# ...
in_annotation = (
PY3K and isinstance(frame, astroid.Function)
and node.statement() is frame and
(node in frame.args.annotations
or node is frame.args.varargannotation
or node is frame.args.kwargannotation))
if in_annotation:
frame_locals = frame.parent.scope().locals
else:
frame_locals = frame.locals
if not ((isinstance(frame, astroid.Class) or in_annotation)
and name in frame_locals):
continue
# the name has already been consumed, only check it's not a loop
# variable used outside the loop
if name in consumed:
defnode = assign_parent(consumed[name][0])
self._check_late_binding_closure(node, defnode, scope_type)
self._check_late_binding_closure(node, defnode)
self._loopvar_name(node, name)
break
# mark the name as consumed if it's defined in this scope
......@@ -674,7 +774,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
# checks for use before assignment
defnode = assign_parent(to_consume[name][0])
if defnode is not None:
self._check_late_binding_closure(node, defnode, scope_type)
self._check_late_binding_closure(node, defnode)
defstmt = defnode.statement()
defframe = defstmt.frame()
maybee0601 = True
......@@ -696,10 +796,35 @@ builtins. Remember that you should avoid to define new builtins when possible.'
maybee0601 = not any(isinstance(child, astroid.Nonlocal)
and name in child.names
for child in defframe.get_children())
# Handle a couple of class scoping issues.
annotation_return = False
# The class reuses itself in the class scope.
recursive_klass = (frame is defframe and
defframe.parent_of(node) and
isinstance(defframe, astroid.Class) and
node.name == defframe.name)
if (self._to_consume[-1][-1] == 'lambda' and
isinstance(frame, astroid.Class)
and name in frame.locals):
maybee0601 = True
elif (isinstance(defframe, astroid.Class) and
isinstance(frame, astroid.Function)):
# Special rule for function return annotations,
# which uses the same name as the class where
# the function lives.
if (PY3K and node is frame.returns and
defframe.parent_of(frame.returns)):
maybee0601 = annotation_return = True
if (maybee0601 and defframe.name in defframe.locals and
defframe.locals[name][0].lineno < frame.lineno):
# Detect class assignments with the same
# name as the class. In this case, no warning
# should be raised.
maybee0601 = False
elif recursive_klass:
maybee0601 = True
else:
maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
......@@ -708,8 +833,11 @@ builtins. Remember that you should avoid to define new builtins when possible.'
and not are_exclusive(stmt, defstmt, ('NameError',
'Exception',
'BaseException'))):
if defstmt is stmt and isinstance(node, (astroid.DelName,
astroid.AssName)):
if recursive_klass or (defstmt is stmt and
isinstance(node, (astroid.DelName,
astroid.AssName))):
self.add_message('undefined-variable', args=name, node=node)
elif annotation_return:
self.add_message('undefined-variable', args=name, node=node)
elif self._to_consume[-1][-1] != 'lambda':
# E0601 may *not* occur in lambda scope.
......@@ -753,7 +881,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
for name, _ in node.names:
parts = name.split('.')
try:
module = node.infer_name_module(parts[0]).next()
module = next(node.infer_name_module(parts[0]))
except astroid.ResolveError:
continue
self._check_module_attrs(node, module, parts[1:])
......@@ -765,10 +893,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
level = getattr(node, 'level', None)
try:
module = node.root().import_module(name_parts[0], level=level)
except AstroidBuildingException:
return
except Exception, exc:
print 'Unhandled exception in VariablesChecker:', exc
except Exception: # pylint: disable=broad-except
return
module = self._check_module_attrs(node, module, name_parts[1:])
if not module:
......@@ -799,6 +924,11 @@ builtins. Remember that you should avoid to define new builtins when possible.'
"""
if infered is astroid.YES:
return
if (isinstance(infered.parent, astroid.Arguments) and
isinstance(node.value, astroid.Name) and
node.value.name == infered.parent.vararg):
# Variable-length argument, we can't determine the length.
return
if isinstance(infered, (astroid.Tuple, astroid.List)):
# attempt to check unpacking is properly balanced
values = infered.itered()
......@@ -841,7 +971,7 @@ builtins. Remember that you should avoid to define new builtins when possible.'
module = None
break
try:
module = module.getattr(name)[0].infer().next()
module = next(module.getattr(name)[0].infer())
if module is astroid.YES:
return None
except astroid.NotFoundError:
......
......@@ -17,6 +17,7 @@
* pylint.d (PYLINTHOME)
"""
from __future__ import with_statement
from __future__ import print_function
import pickle
import os
......@@ -52,7 +53,7 @@ def load_results(base):
try:
with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream)
except:
except Exception: # pylint: disable=broad-except
return {}
if sys.version_info < (3, 0):
......@@ -66,13 +67,13 @@ def save_results(results, base):
try:
os.mkdir(PYLINT_HOME)
except OSError:
print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME
print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream)
except (IOError, OSError), ex:
print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex)
except (IOError, OSError) as ex:
print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
# location of the configuration file ##########################################
......
......@@ -45,6 +45,7 @@ For example:
You may also use py_run to run pylint with desired options and get back (or not)
its output.
"""
from __future__ import print_function
import sys, os
import os.path as osp
......@@ -102,7 +103,7 @@ def lint(filename, options=None):
parts = line.split(":")
if parts and parts[0] == child_path:
line = ":".join([filename] + parts[1:])
print line,
print(line, end=' ')
process.wait()
return process.returncode
......@@ -113,18 +114,18 @@ def py_run(command_options='', return_std=False, stdout=None, stderr=None,
"""Run pylint from python
``command_options`` is a string containing ``pylint`` command line options;
``return_std`` (boolean) indicates return of created standart output
``return_std`` (boolean) indicates return of created standard output
and error (see below);
``stdout`` and ``stderr`` are 'file-like' objects in which standart output
``stdout`` and ``stderr`` are 'file-like' objects in which standard output
could be written.
Calling agent is responsible for stdout/err management (creation, close).
Default standart output and error are those from sys,
Default standard output and error are those from sys,
or standalone ones (``subprocess.PIPE``) are used
if they are not set and ``return_std``.
If ``return_std`` is set to ``True``, this function returns a 2-uple
containing standart output and error related to created process,
containing standard output and error related to created process,
as follows: ``(stdout, stderr)``.
A trivial usage could be as follows:
......@@ -133,14 +134,14 @@ def py_run(command_options='', return_std=False, stdout=None, stderr=None,
pylint 0.18.1,
...
To silently run Pylint on a module, and get its standart output and error:
To silently run Pylint on a module, and get its standard output and error:
>>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
"""
# Create command line to call pylint
if os.name == 'nt':
script += '.bat'
command_line = script + ' ' + command_options
# Providing standart output and/or error if not set
# Providing standard output and/or error if not set
if stdout is None:
if return_std:
stdout = PIPE
......@@ -155,17 +156,17 @@ def py_run(command_options='', return_std=False, stdout=None, stderr=None,
p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr,
env=_get_env(), universal_newlines=True)
p.wait()
# Return standart output and error
# Return standard output and error
if return_std:
return (p.stdout, p.stderr)
def Run():
if len(sys.argv) == 1:
print "Usage: %s <filename> [options]" % sys.argv[0]
print("Usage: %s <filename> [options]" % sys.argv[0])
sys.exit(1)
elif not osp.exists(sys.argv[1]):
print "%s does not exist" % sys.argv[1]
print("%s does not exist" % sys.argv[1])
sys.exit(1)
else:
sys.exit(lint(sys.argv[1], sys.argv[2:]))
......
......@@ -14,17 +14,24 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Tkinker gui for pylint"""
from __future__ import print_function
import os
import sys
import re
import Queue
from threading import Thread
from Tkinter import (Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
Checkbutton, Radiobutton, IntVar, StringVar)
from Tkinter import (TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
HORIZONTAL, DISABLED, NORMAL, W)
from tkFileDialog import askopenfilename, askdirectory
import six
from six.moves.tkinter import (
Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
Checkbutton, Radiobutton, IntVar, StringVar,
TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
HORIZONTAL, DISABLED, NORMAL, W,
)
from six.moves.tkinter_tkfiledialog import (
askopenfilename, askdirectory,
)
import pylint.lint
from pylint.reporters.guireporter import GUIReporter
......@@ -86,7 +93,7 @@ class BasicStream(object):
"""finalize what the contents of the dict should look like before output"""
for item in self.outdict:
num_empty = self.outdict[item].count('')
for _ in xrange(num_empty):
for _ in range(num_empty):
self.outdict[item].remove('')
if self.outdict[item]:
self.outdict[item].pop(0)
......@@ -97,7 +104,7 @@ class BasicStream(object):
self.gui.tabs = self.outdict
try:
self.gui.rating.set(self.outdict['Global evaluation'][0])
except:
except KeyError:
self.gui.rating.set('Error')
self.gui.refresh_results_window()
......@@ -118,7 +125,7 @@ class LintGui(object):
#reporter
self.reporter = None
#message queue for output from reporter
self.msg_queue = Queue.Queue()
self.msg_queue = six.moves.queue.Queue()
self.msgs = []
self.visible_msgs = []
self.filenames = []
......@@ -321,7 +328,7 @@ class LintGui(object):
self.txt_module.focus_set()
def select_recent_file(self, event):
def select_recent_file(self, event): # pylint: disable=unused-argument
"""adds the selected file in the history listbox to the Module box"""
if not self.showhistory.size():
return
......@@ -352,7 +359,7 @@ class LintGui(object):
try:
for res in self.tabs[self.box.get()]:
self.results.insert(END, res)
except:
except KeyError:
pass
def process_incoming(self):
......@@ -375,7 +382,7 @@ class LintGui(object):
fg_color = COLORS.get(msg_str[:3], 'black')
self.lb_messages.itemconfigure(END, fg=fg_color)
except Queue.Empty:
except six.moves.queue.Empty:
pass
return True
......@@ -395,7 +402,7 @@ class LintGui(object):
"""quit the application"""
self.root.quit()
def halt(self):
def halt(self): # pylint: disable=no-self-use
"""program halt placeholder"""
return
......@@ -476,7 +483,7 @@ class LintGui(object):
self.root.configure(cursor='')
def show_sourcefile(self, event=None):
def show_sourcefile(self, event=None): # pylint: disable=unused-argument
selected = self.lb_messages.curselection()
if not selected:
return
......@@ -503,7 +510,7 @@ def lint_thread(module, reporter, gui):
def Run(args):
"""launch pylint gui from args"""
if args:
print 'USAGE: pylint-gui\n launch a simple pylint gui using Tk'
print('USAGE: pylint-gui\n launch a simple pylint gui using Tk')
sys.exit(1)
gui = LintGui()
gui.mainloop()
......
......@@ -10,10 +10,22 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Interfaces for PyLint objects"""
"""Interfaces for Pylint objects"""
from collections import namedtuple
from logilab.common.interface import Interface
Confidence = namedtuple('Confidence', ['name', 'description'])
# Warning Certainties
HIGH = Confidence('HIGH', 'No false positive possible.')
INFERENCE = Confidence('INFERENCE', 'Warning based on inference result.')
INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
'Warning based on inference with failures.')
UNDEFINED = Confidence('UNDEFINED',
'Warning without any associated confidence level.')
CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
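# Minimal sketch of how a checker attaches one of these levels to a message
# (checker code illustrative only; the variables checker later in this patch
# does exactly this for unused-argument):
#     from pylint.interfaces import INFERENCE
#     ...
#     self.add_message('unused-argument', args=name, node=stmt,
#                      confidence=INFERENCE)
# Output can then be narrowed with the new command line option, e.g.
#     pylint --confidence=HIGH,INFERENCE some_module.py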
class IChecker(Interface):
"""This is an base interface, not designed to be used elsewhere than for
......
......@@ -25,6 +25,7 @@
Display help messages about given message identifiers and exit.
"""
from __future__ import print_function
# import this first to avoid builtin namespace pollution
from pylint.checkers import utils #pylint: disable=unused-import
......@@ -32,16 +33,24 @@ from pylint.checkers import utils #pylint: disable=unused-import
import sys
import os
import tokenize
from collections import defaultdict
from contextlib import contextmanager
from operator import attrgetter
from warnings import warn
from logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn
from itertools import chain
try:
import multiprocessing
except ImportError:
multiprocessing = None
import six
from logilab.common.configuration import (
UnsupportedAction, OptionsManagerMixIn)
from logilab.common.optik_ext import check_csv
from logilab.common.interface import implements
from logilab.common.textutils import splitstrip, unquote
from logilab.common.ureports import Table, Text, Section
from logilab.common.__pkginfo__ import version as common_version
from astroid import MANAGER, AstroidBuildingException
from astroid.__pkginfo__ import version as astroid_version
from astroid.modutils import load_module_from_name, get_module_part
......@@ -50,21 +59,37 @@ from pylint.utils import (
MSG_TYPES, OPTION_RGX,
PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn, ReportsHandlerMixIn,
MessagesStore, FileState, EmptyReport,
expand_modules, tokenize_module)
from pylint.interfaces import IRawChecker, ITokenChecker, IAstroidChecker
expand_modules, tokenize_module, Message)
from pylint.interfaces import IRawChecker, ITokenChecker, IAstroidChecker, CONFIDENCE_LEVELS
from pylint.checkers import (BaseTokenChecker,
table_lines_from_stats,
initialize as checkers_initialize)
from pylint.reporters import initialize as reporters_initialize
from pylint.reporters import initialize as reporters_initialize, CollectingReporter
from pylint import config
from pylint.__pkginfo__ import version
def _get_new_args(message):
location = (
message.abspath,
message.path,
message.module,
message.obj,
message.line,
message.column,
)
return (
message.msg_id,
message.symbol,
location,
message.msg,
message.confidence,
)
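# The flattened tuples built here are what actually crosses the
# multiprocessing queue; rough sketch of the round trip (both halves appear
# further down in this patch):
#     child:   msgs = [_get_new_args(m) for m in linter.reporter.messages]
#              results_queue.put((file_or_module, ..., msgs, ...))
#     parent:  for msg in messages:
#                  msg = Message(*msg)      # rebuild the Message object
#                  self.reporter.handle_message(msg)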
def _get_python_path(filepath):
dirname = os.path.dirname(os.path.realpath(
os.path.expanduser(filepath)))
dirname = os.path.realpath(os.path.expanduser(filepath))
if not os.path.isdir(dirname):
dirname = os.path.dirname(dirname)
while True:
if not os.path.exists(os.path.join(dirname, "__init__.py")):
return dirname
......@@ -74,6 +99,20 @@ def _get_python_path(filepath):
return os.getcwd()
def _merge_stats(stats):
merged = {}
for stat in stats:
for key, item in six.iteritems(stat):
if key not in merged:
merged[key] = item
else:
if isinstance(item, dict):
merged[key].update(item)
else:
merged[key] = merged[key] + item
return merged
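# Sketch of the merge for two child results (numbers invented purely for
# illustration): dict-valued entries are update()d together, numeric ones
# are summed.
#     _merge_stats([{'error': 1, 'by_msg': {'E0602': 1}},
#                   {'error': 2, 'by_msg': {'W0611': 3}}])
#     -> {'error': 3, 'by_msg': {'E0602': 1, 'W0611': 3}}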
# Python Linter class #########################################################
MSGS = {
......@@ -147,12 +186,63 @@ MSGS = {
def _deprecated_option(shortname, opt_type):
def _warn_deprecated(option, optname, *args):
def _warn_deprecated(option, optname, *args): # pylint: disable=unused-argument
sys.stderr.write('Warning: option %s is deprecated and ignored.\n' % (optname,))
return {'short': shortname, 'help': 'DEPRECATED', 'hide': True,
'type': opt_type, 'action': 'callback', 'callback': _warn_deprecated}
if multiprocessing is not None:
class ChildLinter(multiprocessing.Process): # pylint: disable=no-member
def run(self):
tasks_queue, results_queue, config = self._args # pylint: disable=no-member
for file_or_module in iter(tasks_queue.get, 'STOP'):
result = self._run_linter(config, file_or_module[0])
try:
results_queue.put(result)
except Exception as ex:
print("internal error with sending report for module %s" % file_or_module, file=sys.stderr)
print(ex, file=sys.stderr)
results_queue.put({})
def _run_linter(self, config, file_or_module):
linter = PyLinter()
# Register standard checkers.
linter.load_default_plugins()
# Load command line plugins.
# TODO linter.load_plugin_modules(self._plugins)
linter.disable('pointless-except')
linter.disable('suppressed-message')
linter.disable('useless-suppression')
# TODO(cpopa): the sub-linters will not know all the options
# because they are not available here, as they are patches to
# PyLinter options. The following is just a hack to handle
# just a part of the options available in the Run class.
if 'disable_msg' in config:
# Disable everything again. We don't have access
# to the original linter though.
for msgid in config['disable_msg']:
linter.disable(msgid)
for key in set(config) - set(dict(linter.options)):
del config[key]
config['jobs'] = 1 # Child does not parallelize any further.
linter.load_configuration(**config)
linter.set_reporter(CollectingReporter())
# Run the checks.
linter.check(file_or_module)
msgs = [_get_new_args(m) for m in linter.reporter.messages]
return (file_or_module, linter.file_state.base_name, linter.current_name,
msgs, linter.stats, linter.msg_status)
class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
BaseTokenChecker):
"""lint Python modules using external checkers.
......@@ -174,7 +264,6 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
priority = 0
level = 0
msgs = MSGS
may_be_disabled = False
@staticmethod
def make_options():
......@@ -238,6 +327,15 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
'help' : 'Add a comment according to your evaluation note. '
'This is used by the global evaluation report (RP0004).'}),
('confidence',
{'type' : 'multiple_choice', 'metavar': '<levels>',
'default': '',
'choices': [c.name for c in CONFIDENCE_LEVELS],
'group': 'Messages control',
'help' : 'Only show warnings with the listed confidence levels.'
' Leave empty to show all. Valid levels: %s' % (
', '.join(c.name for c in CONFIDENCE_LEVELS),)}),
('enable',
{'type' : 'csv', 'metavar': '<msg ids>',
'short': 'e',
......@@ -275,6 +373,27 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
('include-ids', _deprecated_option('i', 'yn')),
('symbols', _deprecated_option('s', 'yn')),
('jobs',
{'type' : 'int', 'metavar': '<n-processes>',
'short': 'j',
'default': 1,
'help' : '''Use multiple processes to speed up Pylint.''',
}),
('unsafe-load-any-extension',
{'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True,
'help': ('Allow loading of arbitrary C extensions. Extensions'
' are imported into the active Python interpreter and'
' may run arbitrary code.')}),
('extension-pkg-whitelist',
{'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [],
'help': ('A comma-separated list of package or module names'
' from where C extensions may be loaded. Extensions are'
' loaded into the active Python interpreter and may run'
' arbitrary code')}
),
)
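# Usage sketch for the new jobs option (package name illustrative):
#     pylint --jobs=4 mypackage     # lint with four worker processes
#     pylint -j 0 mypackage         # 0 means use multiprocessing.cpu_count()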
option_groups = (
......@@ -291,7 +410,8 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
self.reporter = None
self._reporter_name = None
self._reporters = {}
self._checkers = {}
self._checkers = defaultdict(list)
self._pragma_lineno = {}
self._ignore_file = False
# visit variables
self.file_state = FileState()
......@@ -338,17 +458,6 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
if not self.reporter:
self._load_reporter()
def prepare_import_path(self, args):
"""Prepare sys.path for running the linter checks."""
if len(args) == 1:
sys.path.insert(0, _get_python_path(args[0]))
else:
sys.path.insert(0, os.getcwd())
def cleanup_import_path(self):
"""Revert any changes made to sys.path in prepare_import_path."""
sys.path.pop(0)
def load_plugin_modules(self, modnames):
"""take a list of module names which are pylint plugins and load
and register them
......@@ -404,12 +513,24 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
try:
BaseTokenChecker.set_option(self, optname, value, action, optdict)
except UnsupportedAction:
print >> sys.stderr, 'option %s can\'t be read from config file' % \
optname
print('option %s can\'t be read from config file' % \
optname, file=sys.stderr)
def register_reporter(self, reporter_class):
self._reporters[reporter_class.name] = reporter_class
def report_order(self):
reports = sorted(self._reports, key=lambda x: getattr(x, 'name', ''))
try:
# Remove the current reporter and add it
# at the end of the list.
reports.pop(reports.index(self))
except ValueError:
pass
else:
reports.append(self)
return reports
# checkers manipulation methods ############################################
def register_checker(self, checker):
......@@ -418,7 +539,7 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
checker is an object implementing IRawChecker or / and IAstroidChecker
"""
assert checker.priority <= 0, 'checker priority can\'t be >= 0'
self._checkers.setdefault(checker.name, []).append(checker)
self._checkers[checker.name].append(checker)
for r_id, r_title, r_cb in checker.reports:
self.register_report(r_id, r_title, r_cb, checker)
self.register_options_provider(checker)
......@@ -426,8 +547,13 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
self.msgs_store.register_messages(checker)
checker.load_defaults()
# Register the checker, but disable all of its messages.
# TODO(cpopa): we should have a better API for this.
if not getattr(checker, 'enabled', True):
self.disable(checker.name)
def disable_noerror_messages(self):
for msgcat, msgids in self.msgs_store._msgs_by_category.iteritems():
for msgcat, msgids in six.iteritems(self.msgs_store._msgs_by_category):
if msgcat == 'E':
for msgid in msgids:
self.enable(msgid)
......@@ -437,7 +563,7 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
def disable_reporters(self):
"""disable all reporters"""
for reporters in self._reports.itervalues():
for reporters in six.itervalues(self._reports):
for report_id, _title, _cb in reporters:
self.disable_report(report_id)
......@@ -456,6 +582,7 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
"""process tokens from the current module to search for module/block
level options
"""
control_pragmas = {'disable', 'enable'}
for (tok_type, content, start, _, _) in tokens:
if tok_type != tokenize.COMMENT:
continue
......@@ -485,6 +612,10 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
# found a "(dis|en)able-msg" pragma deprecated suppresssion
self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', '')))
for msgid in splitstrip(value):
# Add the line where a control pragma was encountered.
if opt in control_pragmas:
self._pragma_lineno[msgid] = start[0]
try:
if (opt, msgid) == ('disable', 'all'):
self.add_message('deprecated-pragma', line=start[0], args=('disable=all', 'skip-file'))
......@@ -502,7 +633,7 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
def get_checkers(self):
"""return all available checkers as a list"""
return [self] + [c for checkers in self._checkers.itervalues()
return [self] + [c for checkers in six.itervalues(self._checkers)
for c in checkers if c is not self]
def prepare_checkers(self):
......@@ -523,7 +654,7 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
reverse=True)
return neededcheckers
def should_analyze_file(self, modname, path): # pylint: disable=unused-argument
def should_analyze_file(self, modname, path): # pylint: disable=unused-argument, no-self-use
"""Returns whether or not a module should be checked.
This implementation returns True for all python source files, indicating
......@@ -551,6 +682,99 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
if not isinstance(files_or_modules, (list, tuple)):
files_or_modules = (files_or_modules,)
if self.config.jobs == 1:
self._do_check(files_or_modules)
else:
# Hack that permits running pylint, on Windows, with -m switch
# and with --jobs, as in 'py -2 -m pylint .. --jobs'.
# For more details why this is needed,
# see Python issue http://bugs.python.org/issue10845.
mock_main = six.PY2 and __name__ != '__main__' # -m switch
if mock_main:
sys.modules['__main__'] = sys.modules[__name__]
try:
self._parallel_check(files_or_modules)
finally:
if mock_main:
sys.modules.pop('__main__')
def _parallel_task(self, files_or_modules):
# Prepare configuration for child linters.
config = vars(self.config)
childs = []
manager = multiprocessing.Manager() # pylint: disable=no-member
tasks_queue = manager.Queue() # pylint: disable=no-member
results_queue = manager.Queue() # pylint: disable=no-member
for _ in range(self.config.jobs):
cl = ChildLinter(args=(tasks_queue, results_queue, config))
cl.start() # pylint: disable=no-member
childs.append(cl)
# send files to child linters
for files_or_module in files_or_modules:
tasks_queue.put([files_or_module])
# collect results from child linters
failed = False
for _ in files_or_modules:
try:
result = results_queue.get()
except Exception as ex:
print("internal error while receiving results from child linter",
file=sys.stderr)
print(ex, file=sys.stderr)
failed = True
break
yield result
# Stop child linters and wait for their completion.
for _ in range(self.config.jobs):
tasks_queue.put('STOP')
for cl in childs:
cl.join()
if failed:
print("Error occured, stopping the linter.", file=sys.stderr)
sys.exit(32)
def _parallel_check(self, files_or_modules):
# Reset stats.
self.open()
all_stats = []
for result in self._parallel_task(files_or_modules):
(
file_or_module,
self.file_state.base_name,
module,
messages,
stats,
msg_status
) = result
if file_or_module == files_or_modules[-1]:
last_module = module
for msg in messages:
msg = Message(*msg)
self.set_current_module(module)
self.reporter.handle_message(msg)
all_stats.append(stats)
self.msg_status |= msg_status
self.stats = _merge_stats(chain(all_stats, [self.stats]))
self.current_name = last_module
# Insert stats data to local checkers.
for checker in self.get_checkers():
if checker is not self:
checker.stats = self.stats
def _do_check(self, files_or_modules):
walker = PyLintASTWalker(self)
checkers = self.prepare_checkers()
tokencheckers = [c for c in checkers if implements(c, ITokenChecker)
......@@ -587,7 +811,6 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
for msgid, line, args in self.file_state.iter_spurious_suppression_messages(self.msgs_store):
self.add_message(msgid, line, None, args)
# notify global end
self.set_current_module('')
self.stats['statement'] = walker.nbstatements
checkers.reverse()
for checker in checkers:
......@@ -617,28 +840,42 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
self.current_file = filepath or modname
self.stats['by_module'][modname] = {}
self.stats['by_module'][modname]['statement'] = 0
for msg_cat in MSG_TYPES.itervalues():
for msg_cat in six.itervalues(MSG_TYPES):
self.stats['by_module'][modname][msg_cat] = 0
def get_ast(self, filepath, modname):
"""return a ast(roid) representation for a module"""
try:
return MANAGER.ast_from_file(filepath, modname, source=True)
except SyntaxError, ex:
except SyntaxError as ex:
self.add_message('syntax-error', line=ex.lineno, args=ex.msg)
except AstroidBuildingException, ex:
except AstroidBuildingException as ex:
self.add_message('parse-error', args=ex)
except Exception, ex:
except Exception as ex: # pylint: disable=broad-except
import traceback
traceback.print_exc()
self.add_message('astroid-error', args=(ex.__class__, ex))
def check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers):
"""check a module from its astroid representation, real work"""
try:
return self._check_astroid_module(astroid, walker,
rawcheckers, tokencheckers)
finally:
# Close file_stream, if opened, to avoid to open many files.
if astroid.file_stream:
astroid.file_stream.close()
# TODO(cpopa): This is an implementation detail, but it will
# be moved in astroid at some point.
# We invalidate the cached property, to let the other
# modules which rely on this one get a new file stream.
del astroid.file_stream
def _check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers):
# call raw checkers if possible
try:
tokens = tokenize_module(astroid)
except tokenize.TokenError, ex:
except tokenize.TokenError as ex:
self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
return
......@@ -669,10 +906,12 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
self.stats = {'by_module' : {},
'by_msg' : {},
}
for msg_cat in MSG_TYPES.itervalues():
MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist)
for msg_cat in six.itervalues(MSG_TYPES):
self.stats[msg_cat] = 0
def close(self):
def generate_reports(self):
"""close the whole package /module, it's time to make reports !
if persistent run, pickle results for later comparison
......@@ -695,6 +934,11 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
if self.config.persistent:
config.save_results(self.stats, self.file_state.base_name)
else:
if self.config.output_format == 'html':
# No output will be emitted for the html
# reporter if the file doesn't exist, so emit
# the results here.
self.reporter.display_results(Section())
self.reporter.on_close(self.stats, {})
# specific reports ########################################################
......@@ -708,8 +952,8 @@ class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
# get a global note for the code
evaluation = self.config.evaluation
try:
note = eval(evaluation, {}, self.stats)
except Exception, ex:
note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
except Exception as ex: # pylint: disable=broad-except
msg = 'An exception occurred while rating: %s' % ex
else:
stats['global_note'] = note
......@@ -737,7 +981,7 @@ def report_messages_stats(sect, stats, _):
# don't print this report when we didn't detected any errors
raise EmptyReport()
in_order = sorted([(value, msg_id)
for msg_id, value in stats['by_msg'].iteritems()
for msg_id, value in six.iteritems(stats['by_msg'])
if not msg_id.startswith('I')])
in_order.reverse()
lines = ('message id', 'occurrences')
......@@ -750,18 +994,18 @@ def report_messages_by_module_stats(sect, stats, _):
if len(stats['by_module']) == 1:
# don't print this report when we are analysing a single module
raise EmptyReport()
by_mod = {}
by_mod = defaultdict(dict)
for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
total = stats[m_type]
for module in stats['by_module'].iterkeys():
for module in six.iterkeys(stats['by_module']):
mod_total = stats['by_module'][module][m_type]
if total == 0:
percent = 0
else:
percent = float((mod_total)*100) / total
by_mod.setdefault(module, {})[m_type] = percent
by_mod[module][m_type] = percent
sorted_result = []
for module, mod_info in by_mod.iteritems():
for module, mod_info in six.iteritems(by_mod):
sorted_result.append((mod_info['error'],
mod_info['warning'],
mod_info['refactor'],
......@@ -771,8 +1015,9 @@ def report_messages_by_module_stats(sect, stats, _):
sorted_result.reverse()
lines = ['module', 'error', 'warning', 'refactor', 'convention']
for line in sorted_result:
if line[0] == 0 and line[1] == 0:
break
# Don't report clean modules.
if all(entry == 0 for entry in line[:-1]):
continue
lines.append(line[-1])
for val in line[:-1]:
lines.append('%.2f' % val)
......@@ -783,12 +1028,6 @@ def report_messages_by_module_stats(sect, stats, _):
# utilities ###################################################################
# this may help to import modules using gettext
# XXX syt, actually needed since we don't import code?
from logilab.common.compat import builtins
builtins._ = str
class ArgumentPreprocessingError(Exception):
"""Raised if an error occurs during argument preprocessing."""
......@@ -828,6 +1067,31 @@ def preprocess_options(args, search_for):
else:
i += 1
@contextmanager
def fix_import_path(args):
"""Prepare sys.path for running the linter checks.
Within this context, each of the given arguments is importable.
Paths are added to sys.path in corresponding order to the arguments.
We avoid adding duplicate directories to sys.path.
`sys.path` is reset to its original value upon exiting this context.
"""
orig = list(sys.path)
changes = []
for arg in args:
path = _get_python_path(arg)
if path in changes:
continue
else:
changes.append(path)
sys.path[:] = changes + sys.path
try:
yield
finally:
sys.path[:] = orig
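# Usage sketch (mirrors the call made later in Run; paths illustrative):
#     with fix_import_path(['pkg/mod.py', 'other.py']):
#         linter.check(['pkg/mod.py', 'other.py'])
#     # sys.path is back to its original value here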
class Run(object):
"""helper class to use as main for pylint :
......@@ -849,8 +1113,8 @@ group are mutually exclusive.'),
'rcfile': (self.cb_set_rcfile, True),
'load-plugins': (self.cb_add_plugins, True),
})
except ArgumentPreprocessingError, ex:
print >> sys.stderr, ex
except ArgumentPreprocessingError as ex:
print(ex, file=sys.stderr)
sys.exit(32)
self.linter = linter = self.LinterClass((
......@@ -879,6 +1143,12 @@ group are mutually exclusive.'),
'group': 'Commands', 'level': 1,
'help' : "Generate pylint's messages."}),
('list-conf-levels',
{'action' : 'callback',
'callback' : cb_list_confidence_levels,
'group': 'Commands', 'level': 1,
'help' : "Generate pylint's messages."}),
('full-documentation',
{'action' : 'callback', 'metavar': '<msg-id>',
'callback' : self.cb_full_documentation,
......@@ -905,6 +1175,12 @@ group are mutually exclusive.'),
'disabled and for others, only the ERROR messages are '
'displayed, and no reports are done by default'''}),
('py3k',
{'action' : 'callback', 'callback' : self.cb_python3_porting_mode,
'help' : 'In Python 3 porting mode, all checkers will be '
'disabled and only messages emitted by the porting '
'checker will be displayed'}),
('profile',
{'type' : 'yn', 'metavar' : '<y_or_n>',
'default': False, 'hide': True,
......@@ -968,28 +1244,42 @@ group are mutually exclusive.'),
linter.set_reporter(reporter)
try:
args = linter.load_command_line_configuration(args)
except SystemExit, exc:
except SystemExit as exc:
if exc.code == 2: # bad options
exc.code = 32
raise
if not args:
print linter.help()
print(linter.help())
sys.exit(32)
if linter.config.jobs < 0:
print("Jobs number (%d) should be greater than 0"
% linter.config.jobs, file=sys.stderr)
sys.exit(32)
if linter.config.jobs > 1 or linter.config.jobs == 0:
if multiprocessing is None:
print("Multiprocessing library is missing, "
"fallback to single process", file=sys.stderr)
linter.set_option("jobs", 1)
else:
if linter.config.jobs == 0:
linter.config.jobs = multiprocessing.cpu_count()
# insert current working directory to the python path to have a correct
# behaviour
linter.prepare_import_path(args)
if self.linter.config.profile:
print >> sys.stderr, '** profiled run'
import cProfile, pstats
cProfile.runctx('linter.check(%r)' % args, globals(), locals(),
'stones.prof')
data = pstats.Stats('stones.prof')
data.strip_dirs()
data.sort_stats('time', 'calls')
data.print_stats(30)
else:
linter.check(args)
linter.cleanup_import_path()
with fix_import_path(args):
if self.linter.config.profile:
print('** profiled run', file=sys.stderr)
import cProfile, pstats
cProfile.runctx('linter.check(%r)' % args, globals(), locals(),
'stones.prof')
data = pstats.Stats('stones.prof')
data.strip_dirs()
data.sort_stats('time', 'calls')
data.print_stats(30)
else:
linter.check(args)
linter.generate_reports()
if exit:
sys.exit(self.linter.msg_status)
......@@ -1037,9 +1327,20 @@ group are mutually exclusive.'),
self.linter.msgs_store.list_messages()
sys.exit(0)
def cb_python3_porting_mode(self, *args, **kwargs):
"""Activate only the python3 porting checker."""
self.linter.disable('all')
self.linter.enable('python3')
def cb_list_confidence_levels(option, optname, value, parser):
for level in CONFIDENCE_LEVELS:
print('%-18s: %s' % level)
sys.exit(0)
def cb_init_hook(optname, value):
"""exec arbitrary code to set sys.path for instance"""
exec value
exec(value) # pylint: disable=exec-used
if __name__ == '__main__':
......
......@@ -99,8 +99,8 @@ class DiaDefGenerator(object):
"""return associated nodes of a class node"""
if level == 0:
return
for ass_nodes in klass_node.instance_attrs_type.values() + \
klass_node.locals_type.values():
for ass_nodes in list(klass_node.instance_attrs_type.values()) + \
list(klass_node.locals_type.values()):
for ass_node in ass_nodes:
if isinstance(ass_node, astroid.Instance):
ass_node = ass_node._proxied
......@@ -145,7 +145,7 @@ class DefaultDiadefGenerator(LocalsVisitor, DiaDefGenerator):
self.pkgdiagram = None
self.classdiagram = ClassDiagram('classes %s' % node.name, mode)
def leave_project(self, node):
def leave_project(self, node): # pylint: disable=unused-argument
"""leave the astroid.Project node
return the generated diagram definition
......@@ -198,7 +198,7 @@ class ClassDiadefGenerator(DiaDefGenerator):
else:
module = project.modules[0]
klass = klass.split('.')[-1]
klass = module.ilookup(klass).next()
klass = next(module.ilookup(klass))
anc_level, ass_level = self._get_levels()
self.extract_classes(klass, anc_level, ass_level)
......
......@@ -77,8 +77,8 @@ class ClassDiagram(Figure, FilterMixIn):
def get_attrs(self, node):
"""return visible attributes, possibly with class name"""
attrs = []
for node_name, ass_nodes in node.instance_attrs_type.items() + \
node.locals_type.items():
for node_name, ass_nodes in list(node.instance_attrs_type.items()) + \
list(node.locals_type.items()):
if not self.show_attr(node_name):
continue
names = self.class_names(ass_nodes)
......@@ -170,8 +170,8 @@ class ClassDiagram(Figure, FilterMixIn):
except KeyError:
continue
# associations link
for name, values in node.instance_attrs_type.items() + \
node.locals_type.items():
for name, values in list(node.instance_attrs_type.items()) + \
list(node.locals_type.items()):
for value in values:
if value is astroid.YES:
continue
......
......@@ -18,6 +18,7 @@
create UML diagrams for classes and modules in <packages>
"""
from __future__ import print_function
import sys, os
from logilab.common.configuration import ConfigurationMixIn
......@@ -99,7 +100,7 @@ class Run(ConfigurationMixIn):
def run(self, args):
"""checking arguments and run project"""
if not args:
print self.help()
print(self.help())
return 1
# insert current working directory to the python path to recognize
# dependencies to local modules even if cwd is not in the PYTHONPATH
......
......@@ -16,6 +16,7 @@
"""
generic classes/functions for pyreverse core/extensions
"""
from __future__ import print_function
import sys
import re
......@@ -118,8 +119,8 @@ class FilterMixIn(object):
for nummod in mode.split('+'):
try:
__mode += MODES[nummod]
except KeyError, ex:
print >> sys.stderr, 'Unknown filter mode %s' % ex
except KeyError as ex:
print('Unknown filter mode %s' % ex, file=sys.stderr)
self.__mode = __mode
......@@ -127,5 +128,5 @@ class FilterMixIn(object):
"""return true if the node should be treated
"""
visibility = get_visibility(getattr(node, 'name', node))
return not (self.__mode & VIS_MOD[visibility])
return not self.__mode & VIS_MOD[visibility]
......@@ -12,12 +12,12 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""utilities methods and classes for reporters"""
from __future__ import print_function
import sys
import locale
import os
from pylint.utils import MSG_TYPES
from pylint import utils
......@@ -25,13 +25,9 @@ CMPS = ['=', '-', '+']
# py3k has no more cmp builtin
if sys.version_info >= (3, 0):
def cmp(a, b):
def cmp(a, b): # pylint: disable=redefined-builtin
return (a > b) - (a < b)
if sys.version_info < (2, 6):
import stringformat
stringformat.init(True)
def diff_string(old, new):
"""given a old and new int value, return a string representing the
difference
......@@ -41,27 +37,6 @@ def diff_string(old, new):
return diff_str
class Message(object):
"""This class represent a message to be issued by the reporters"""
def __init__(self, reporter, msg_id, location, msg):
self.msg_id = msg_id
self.abspath, self.module, self.obj, self.line, self.column = location
self.path = self.abspath.replace(reporter.path_strip_prefix, '')
self.msg = msg
self.C = msg_id[0]
self.category = MSG_TYPES[msg_id[0]]
self.symbol = reporter.linter.msgs_store.check_message_id(msg_id).symbol
def format(self, template):
"""Format the message according to the given template.
The template format is the one of the format method :
cf. http://docs.python.org/2/library/string.html#formatstrings
"""
return template.format(**(self.__dict__))
class BaseReporter(object):
"""base class for reporters
......@@ -82,9 +57,16 @@ class BaseReporter(object):
# Build the path prefix to strip to get relative paths
self.path_strip_prefix = os.getcwd() + os.sep
def handle_message(self, msg):
"""Handle a new message triggered on the current file.
Invokes the legacy add_message API by default."""
self.add_message(
msg.msg_id, (msg.abspath, msg.module, msg.obj, msg.line, msg.column),
msg.msg)
def add_message(self, msg_id, location, msg):
"""Client API to send a message"""
# Shall we store the message objects somewhere, do some validity checking ?
"""Deprecated, do not use."""
raise NotImplementedError
def set_output(self, output=None):
......@@ -109,7 +91,7 @@ class BaseReporter(object):
def writeln(self, string=''):
"""write a line in the output buffer"""
print >> self.out, self.encode(string)
print(self.encode(string), file=self.out)
def display_results(self, layout):
"""display results encapsulated in the layout tree"""
......@@ -133,6 +115,19 @@ class BaseReporter(object):
pass
class CollectingReporter(BaseReporter):
"""collects messages"""
name = 'collector'
def __init__(self):
BaseReporter.__init__(self)
self.messages = []
def handle_message(self, msg):
self.messages.append(msg)
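# Usage sketch (this is how the child linters in lint.py consume it; the
# module name is illustrative):
#     linter.set_reporter(CollectingReporter())
#     linter.check('some_module')
#     for msg in linter.reporter.messages:
#         ...  # Message objects, in the order they were emitted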
def initialize(linter):
"""initialize linter with reporters in this package """
utils.register_plugins(linter, __path__[0])
......@@ -3,7 +3,7 @@
import sys
from pylint.interfaces import IReporter
from pylint.reporters import BaseReporter, Message
from pylint.reporters import BaseReporter
from logilab.common.ureports import TextWriter
......@@ -18,10 +18,9 @@ class GUIReporter(BaseReporter):
BaseReporter.__init__(self, output)
self.gui = gui
def add_message(self, msg_id, location, msg):
def handle_message(self, msg):
"""manage message of different type and in the context of path"""
message = Message(self, msg_id, location, msg)
self.gui.msg_queue.put(message)
self.gui.msg_queue.put(msg)
def _display(self, layout):
"""launch layouts display"""
......
......@@ -19,7 +19,7 @@ from cgi import escape
from logilab.common.ureports import HTMLWriter, Section, Table
from pylint.interfaces import IReporter
from pylint.reporters import BaseReporter, Message
from pylint.reporters import BaseReporter
class HTMLReporter(BaseReporter):
......@@ -33,9 +33,8 @@ class HTMLReporter(BaseReporter):
BaseReporter.__init__(self, output)
self.msgs = []
def add_message(self, msg_id, location, msg):
def handle_message(self, msg):
"""manage message of different type and in the context of path"""
msg = Message(self, msg_id, location, msg)
self.msgs += (msg.category, msg.module, msg.obj,
str(msg.line), str(msg.column), escape(msg.msg))
......
......@@ -16,6 +16,7 @@
:text: the default one grouping messages by module
:colorized: an ANSI colorized text reporter
"""
from __future__ import print_function
import warnings
......@@ -23,7 +24,8 @@ from logilab.common.ureports import TextWriter
from logilab.common.textutils import colorize_ansi
from pylint.interfaces import IReporter
from pylint.reporters import BaseReporter, Message
from pylint.reporters import BaseReporter
import six
TITLE_UNDERLINES = ['', '=', '-', '.']
......@@ -42,26 +44,25 @@ class TextReporter(BaseReporter):
self._template = None
def on_set_current_module(self, module, filepath):
self._template = unicode(self.linter.config.msg_template or self.line_format)
self._template = six.text_type(self.linter.config.msg_template or self.line_format)
def write_message(self, msg):
"""Convenience method to write a formated message with class default template"""
self.writeln(msg.format(self._template))
def add_message(self, msg_id, location, msg):
def handle_message(self, msg):
"""manage message of different type and in the context of path"""
m = Message(self, msg_id, location, msg)
if m.module not in self._modules:
if m.module:
self.writeln('************* Module %s' % m.module)
self._modules.add(m.module)
if msg.module not in self._modules:
if msg.module:
self.writeln('************* Module %s' % msg.module)
self._modules.add(msg.module)
else:
self.writeln('************* ')
self.write_message(m)
self.write_message(msg)
def _display(self, layout):
"""launch layouts display"""
print >> self.out
print(file=self.out)
TextWriter().format(layout, self.out)
......@@ -114,11 +115,10 @@ class ColorizedTextReporter(TextReporter):
except KeyError:
return None, None
def add_message(self, msg_id, location, msg):
def handle_message(self, msg):
"""manage message of different types, and colorize output
using ansi escape codes
"""
msg = Message(self, msg_id, location, msg)
if msg.module not in self._modules:
color, style = self._get_decoration('S')
if msg.module:
......@@ -130,8 +130,10 @@ class ColorizedTextReporter(TextReporter):
self.writeln(modsep)
self._modules.add(msg.module)
color, style = self._get_decoration(msg.C)
for attr in ('msg', 'symbol', 'category', 'C'):
setattr(msg, attr, colorize_ansi(getattr(msg, attr), color, style))
msg = msg._replace(
**{attr: colorize_ansi(getattr(msg, attr), color, style)
for attr in ('msg', 'symbol', 'category', 'C')})
self.write_message(msg)
......
......@@ -14,20 +14,23 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""functional/non regression tests for pylint"""
from __future__ import with_statement
from __future__ import print_function
import collections
import contextlib
import functools
import os
import sys
import re
import unittest
import tempfile
import tokenize
from glob import glob
from os import linesep
from os import linesep, getcwd, sep
from os.path import abspath, basename, dirname, isdir, join, splitext
from cStringIO import StringIO
from logilab.common import testlib
from astroid import test_utils
from pylint import checkers
from pylint.utils import PyLintASTWalker
......@@ -35,6 +38,9 @@ from pylint.reporters import BaseReporter
from pylint.interfaces import IReporter
from pylint.lint import PyLinter
import six
from six.moves import StringIO
# Utils
......@@ -89,9 +95,11 @@ class TestReporter(BaseReporter):
__implements____ = IReporter
def __init__(self):
def __init__(self): # pylint: disable=super-init-not-called
self.message_ids = {}
self.reset()
self.path_strip_prefix = getcwd() + sep
def reset(self):
self.out = StringIO()
......@@ -113,7 +121,7 @@ class TestReporter(BaseReporter):
def finalize(self):
self.messages.sort()
for msg in self.messages:
print >> self.out, msg
print(msg, file=self.out)
result = self.out.getvalue()
self.reset()
return result
......@@ -122,34 +130,15 @@ class TestReporter(BaseReporter):
"""ignore layouts"""
if sys.version_info < (2, 6):
class Message(tuple):
def __new__(cls, msg_id, line=None, node=None, args=None):
return tuple.__new__(cls, (msg_id, line, node, args))
@property
def msg_id(self):
return self[0]
@property
def line(self):
return self[1]
@property
def node(self):
return self[2]
@property
def args(self):
return self[3]
else:
class Message(collections.namedtuple('Message',
['msg_id', 'line', 'node', 'args'])):
def __new__(cls, msg_id, line=None, node=None, args=None):
return tuple.__new__(cls, (msg_id, line, node, args))
class Message(collections.namedtuple('Message',
['msg_id', 'line', 'node', 'args'])):
def __new__(cls, msg_id, line=None, node=None, args=None):
return tuple.__new__(cls, (msg_id, line, node, args))
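With the Python < 2.6 fallback gone, the test helper Message is always a namedtuple whose __new__ fills in None defaults, so test expectations only need to spell out the fields they care about. For example (values are illustrative):

    import collections

    class Message(collections.namedtuple('Message',
                                         ['msg_id', 'line', 'node', 'args'])):
        def __new__(cls, msg_id, line=None, node=None, args=None):
            return tuple.__new__(cls, (msg_id, line, node, args))

    # omitted fields default to None, and comparison is plain tuple equality
    assert Message('unused-import') == Message('unused-import', None, None, None)
    print(Message('undefined-variable', line=3, args=('x',)))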
class UnittestLinter(object):
"""A fake linter class to capture checker messages."""
# pylint: disable=unused-argument, no-self-use
def __init__(self):
self._messages = []
......@@ -161,14 +150,15 @@ class UnittestLinter(object):
finally:
self._messages = []
def add_message(self, msg_id, line=None, node=None, args=None):
def add_message(self, msg_id, line=None, node=None, args=None,
confidence=None):
self._messages.append(Message(msg_id, line, node, args))
def is_message_enabled(self, *unused_args):
return True
def add_stats(self, **kwargs):
for name, value in kwargs.iteritems():
for name, value in six.iteritems(kwargs):
self.stats[name] = value
return self.stats
......@@ -181,7 +171,7 @@ def set_config(**kwargs):
def _Wrapper(fun):
@functools.wraps(fun)
def _Forward(self):
for key, value in kwargs.iteritems():
for key, value in six.iteritems(kwargs):
setattr(self.checker.config, key, value)
if isinstance(self, CheckerTestCase):
# reopen checker in case, it may be interested in configuration change
......@@ -192,7 +182,7 @@ def set_config(**kwargs):
return _Wrapper
class CheckerTestCase(testlib.TestCase):
class CheckerTestCase(unittest.TestCase):
"""A base testcase class for unittesting individual checker classes."""
CHECKER_CLASS = None
CONFIG = {}
......@@ -200,7 +190,7 @@ class CheckerTestCase(testlib.TestCase):
def setUp(self):
self.linter = UnittestLinter()
self.checker = self.CHECKER_CLASS(self.linter) # pylint: disable=not-callable
for key, value in self.CONFIG.iteritems():
for key, value in six.iteritems(self.CONFIG):
setattr(self.checker.config, key, value)
self.checker.open()
......@@ -250,13 +240,13 @@ else:
INFO_TEST_RGX = re.compile(r'^func_i\d\d\d\d$')
def exception_str(self, ex):
def exception_str(self, ex): # pylint: disable=unused-argument
"""function used to replace default __str__ method of exception instances"""
return 'in %s\n:: %s' % (ex.file, ', '.join(ex.args))
# Test classes
class LintTestUsingModule(testlib.TestCase):
class LintTestUsingModule(unittest.TestCase):
INPUT_DIR = None
DEFAULT_PACKAGE = 'input'
package = DEFAULT_PACKAGE
......@@ -265,6 +255,7 @@ class LintTestUsingModule(testlib.TestCase):
depends = None
output = None
_TEST_TYPE = 'module'
maxDiff = None
def shortDescription(self):
values = {'mode' : self._TEST_TYPE,
......@@ -296,11 +287,11 @@ class LintTestUsingModule(testlib.TestCase):
self.linter.disable('I')
try:
self.linter.check(tocheck)
except Exception, ex:
except Exception as ex:
# need finalization to restore a correct state
self.linter.reporter.finalize()
ex.file = tocheck
print ex
print(ex)
ex.__str__ = exception_str
raise
self._check_result(self.linter.reporter.finalize())
......@@ -347,11 +338,11 @@ class LintTestUpdate(LintTestUsingModule):
def cb_test_gen(base_class):
def call(input_dir, msg_dir, module_file, messages_file, dependencies):
# pylint: disable=no-init
class LintTC(base_class):
module = module_file.replace('.py', '')
output = messages_file
depends = dependencies or None
tags = testlib.Tags(('generated', 'pylint_input_%s' % module))
INPUT_DIR = input_dir
MSG_DIR = msg_dir
return LintTC
......@@ -372,7 +363,7 @@ def make_tests(input_dir, msg_dir, filter_rgx, callbacks):
for module_file, messages_file in (
get_tests_info(input_dir, msg_dir, 'func_', '')
):
if not is_to_run(module_file):
if not is_to_run(module_file) or module_file.endswith('.pyc'):
continue
base = module_file.replace('func_', '').replace('.py', '')
......@@ -384,3 +375,38 @@ def make_tests(input_dir, msg_dir, filter_rgx, callbacks):
if test:
tests.append(test)
return tests
def tokenize_str(code):
return list(tokenize.generate_tokens(StringIO(code).readline))
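tokenize_str() simply runs the stdlib tokenizer over an in-memory string, which is convenient for token-checker tests. A minimal Python 3 illustration of what it returns (under Python 2 the entries are plain 5-tuples rather than TokenInfo objects):

    import tokenize
    from io import StringIO

    def tokenize_str(code):
        return list(tokenize.generate_tokens(StringIO(code).readline))

    tokens = tokenize_str('x = 1  # comment\n')
    # each entry carries the token type, its text and its source position
    print(tokens[0].type == tokenize.NAME, tokens[0].string)    # True x
    print(any(tok.type == tokenize.COMMENT for tok in tokens))  # True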
@contextlib.contextmanager
def create_tempfile(content=None):
"""Create a new temporary file.
If the *content* parameter is given, it is written to the temporary file
before the path is passed back.
This is a context manager and should be used with a *with* statement.
"""
# Can't use tempfile.NamedTemporaryFile here
# because on Windows the file must be closed before writing to it,
# see http://bugs.python.org/issue14243
fd, tmp = tempfile.mkstemp()
if content:
if sys.version_info >= (3, 0):
# erff
os.write(fd, bytes(content, 'ascii'))
else:
os.write(fd, content)
try:
yield tmp
finally:
os.close(fd)
os.remove(tmp)
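A quick usage sketch for the context manager above: the temporary file only exists inside the with block and is removed on exit.

    import os.path

    with create_tempfile('x = 1\n') as path:
        with open(path) as fobj:
            assert fobj.read() == 'x = 1\n'
    # the temporary file has been deleted by the time the block exits
    assert not os.path.exists(path)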
@contextlib.contextmanager
def create_file_backed_module(code):
"""Create an astroid module for the given code, backed by a real file."""
with create_tempfile() as temp:
module = test_utils.build_module(code)
module.file = temp
yield module
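And a sketch for the companion helper: it builds an astroid module from a code string but points module.file at a real (temporary) path, so code that expects node.root().file to name an existing file can be exercised.

    with create_file_backed_module('def f():\n    return 1\n') as module:
        print(module.file)           # path of a real temporary file
        print('f' in module.locals)  # True; otherwise it is a normal astroid Module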
......@@ -16,14 +16,19 @@
"""some various utilities and helper classes, most of them used in the
main pylint class
"""
from __future__ import print_function
import collections
import os
import re
import sys
import tokenize
import os
from warnings import warn
import warnings
from os.path import dirname, basename, splitext, exists, isdir, join, normpath
import six
from six.moves import zip # pylint: disable=redefined-builtin
from logilab.common.interface import implements
from logilab.common.textutils import normalize_text
from logilab.common.configuration import rest_format_section
......@@ -33,7 +38,7 @@ from astroid import nodes, Module
from astroid.modutils import modpath_from_file, get_module_files, \
file_from_modpath, load_module_from_file
from pylint.interfaces import IRawChecker, ITokenChecker
from pylint.interfaces import IRawChecker, ITokenChecker, UNDEFINED
class UnknownMessage(Exception):
......@@ -51,7 +56,7 @@ MSG_TYPES = {
'E' : 'error',
'F' : 'fatal'
}
MSG_TYPES_LONG = dict([(v, k) for k, v in MSG_TYPES.iteritems()])
MSG_TYPES_LONG = {v: k for k, v in six.iteritems(MSG_TYPES)}
MSG_TYPES_STATUS = {
'I' : 0,
......@@ -65,6 +70,7 @@ MSG_TYPES_STATUS = {
_MSG_ORDER = 'EWRCIF'
MSG_STATE_SCOPE_CONFIG = 0
MSG_STATE_SCOPE_MODULE = 1
MSG_STATE_CONFIDENCE = 2
OPTION_RGX = re.compile(r'\s*#.*\bpylint:(.*)')
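OPTION_RGX is what recognizes inline pylint pragmas: everything after 'pylint:' is captured and parsed further on. For instance:

    import re

    OPTION_RGX = re.compile(r'\s*#.*\bpylint:(.*)')

    match = OPTION_RGX.search('x = 1  # pylint: disable=invalid-name')
    print(match.group(1))   # ' disable=invalid-name' (split into keyword/values later)
    print(OPTION_RGX.search('x = 1  # a plain comment'))   # None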
......@@ -75,6 +81,29 @@ class WarningScope(object):
LINE = 'line-based-msg'
NODE = 'node-based-msg'
_MsgBase = collections.namedtuple(
'_MsgBase',
['msg_id', 'symbol', 'msg', 'C', 'category', 'confidence',
'abspath', 'path', 'module', 'obj', 'line', 'column'])
class Message(_MsgBase):
"""This class represent a message to be issued by the reporters"""
def __new__(cls, msg_id, symbol, location, msg, confidence):
return _MsgBase.__new__(
cls, msg_id, symbol, msg, msg_id[0], MSG_TYPES[msg_id[0]],
confidence, *location)
def format(self, template):
"""Format the message according to the given template.
The template format is the one used by str.format():
cf. http://docs.python.org/2/library/string.html#formatstrings
"""
# For some reason, _asdict on derived namedtuples does not work with
# Python 3.4. Needs some investigation.
return template.format(**dict(zip(self._fields, self)))
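format() turns the namedtuple into a dict so that any field name can be referenced from a user-supplied message template. A standalone illustration (the template below is just an example, not pylint's default):

    import collections

    _MsgBase = collections.namedtuple(
        '_MsgBase',
        ['msg_id', 'symbol', 'msg', 'C', 'category', 'confidence',
         'abspath', 'path', 'module', 'obj', 'line', 'column'])

    msg = _MsgBase('W0612', 'unused-variable', "Unused variable 'x'", 'W', 'warning',
                   None, '/tmp/pkg/mod.py', 'pkg/mod.py', 'pkg.mod', 'f', 10, 4)

    template = '{path}:{line}:{column}: [{msg_id}({symbol}), {obj}] {msg}'
    print(template.format(**dict(zip(msg._fields, msg))))
    # pkg/mod.py:10:4: [W0612(unused-variable), f] Unused variable 'x'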
def get_module_and_frameid(node):
"""return the module name and the frame id in the module"""
......@@ -92,11 +121,11 @@ def get_module_and_frameid(node):
obj.reverse()
return module, '.'.join(obj)
def category_id(id):
id = id.upper()
if id in MSG_TYPES:
return id
return MSG_TYPES_LONG.get(id)
def category_id(cid):
cid = cid.upper()
if cid in MSG_TYPES:
return cid
return MSG_TYPES_LONG.get(cid)
def tokenize_module(module):
......@@ -124,8 +153,8 @@ def build_message_def(checker, msgid, msg_tuple):
# messages should have a symbol, but for backward compatibility
# they may not.
(msg, descr) = msg_tuple
warn("[pylint 0.26] description of message %s doesn't include "
"a symbolic name" % msgid, DeprecationWarning)
warnings.warn("[pylint 0.26] description of message %s doesn't include "
"a symbolic name" % msgid, DeprecationWarning)
symbol = None
options.setdefault('scope', default_scope)
return MessageDefinition(checker, msgid, msg, descr, symbol, **options)
......@@ -238,7 +267,7 @@ class MessagesHandlerMixIn(object):
msgs = self._msgs_state
msgs[msg.msgid] = False
# sync configuration object
self.config.disable_msg = [mid for mid, val in msgs.iteritems()
self.config.disable_msg = [mid for mid, val in six.iteritems(msgs)
if not val]
def enable(self, msgid, scope='package', line=None, ignore_unknown=False):
......@@ -276,14 +305,27 @@ class MessagesHandlerMixIn(object):
msgs = self._msgs_state
msgs[msg.msgid] = True
# sync configuration object
self.config.enable = [mid for mid, val in msgs.iteritems() if val]
self.config.enable = [mid for mid, val in six.iteritems(msgs) if val]
def get_message_state_scope(self, msgid, line=None, confidence=UNDEFINED):
"""Returns the scope at which a message was enabled/disabled."""
if self.config.confidence and confidence.name not in self.config.confidence:
return MSG_STATE_CONFIDENCE
try:
if line in self.file_state._module_msgs_state[msgid]:
return MSG_STATE_SCOPE_MODULE
except (KeyError, TypeError):
return MSG_STATE_SCOPE_CONFIG
def is_message_enabled(self, msg_descr, line=None):
def is_message_enabled(self, msg_descr, line=None, confidence=None):
"""return true if the message associated to the given message id is
enabled
msgid may be either a numeric or symbolic message id.
"""
if self.config.confidence and confidence:
if confidence.name not in self.config.confidence:
return False
try:
msgid = self.msgs_store.check_message_id(msg_descr).msgid
except UnknownMessage:
......@@ -298,7 +340,7 @@ class MessagesHandlerMixIn(object):
except KeyError:
return self._msgs_state.get(msgid, True)
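The new confidence plumbing lets messages be filtered by how sure pylint is about them: a message may carry a Confidence value, and when the configuration lists accepted confidence names, anything outside that list is dropped before it reaches a reporter. A rough sketch of just the filtering rule, using stand-ins for the configuration and the Confidence objects (the real ones live in pylint.interfaces):

    import collections

    # stand-in for pylint.interfaces.Confidence values
    Confidence = collections.namedtuple('Confidence', ['name', 'description'])
    HIGH = Confidence('HIGH', 'No false positive possible.')
    INFERENCE_FAILURE = Confidence('INFERENCE_FAILURE',
                                   'Warning based on inference with failures.')

    def confidence_accepted(allowed_names, confidence):
        # mirrors the check above: an empty allowed list means "accept everything"
        if allowed_names and confidence:
            return confidence.name in allowed_names
        return True

    allowed = ['HIGH', 'INFERENCE']
    print(confidence_accepted(allowed, HIGH))               # True
    print(confidence_accepted(allowed, INFERENCE_FAILURE))  # False
    print(confidence_accepted([], INFERENCE_FAILURE))       # True: no filtering configured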
def add_message(self, msg_descr, line=None, node=None, args=None):
def add_message(self, msg_descr, line=None, node=None, args=None, confidence=UNDEFINED):
"""Adds a message given by ID or name.
If provided, the message string is expanded using args
......@@ -328,8 +370,10 @@ class MessagesHandlerMixIn(object):
else:
col_offset = None
# should this message be displayed
if not self.is_message_enabled(msgid, line):
self.file_state.handle_ignored_message(msgid, line, node, args)
if not self.is_message_enabled(msgid, line, confidence):
self.file_state.handle_ignored_message(
self.get_message_state_scope(msgid, line, confidence),
msgid, line, node, args, confidence)
return
# update stats
msg_cat = MSG_TYPES[msgid[0]]
......@@ -347,31 +391,37 @@ class MessagesHandlerMixIn(object):
# get module and object
if node is None:
module, obj = self.current_name, ''
path = self.current_file
abspath = self.current_file
else:
module, obj = get_module_and_frameid(node)
path = node.root().file
abspath = node.root().file
path = abspath.replace(self.reporter.path_strip_prefix, '')
# add the message
self.reporter.add_message(msgid, (path, module, obj, line or 1, col_offset or 0), msg)
self.reporter.handle_message(
Message(msgid, symbol,
(abspath, path, module, obj, line or 1, col_offset or 0), msg, confidence))
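The tail of add_message now builds the full Message itself; the location tuple is (abspath, path, module, obj, line, column), where path is just abspath with the reporter's path_strip_prefix removed. A small sketch of that assembly (helper name and values are illustrative):

    import collections

    MSG_TYPES = {'I': 'info', 'C': 'convention', 'R': 'refactor',
                 'W': 'warning', 'E': 'error', 'F': 'fatal'}

    Message = collections.namedtuple(
        'Message', ['msg_id', 'symbol', 'msg', 'C', 'category', 'confidence',
                    'abspath', 'path', 'module', 'obj', 'line', 'column'])

    def build_message(msgid, symbol, msg, abspath, module, obj, line, col_offset,
                      path_strip_prefix, confidence=None):
        # strip the working directory off the absolute path, then pack
        # everything into one immutable record, as the code above does
        path = abspath.replace(path_strip_prefix, '')
        location = (abspath, path, module, obj, line or 1, col_offset or 0)
        return Message(msgid, symbol, msg, msgid[0], MSG_TYPES[msgid[0]],
                       confidence, *location)

    m = build_message('W0612', 'unused-variable', "Unused variable 'x'",
                      '/home/me/project/pkg/mod.py', 'pkg.mod', 'f', 10, 4,
                      path_strip_prefix='/home/me/project/')
    print(m.path)   # pkg/mod.py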
def print_full_documentation(self):
"""output a full documentation in ReST format"""
print("Pylint global options and switches")
print("----------------------------------")
print("")
print("Pylint provides global options and switches.")
print("")
by_checker = {}
for checker in self.get_checkers():
if checker.name == 'master':
prefix = 'Main '
print "Options"
print '-------\n'
if checker.options:
for section, options in checker.options_by_section():
if section is None:
title = 'General options'
else:
title = '%s options' % section.capitalize()
print title
print '~' * len(title)
print(title)
print('~' * len(title))
rest_format_section(sys.stdout, None, options)
print
print("")
else:
try:
by_checker[checker.name][0] += checker.options_and_values()
......@@ -381,35 +431,49 @@ class MessagesHandlerMixIn(object):
by_checker[checker.name] = [list(checker.options_and_values()),
dict(checker.msgs),
list(checker.reports)]
for checker, (options, msgs, reports) in by_checker.iteritems():
prefix = ''
title = '%s checker' % checker
print title
print '-' * len(title)
print
print("Pylint checkers' options and switches")
print("-------------------------------------")
print("")
print("Pylint checkers can provide three set of features:")
print("")
print("* options that control their execution,")
print("* messages that they can raise,")
print("* reports that they can generate.")
print("")
print("Below is a list of all checkers and their features.")
print("")
for checker, (options, msgs, reports) in six.iteritems(by_checker):
title = '%s checker' % (checker.replace("_", " ").title())
print(title)
print('~' * len(title))
print("")
print("Verbatim name of the checker is ``%s``." % checker)
print("")
if options:
title = 'Options'
print title
print '~' * len(title)
print(title)
print('^' * len(title))
rest_format_section(sys.stdout, None, options)
print
print("")
if msgs:
title = ('%smessages' % prefix).capitalize()
print title
print '~' * len(title)
for msgid, msg in sorted(msgs.iteritems(),
key=lambda (k, v): (_MSG_ORDER.index(k[0]), k)):
title = 'Messages'
print(title)
print('~' * len(title))
for msgid, msg in sorted(six.iteritems(msgs),
key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])):
msg = build_message_def(checker, msgid, msg)
print msg.format_help(checkerref=False)
print
print(msg.format_help(checkerref=False))
print("")
if reports:
title = ('%sreports' % prefix).capitalize()
print title
print '~' * len(title)
title = 'Reports'
print(title)
print('~' * len(title))
for report in reports:
print ':%s: %s' % report[:2]
print
print
print(':%s: %s' % report[:2])
print("")
print("")
class FileState(object):
......@@ -419,12 +483,12 @@ class FileState(object):
self.base_name = modname
self._module_msgs_state = {}
self._raw_module_msgs_state = {}
self._ignored_msgs = {}
self._ignored_msgs = collections.defaultdict(set)
self._suppression_mapping = {}
def collect_block_lines(self, msgs_store, module_node):
"""Walk the AST to collect block level options line numbers."""
for msg, lines in self._module_msgs_state.iteritems():
for msg, lines in six.iteritems(self._module_msgs_state):
self._raw_module_msgs_state[msg] = lines.copy()
orig_state = self._module_msgs_state.copy()
self._module_msgs_state = {}
......@@ -458,8 +522,8 @@ class FileState(object):
firstchildlineno = node.body[0].fromlineno
else:
firstchildlineno = last
for msgid, lines in msg_state.iteritems():
for lineno, state in lines.items():
for msgid, lines in six.iteritems(msg_state):
for lineno, state in list(lines.items()):
original_lineno = lineno
if first <= lineno <= last:
# Set state for all lines for this block, if the
......@@ -471,7 +535,7 @@ class FileState(object):
else:
first_ = lineno
last_ = last
for line in xrange(first_, last_+1):
for line in range(first_, last_+1):
# do not override existing entries
if not line in self._module_msgs_state.get(msgid, ()):
if line in lines: # state change in the same block
......@@ -493,37 +557,29 @@ class FileState(object):
except KeyError:
self._module_msgs_state[msg.msgid] = {line: status}
def handle_ignored_message(self, msgid, line, node, args):
def handle_ignored_message(self, state_scope, msgid, line,
node, args, confidence): # pylint: disable=unused-argument
"""Report an ignored message.
state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
depending on whether the message was disabled locally in the module,
or globally. The other arguments are the same as for add_message.
"""
state_scope = self._message_state_scope(msgid, line)
if state_scope == MSG_STATE_SCOPE_MODULE:
try:
orig_line = self._suppression_mapping[(msgid, line)]
self._ignored_msgs.setdefault((msgid, orig_line), set()).add(line)
self._ignored_msgs[(msgid, orig_line)].add(line)
except KeyError:
pass
def _message_state_scope(self, msgid, line=None):
"""Returns the scope at which a message was enabled/disabled."""
try:
if line in self._module_msgs_state[msgid]:
return MSG_STATE_SCOPE_MODULE
except KeyError:
return MSG_STATE_SCOPE_CONFIG
def iter_spurious_suppression_messages(self, msgs_store):
for warning, lines in self._raw_module_msgs_state.iteritems():
for line, enable in lines.iteritems():
for warning, lines in six.iteritems(self._raw_module_msgs_state):
for line, enable in six.iteritems(lines):
if not enable and (warning, line) not in self._ignored_msgs:
yield 'useless-suppression', line, \
(msgs_store.get_msg_display_string(warning),)
# don't use iteritems here, _ignored_msgs may be modified by add_message
for (warning, from_), lines in self._ignored_msgs.items():
for (warning, from_), lines in list(self._ignored_msgs.items()):
for line in lines:
yield 'suppressed-message', line, \
(msgs_store.get_msg_display_string(warning), from_)
......@@ -544,12 +600,12 @@ class MessagesStore(object):
# message definitions. May contain several names for each definition
# object.
self._alternative_names = {}
self._msgs_by_category = {}
self._msgs_by_category = collections.defaultdict(list)
@property
def messages(self):
"""The list of all active messages."""
return self._messages.itervalues()
return six.itervalues(self._messages)
def add_renamed_message(self, old_id, old_symbol, new_symbol):
"""Register the old ID and symbol for a warning that was renamed.
......@@ -571,7 +627,7 @@ class MessagesStore(object):
are the checker id and the two last the message id in this checker
"""
chkid = None
for msgid, msg_tuple in checker.msgs.iteritems():
for msgid, msg_tuple in six.iteritems(checker.msgs):
msg = build_message_def(checker, msgid, msg_tuple)
assert msg.symbol not in self._messages, \
'Message symbol %r is already defined' % msg.symbol
......@@ -586,7 +642,7 @@ class MessagesStore(object):
for old_id, old_symbol in msg.old_names:
self._alternative_names[old_id] = msg
self._alternative_names[old_symbol] = msg
self._msgs_by_category.setdefault(msg.msgid[0], []).append(msg.msgid)
self._msgs_by_category[msg.msgid[0]].append(msg.msgid)
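register_messages() indexes each definition several ways: by symbol, by message id (plus any old names), and, with the new defaultdict, by category letter without the setdefault() dance. A much reduced sketch of that bookkeeping (class and values are illustrative):

    import collections

    class TinyStore(object):
        """Very reduced sketch of the indexing done by register_messages."""
        def __init__(self):
            self._messages = {}                  # symbol -> definition
            self._alternative_names = {}         # msgid or old name -> definition
            self._msgs_by_category = collections.defaultdict(list)

        def register(self, msgid, symbol, definition):
            self._messages[symbol] = definition
            self._alternative_names[msgid] = definition
            self._msgs_by_category[msgid[0]].append(msgid)

    store = TinyStore()
    store.register('W0612', 'unused-variable', object())
    store.register('E1101', 'no-member', object())
    print(store._msgs_by_category['W'])   # ['W0612']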
def check_message_id(self, msgid):
"""returns the Message object for this message.
......@@ -615,21 +671,21 @@ class MessagesStore(object):
"""display help messages for the given message identifiers"""
for msgid in msgids:
try:
print self.check_message_id(msgid).format_help(checkerref=True)
print
except UnknownMessage, ex:
print ex
print
print(self.check_message_id(msgid).format_help(checkerref=True))
print("")
except UnknownMessage as ex:
print(ex)
print("")
continue
def list_messages(self):
"""output full messages list documentation in ReST format"""
msgs = sorted(self._messages.itervalues(), key=lambda msg: msg.msgid)
msgs = sorted(six.itervalues(self._messages), key=lambda msg: msg.msgid)
for msg in msgs:
if not msg.may_be_emitted():
continue
print msg.format_help(checkerref=False)
print
print(msg.format_help(checkerref=False))
print("")
class ReportsHandlerMixIn(object):
......@@ -637,9 +693,15 @@ class ReportsHandlerMixIn(object):
related methods for the main lint class
"""
def __init__(self):
self._reports = {}
self._reports = collections.defaultdict(list)
self._reports_state = {}
def report_order(self):
""" Return a list of reports, sorted in the order
in which they must be called.
"""
return list(self._reports)
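report_order() exists so that subclasses can control the order in which report callbacks run; the default is simply registration order. A hypothetical override sketch (the 'master' name check is only an example of the kind of policy a subclass might apply):

    from pylint.utils import ReportsHandlerMixIn

    class OrderedReports(ReportsHandlerMixIn):
        def report_order(self):
            # put the checker named 'master' first, keep the rest as registered
            checkers = super(OrderedReports, self).report_order()
            return sorted(checkers, key=lambda c: getattr(c, 'name', '') != 'master')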
def register_report(self, reportid, r_title, r_cb, checker):
"""register a report
......@@ -649,7 +711,7 @@ class ReportsHandlerMixIn(object):
checker is the checker defining the report
"""
reportid = reportid.upper()
self._reports.setdefault(checker, []).append((reportid, r_title, r_cb))
self._reports[checker].append((reportid, r_title, r_cb))
def enable_report(self, reportid):
"""disable the report of the given id"""
......@@ -671,7 +733,7 @@ class ReportsHandlerMixIn(object):
"""render registered reports"""
sect = Section('Report',
'%s statements analysed.'% (self.stats['statement']))
for checker in self._reports:
for checker in self.report_order():
for reportid, r_title, r_cb in self._reports[checker]:
if not self.report_is_enabled(reportid):
continue
......@@ -688,7 +750,7 @@ class ReportsHandlerMixIn(object):
"""add some stats entries to the statistic dictionary
raise an AssertionError if there is a key conflict
"""
for key, value in kwargs.iteritems():
for key, value in six.iteritems(kwargs):
if key[-1] == '_':
key = key[:-1]
assert key not in self.stats
......@@ -721,7 +783,7 @@ def expand_modules(files_or_modules, black_list):
if filepath is None:
errors.append({'key' : 'ignored-builtin-module', 'mod': modname})
continue
except (ImportError, SyntaxError), ex:
except (ImportError, SyntaxError) as ex:
# FIXME p3k : the SyntaxError is a Python bug and should be
# removed as soon as possible http://bugs.python.org/issue10588
errors.append({'key': 'fatal', 'mod': modname, 'ex': ex})
......@@ -746,8 +808,8 @@ class PyLintASTWalker(object):
def __init__(self, linter):
# callbacks per node types
self.nbstatements = 1
self.visit_events = {}
self.leave_events = {}
self.visit_events = collections.defaultdict(list)
self.leave_events = collections.defaultdict(list)
self.linter = linter
def _is_method_enabled(self, method):
......@@ -773,20 +835,20 @@ class PyLintASTWalker(object):
v_meth = getattr(checker, member)
# don't use visit_methods with no activated message:
if self._is_method_enabled(v_meth):
visits.setdefault(cid, []).append(v_meth)
visits[cid].append(v_meth)
vcids.add(cid)
elif member.startswith('leave_'):
l_meth = getattr(checker, member)
# don't use leave_methods with no activated message:
if self._is_method_enabled(l_meth):
leaves.setdefault(cid, []).append(l_meth)
leaves[cid].append(l_meth)
lcids.add(cid)
visit_default = getattr(checker, 'visit_default', None)
if visit_default:
for cls in nodes.ALL_NODE_CLASSES:
cid = cls.__name__.lower()
if cid not in vcids:
visits.setdefault(cid, []).append(visit_default)
visits[cid].append(visit_default)
# for now we have no "leave_default" method in Pylint
def walk(self, astroid):
......@@ -824,8 +886,9 @@ def register_plugins(linter, directory):
except ValueError:
# empty module name (usually emacs auto-save files)
continue
except ImportError, exc:
print >> sys.stderr, "Problem importing module %s: %s" % (filename, exc)
except ImportError as exc:
print("Problem importing module %s: %s" % (filename, exc),
file=sys.stderr)
else:
if hasattr(module, 'register'):
module.register(linter)
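register_plugins() imports every module found in the directory and, when the module exposes a register() callable, hands it the linter. A drop-in plugin is therefore just a module along these lines (checker name and message id are made up for the example):

    # contents of a hypothetical plugin file, e.g. my_plugin.py
    from pylint.checkers import BaseChecker
    from pylint.interfaces import IAstroidChecker

    class HelloChecker(BaseChecker):
        __implements__ = IAstroidChecker
        name = 'hello'
        msgs = {'W9901': ('Hello message', 'hello-message',
                          'Emitted only to demonstrate plugin registration.')}

    def register(linter):
        """Entry point looked up by register_plugins()."""
        linter.register_checker(HelloChecker(linter))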
......