Commit 9f1f040b authored by vapier@chromium.org's avatar vapier@chromium.org

pylint: upgrade to 1.4.0

This is largely a bugfix release, so it should be much easier to transition to.

BUG=chromium:431514
TEST=ran on some code bases and checked output

Review URL: https://codereview.chromium.org/753543006

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@293355 0039d316-1c4b-4281-b951-d872f2087c98
parent a64c0b08
URL: http://www.logilab.org/project/logilab-astng
Version: 1.2.1
Version: 1.3.2
License: GPL
License File: LICENSE.txt
......
......@@ -79,6 +79,9 @@ class AsStringRegexpPredicate(object):
If specified, the second argument is an `attrgetter` expression that will be
applied on the node first to get the actual node on which `as_string` should
be called.
WARNING: This can be fairly slow, as it has to convert every AST node back
to Python code; you should consider examining the AST directly instead.
"""
def __init__(self, regexp, expression=None):
self.regexp = re.compile(regexp)
......@@ -98,13 +101,23 @@ def inference_tip(infer_function):
.. sourcecode:: python
MANAGER.register_transform(CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
predicate)
"""
def transform(node, infer_function=infer_function):
node._explicit_inference = infer_function
return node
return transform
def register_module_extender(manager, module_name, get_extension_mod):
    """Extend *module_name* with the locals of a lazily-built stub module.

    Whenever *manager* loads a module whose name equals *module_name*,
    ``get_extension_mod()`` is called to build the stub module and each
    of its locals is injected into the loaded module's ``locals``.
    """
    def extend(node):
        # Build the stub lazily, only once the target module is actually seen.
        stub = get_extension_mod()
        for local_name, local_value in stub.locals.items():
            node.locals[local_name] = local_value
    manager.register_transform(Module, extend,
                               lambda n: n.name == module_name)
# load brain plugins
from os import listdir
from os.path import join, dirname
......
......@@ -16,30 +16,23 @@
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""astroid packaging information"""
distname = 'astroid'
modname = 'astroid'
numversion = (1, 2, 1)
numversion = (1, 3, 2)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0']
install_requires = ['logilab-common >= 0.60.0', 'six']
license = 'LGPL'
author = 'Logilab'
author_email = 'python-projects@lists.logilab.org'
author_email = 'pylint-dev@lists.logilab.org'
mailinglist = "mailto://%s" % author_email
web = 'http://bitbucket.org/logilab/astroid'
description = "rebuild a new abstract syntax tree from Python's ast"
from os.path import join
include_dirs = ['brain',
join('test', 'regrtest_data'),
join('test', 'data'), join('test', 'data2')
]
# Short package description used in the setup metadata.
# Grammar fix: "An", not "A", before a vowel sound.
description = "An abstract syntax tree for Python with inference support."
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
......
......@@ -148,7 +148,7 @@ class AsStringVisitor(object):
bases = bases and '(%s)' % bases or ''
else:
metaclass = node.metaclass()
if metaclass:
if metaclass and not node.has_metaclass_hack():
if bases:
bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
else:
......
......@@ -24,6 +24,8 @@ __docformat__ = "restructuredtext en"
import sys
from contextlib import contextmanager
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (InferenceError, AstroidError, NotFoundError,
UnresolvableName, UseInferenceDefault)
......@@ -56,63 +58,84 @@ class Proxy(object):
# Inference ##################################################################
MISSING = object()
class InferenceContext(object):
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode')
__slots__ = ('path', 'callcontext', 'boundnode', 'infered')
def __init__(self, path=None):
def __init__(self,
path=None, callcontext=None, boundnode=None, infered=None):
if path is None:
self.path = set()
self.path = frozenset()
else:
self.path = path
self.lookupname = None
self.callcontext = None
self.boundnode = None
def push(self, node):
name = self.lookupname
if (node, name) in self.path:
raise StopIteration()
self.path.add((node, name))
def clone(self):
# XXX copy lookupname/callcontext ?
clone = InferenceContext(self.path)
clone.callcontext = self.callcontext
clone.boundnode = self.boundnode
return clone
self.callcontext = callcontext
self.boundnode = boundnode
if infered is None:
self.infered = {}
else:
self.infered = infered
def push(self, key):
# This returns a NEW context with the same attributes, but a new key
# added to `path`. The intention is that it's only passed to callees
# and then destroyed; otherwise scope() may not work correctly.
# The cache will be shared, since it's the same exact dict.
if key in self.path:
# End the containing generator
raise StopIteration
return InferenceContext(
self.path.union([key]),
self.callcontext,
self.boundnode,
self.infered,
)
@contextmanager
def restore_path(self):
path = set(self.path)
yield
self.path = path
def copy_context(context):
if context is not None:
return context.clone()
else:
return InferenceContext()
def scope(self, callcontext=MISSING, boundnode=MISSING):
try:
orig = self.callcontext, self.boundnode
if callcontext is not MISSING:
self.callcontext = callcontext
if boundnode is not MISSING:
self.boundnode = boundnode
yield
finally:
self.callcontext, self.boundnode = orig
def cache_generator(self, key, generator):
results = []
for result in generator:
results.append(result)
yield result
self.infered[key] = tuple(results)
return
def _infer_stmts(stmts, context, frame=None):
def _infer_stmts(stmts, context, frame=None, lookupname=None):
"""return an iterator on statements inferred by each statement in <stmts>
"""
stmt = None
infered = False
if context is not None:
name = context.lookupname
context = context.clone()
else:
name = None
if context is None:
context = InferenceContext()
for stmt in stmts:
if stmt is YES:
yield stmt
infered = True
continue
context.lookupname = stmt._infer_name(frame, name)
kw = {}
infered_name = stmt._infer_name(frame, lookupname)
if infered_name is not None:
# only returns not None if .infer() accepts a lookupname kwarg
kw['lookupname'] = infered_name
try:
for infered in stmt.infer(context):
for infered in stmt.infer(context, **kw):
yield infered
infered = True
except UnresolvableName:
......@@ -170,20 +193,24 @@ class Instance(Proxy):
def igetattr(self, name, context=None):
"""inferred getattr"""
if not context:
context = InferenceContext()
try:
# avoid recursively inferring the same attr on the same class
if context:
context.push((self._proxied, name))
new_context = context.push((self._proxied, name))
# XXX frame should be self._proxied, or not ?
get_attr = self.getattr(name, context, lookupclass=False)
return _infer_stmts(self._wrap_attr(get_attr, context), context,
frame=self)
get_attr = self.getattr(name, new_context, lookupclass=False)
return _infer_stmts(
self._wrap_attr(get_attr, new_context),
new_context,
frame=self,
)
except NotFoundError:
try:
# fallback to class'igetattr since it has some logic to handle
# descriptors
return self._wrap_attr(self._proxied.igetattr(name, context),
context)
context)
except NotFoundError:
raise InferenceError(name)
......@@ -274,9 +301,9 @@ class BoundMethod(UnboundMethod):
return True
def infer_call_result(self, caller, context):
context = context.clone()
context.boundnode = self.bound
return self._proxied.infer_call_result(caller, context)
with context.scope(boundnode=self.bound):
for infered in self._proxied.infer_call_result(caller, context):
yield infered
class Generator(Instance):
......@@ -308,7 +335,8 @@ def path_wrapper(func):
"""wrapper function handling context"""
if context is None:
context = InferenceContext()
context.push(node)
context = context.push((node, kwargs.get('lookupname')))
yielded = set()
for res in _func(node, context, **kwargs):
# unproxy only true instance, not const, tuple, dict...
......@@ -377,7 +405,15 @@ class NodeNG(object):
return self._explicit_inference(self, context, **kwargs)
except UseInferenceDefault:
pass
return self._infer(context, **kwargs)
if not context:
return self._infer(context, **kwargs)
key = (self, kwargs.get('lookupname'), context.callcontext, context.boundnode)
if key in context.infered:
return iter(context.infered[key])
return context.cache_generator(key, self._infer(context, **kwargs))
def _repr_name(self):
"""return self.name or self.attrname or '' for nice representation"""
......@@ -415,7 +451,7 @@ class NodeNG(object):
attr = getattr(self, field)
if not attr: # None or empty listy / tuple
continue
if isinstance(attr, (list, tuple)):
if attr.__class__ in (list, tuple):
return attr[-1]
else:
return attr
......@@ -506,16 +542,28 @@ class NodeNG(object):
# FIXME: raise an exception if nearest is None ?
return nearest[0]
def set_line_info(self, lastchild):
# these are lazy because they're relatively expensive to compute for every
# single node, and they rarely get looked at
@cachedproperty
def fromlineno(self):
if self.lineno is None:
self.fromlineno = self._fixed_source_line()
return self._fixed_source_line()
else:
return self.lineno
@cachedproperty
def tolineno(self):
if not self._astroid_fields:
# can't have children
lastchild = None
else:
self.fromlineno = self.lineno
lastchild = self.last_child()
if lastchild is None:
self.tolineno = self.fromlineno
return self.fromlineno
else:
self.tolineno = lastchild.tolineno
return
return lastchild.tolineno
# TODO / FIXME:
assert self.fromlineno is not None, self
assert self.tolineno is not None, self
......@@ -530,7 +578,7 @@ class NodeNG(object):
_node = self
try:
while line is None:
_node = _node.get_children().next()
_node = next(_node.get_children())
line = _node.lineno
except StopIteration:
_node = self.parent
......
......@@ -4,6 +4,7 @@ Helps with understanding everything imported from 'gi.repository'
"""
import inspect
import itertools
import sys
import re
......@@ -111,40 +112,33 @@ def _gi_build_stub(parent):
return ret
# Overwrite Module.module_import to _actually_ import the introspected module if
# it's a gi module, then build stub code by examining its info and get an astng
# from that
from astroid.scoped_nodes import Module
_orig_import_module = Module.import_module
def _new_import_module(self, modname, relative_only=False, level=None):
# Could be a static piece of gi.repository or whatever unrelated module,
# let that fall through
try:
return _orig_import_module(self, modname, relative_only, level)
except AstroidBuildingException:
# we only consider gi.repository submodules
if not modname.startswith('gi.repository.'):
if relative_only and level is None:
level = 0
modname = self.relative_to_absolute_name(modname, level)
if not modname.startswith('gi.repository.'):
raise
def _import_gi_module(modname):
# we only consider gi.repository submodules
if not modname.startswith('gi.repository.'):
raise AstroidBuildingException()
# build astroid representation unless we already tried so
if modname not in _inspected_modules:
modnames = [modname]
# GLib and GObject have some special case handling
# in pygobject that we need to cope with
optional_modnames = []
# GLib and GObject may have some special case handling
# in pygobject that we need to cope with. However at
# least as of pygobject3-3.13.91 the _glib module doesn't
# exist anymore, so we treat these modules as optional.
if modname == 'gi.repository.GLib':
modnames.append('gi._glib')
optional_modnames.append('gi._glib')
elif modname == 'gi.repository.GObject':
modnames.append('gi._gobject')
optional_modnames.append('gi._gobject')
try:
modcode = ''
for m in modnames:
__import__(m)
modcode += _gi_build_stub(sys.modules[m])
for m in itertools.chain(modnames, optional_modnames):
try:
__import__(m)
modcode += _gi_build_stub(sys.modules[m])
except ImportError:
if m not in optional_modnames:
raise
except ImportError:
astng = _inspected_modules[modname] = None
else:
......@@ -156,4 +150,6 @@ def _new_import_module(self, modname, relative_only=False, level=None):
raise AstroidBuildingException('Failed to import module %r' % modname)
return astng
Module.import_module = _new_import_module
MANAGER.register_failed_import_hook(_import_gi_module)
from astroid import MANAGER
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def mechanize_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def mechanize_transform():
return AstroidBuilder(MANAGER).string_build('''
class Browser(object):
def open(self, url, data=None, timeout=None):
......@@ -13,8 +13,6 @@ class Browser(object):
return None
''')
module.locals['Browser'] = fake.locals['Browser']
import py2stdlib
py2stdlib.MODULE_TRANSFORMS['mechanize'] = mechanize_transform
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
"""Astroid hooks for pytest."""
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pytest_transform():
    """Build a stub module mirroring pytest's public helpers.

    The stub imports the private ``_pytest`` implementation modules and,
    when they are importable, re-exports the public entry points
    (``fail``, ``raises``, ``fixture``, ...) so astroid can infer
    attribute access on ``pytest`` / ``py.test`` without importing the
    real package at analysis time.  If ``_pytest`` is missing the stub
    is simply empty.
    """
    return AstroidBuilder(MANAGER).string_build('''
try:
    import _pytest.mark
    import _pytest.recwarn
    import _pytest.runner
    import _pytest.python
except ImportError:
    pass
else:
    deprecated_call = _pytest.recwarn.deprecated_call
    exit = _pytest.runner.exit
    fail = _pytest.runner.fail
    fixture = _pytest.python.fixture
    importorskip = _pytest.runner.importorskip
    mark = _pytest.mark.MarkGenerator()
    raises = _pytest.python.raises
    skip = _pytest.runner.skip
    yield_fixture = _pytest.python.yield_fixture
''')
register_module_extender(MANAGER, 'pytest', pytest_transform)
register_module_extender(MANAGER, 'py.test', pytest_transform)
......@@ -5,21 +5,18 @@ Currently help understanding of :
* PyQT4.QtCore
"""
from astroid import MANAGER
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pyqt4_qtcore_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def pyqt4_qtcore_transform():
return AstroidBuilder(MANAGER).string_build('''
def SIGNAL(signal_name): pass
class QObject(object):
def emit(self, signal): pass
''')
for klass in ('QObject',):
module.locals[klass] = fake.locals[klass]
import py2stdlib
py2stdlib.MODULE_TRANSFORMS['PyQt4.QtCore'] = pyqt4_qtcore_transform
register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
......@@ -11,12 +11,11 @@ from textwrap import dedent
from astroid import (
MANAGER, AsStringRegexpPredicate,
UseInferenceDefault, inference_tip,
YES, InferenceError)
YES, InferenceError, register_module_extender)
from astroid import exceptions
from astroid import nodes
from astroid.builder import AstroidBuilder
MODULE_TRANSFORMS = {}
PY3K = sys.version_info > (3, 0)
PY33 = sys.version_info >= (3, 3)
......@@ -26,7 +25,7 @@ def infer_func_form(node, base_type, context=None, enum=False):
"""Specific inference function for namedtuple or Python 3 enum. """
def infer_first(node):
try:
value = node.infer(context=context).next()
value = next(node.infer(context=context))
if value is YES:
raise UseInferenceDefault()
else:
......@@ -90,39 +89,31 @@ def infer_func_form(node, base_type, context=None, enum=False):
# module specific transformation functions #####################################
def transform(module):
try:
tr = MODULE_TRANSFORMS[module.name]
except KeyError:
pass
else:
tr(module)
MANAGER.register_transform(nodes.Module, transform)
# module specific transformation functions #####################################
def hashlib_transform(module):
def hashlib_transform():
template = '''
class %s(object):
class %(name)s(object):
def __init__(self, value=''): pass
def digest(self):
return u''
return %(digest)s
def copy(self):
return self
def update(self, value): pass
def hexdigest(self):
return u''
return ''
@property
def name(self):
return %(name)r
'''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(template % hashfunc for hashfunc in algorithms)
classes = "".join(
template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'}
for hashfunc in algorithms)
return AstroidBuilder(MANAGER).string_build(classes)
fake = AstroidBuilder(MANAGER).string_build(classes)
for hashfunc in algorithms:
module.locals[hashfunc] = fake.locals[hashfunc]
def collections_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def collections_transform():
return AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict):
default_factory = None
......@@ -146,11 +137,9 @@ class deque(object):
''')
for klass in ('deque', 'defaultdict'):
module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def pkg_resources_transform():
return AstroidBuilder(MANAGER).string_build('''
def resource_exists(package_or_requirement, resource_name):
pass
......@@ -187,11 +176,8 @@ def cleanup_resources(force=False):
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def subprocess_transform(module):
def subprocess_transform():
if PY3K:
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
init = """
......@@ -217,7 +203,7 @@ def subprocess_transform(module):
wait_signature = 'def wait(self, timeout=None)'
else:
wait_signature = 'def wait(self)'
fake = AstroidBuilder(MANAGER).string_build('''
return AstroidBuilder(MANAGER).string_build('''
class Popen(object):
returncode = pid = 0
......@@ -241,18 +227,17 @@ class Popen(object):
'communicate': communicate,
'wait_signature': wait_signature})
for func_name, func in fake.locals.items():
module.locals[func_name] = func
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
# namedtuple support ###########################################################
def looks_like_namedtuple(node):
    """Return True if *node* is a call whose callee is literally named
    ``namedtuple`` — either a bare name or an attribute access.

    Exact ``type(...) is`` checks are kept on purpose so that subclasses
    of the node types are not matched.
    """
    callee = node.func
    if type(callee) is nodes.Name:
        return callee.name == 'namedtuple'
    if type(callee) is nodes.Getattr:
        return callee.attrname == 'namedtuple'
    return False
def infer_named_tuple(node, context=None):
"""Specific inference function for namedtuple CallFunc node"""
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
......@@ -285,11 +270,12 @@ def infer_enum(node, context=None):
context=context, enum=True)[0]
return iter([class_node.instanciate_class()])
def infer_enum_class(node, context=None):
def infer_enum_class(node):
""" Specific inference for enums. """
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
for basename in node.basenames:
# TODO: doesn't handle subclasses yet.
# TODO: doesn't handle subclasses yet. This implementation
# is a hack to support enums.
if basename not in names:
continue
if node.root().name == 'enum':
......@@ -299,22 +285,26 @@ def infer_enum_class(node, context=None):
if any(not isinstance(value, nodes.AssName)
for value in values):
continue
parent = values[0].parent
real_value = parent.value
stmt = values[0].statement()
if isinstance(stmt.targets[0], nodes.Tuple):
targets = stmt.targets[0].itered()
else:
targets = stmt.targets
new_targets = []
for target in parent.targets:
for target in targets:
# Replace all the assignments with our mocked class.
classdef = dedent('''
class %(name)s(object):
@property
def value(self):
return %(value)s
# Not the best return.
return None
@property
def name(self):
return %(name)r
%(name)s = %(value)s
''' % {'name': target.name,
'value': real_value.as_string()})
''' % {'name': target.name})
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
fake.parent = target.parent
for method in node.mymethods():
......@@ -324,8 +314,13 @@ def infer_enum_class(node, context=None):
break
return node
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
looks_like_namedtuple)
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
AsStringRegexpPredicate('Enum', 'func'))
MANAGER.register_transform(nodes.Class, infer_enum_class)
# Register the stub-building transforms for the stdlib modules they extend.
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
register_module_extender(MANAGER, 'collections', collections_transform)
# Bug fix: the module name was misspelled as 'pkg_resourcds', so the
# pkg_resources stubs were never applied.
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
......@@ -44,7 +44,7 @@ if sys.version_info >= (3, 0):
def open_source_file(filename):
with open(filename, 'rb') as byte_stream:
encoding = detect_encoding(byte_stream.readline)[0]
stream = open(filename, 'rU', encoding=encoding)
stream = open(filename, 'r', newline=None, encoding=encoding)
try:
data = stream.read()
except UnicodeError: # wrong encoding
......@@ -115,23 +115,24 @@ class AstroidBuilder(InspectBuilder):
path is expected to be a python source file
"""
try:
_, encoding, data = open_source_file(path)
except IOError, exc:
stream, encoding, data = open_source_file(path)
except IOError as exc:
msg = 'Unable to load file %r (%s)' % (path, exc)
raise AstroidBuildingException(msg)
except SyntaxError, exc: # py3k encoding specification error
except SyntaxError as exc: # py3k encoding specification error
raise AstroidBuildingException(exc)
except LookupError, exc: # unknown encoding
except LookupError as exc: # unknown encoding
raise AstroidBuildingException(exc)
# get module name if necessary
if modname is None:
try:
modname = '.'.join(modpath_from_file(path))
except ImportError:
modname = splitext(basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
with stream:
# get module name if necessary
if modname is None:
try:
modname = '.'.join(modpath_from_file(path))
except ImportError:
modname = splitext(basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
def string_build(self, data, modname='', path=None):
"""build astroid from source code string and return rebuilded astroid"""
......@@ -159,7 +160,10 @@ class AstroidBuilder(InspectBuilder):
def _data_build(self, data, modname, path):
"""build tree node from data and add some informations"""
# this method could be wrapped with a pickle/cache function
node = parse(data + '\n')
try:
node = parse(data + '\n')
except TypeError as exc:
raise AstroidBuildingException(exc)
if path is not None:
node_file = abspath(path)
else:
......@@ -170,8 +174,7 @@ class AstroidBuilder(InspectBuilder):
else:
package = path and path.find('__init__.py') > -1 or False
rebuilder = TreeRebuilder(self._manager)
module = rebuilder.visit_module(node, modname, package)
module.file = module.path = node_file
module = rebuilder.visit_module(node, modname, node_file, package)
module._from_nodes = rebuilder._from_nodes
module._delayed_assattr = rebuilder._delayed_assattr
return module
......
......@@ -28,7 +28,7 @@ from astroid.manager import AstroidManager
from astroid.exceptions import (AstroidError, InferenceError, NoDefault,
NotFoundError, UnresolvableName)
from astroid.bases import (YES, Instance, InferenceContext,
_infer_stmts, copy_context, path_wrapper,
_infer_stmts, path_wrapper,
raise_if_nothing_infered)
from astroid.protocols import (
_arguments_infer_argname,
......@@ -175,93 +175,89 @@ def infer_name(self, context=None):
if not stmts:
raise UnresolvableName(self.name)
context = context.clone()
context.lookupname = self.name
return _infer_stmts(stmts, context, frame)
return _infer_stmts(stmts, context, frame, self.name)
nodes.Name._infer = path_wrapper(infer_name)
nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
def infer_callfunc(self, context=None):
"""infer a CallFunc node by trying to guess what the function returns"""
callcontext = context.clone()
callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
callcontext.boundnode = None
if context is None:
context = InferenceContext()
for callee in self.func.infer(context):
if callee is YES:
yield callee
continue
try:
if hasattr(callee, 'infer_call_result'):
for infered in callee.infer_call_result(self, callcontext):
yield infered
except InferenceError:
## XXX log error ?
continue
with context.scope(
callcontext=CallContext(self.args, self.starargs, self.kwargs),
boundnode=None,
):
if callee is YES:
yield callee
continue
try:
if hasattr(callee, 'infer_call_result'):
for infered in callee.infer_call_result(self, context):
yield infered
except InferenceError:
## XXX log error ?
continue
nodes.CallFunc._infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
def infer_import(self, context=None, asname=True):
def infer_import(self, context=None, asname=True, lookupname=None):
"""infer an Import node: return the imported module/object"""
name = context.lookupname
if name is None:
if lookupname is None:
raise InferenceError()
if asname:
yield self.do_import_module(self.real_name(name))
yield self.do_import_module(self.real_name(lookupname))
else:
yield self.do_import_module(name)
yield self.do_import_module(lookupname)
nodes.Import._infer = path_wrapper(infer_import)
def infer_name_module(self, name):
context = InferenceContext()
context.lookupname = name
return self.infer(context, asname=False)
return self.infer(context, asname=False, lookupname=name)
nodes.Import.infer_name_module = infer_name_module
def infer_from(self, context=None, asname=True):
def infer_from(self, context=None, asname=True, lookupname=None):
"""infer a From nodes: return the imported module/object"""
name = context.lookupname
if name is None:
if lookupname is None:
raise InferenceError()
if asname:
name = self.real_name(name)
lookupname = self.real_name(lookupname)
module = self.do_import_module()
try:
context = copy_context(context)
context.lookupname = name
return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
return _infer_stmts(module.getattr(lookupname, ignore_locals=module is self.root()), context, lookupname=lookupname)
except NotFoundError:
raise InferenceError(name)
raise InferenceError(lookupname)
nodes.From._infer = path_wrapper(infer_from)
def infer_getattr(self, context=None):
"""infer a Getattr node by using getattr on the associated object"""
#context = context.clone()
if not context:
context = InferenceContext()
for owner in self.expr.infer(context):
if owner is YES:
yield owner
continue
try:
context.boundnode = owner
for obj in owner.igetattr(self.attrname, context):
yield obj
context.boundnode = None
with context.scope(boundnode=owner):
for obj in owner.igetattr(self.attrname, context):
yield obj
except (NotFoundError, InferenceError):
context.boundnode = None
pass
except AttributeError:
# XXX method / function
context.boundnode = None
pass
nodes.Getattr._infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # # won't work with a path wrapper
def infer_global(self, context=None):
if context.lookupname is None:
def infer_global(self, context=None, lookupname=None):
if lookupname is None:
raise InferenceError()
try:
return _infer_stmts(self.root().getattr(context.lookupname), context)
return _infer_stmts(self.root().getattr(lookupname), context)
except NotFoundError:
raise InferenceError()
nodes.Global._infer = path_wrapper(infer_global)
......@@ -269,12 +265,12 @@ nodes.Global._infer = path_wrapper(infer_global)
def infer_subscript(self, context=None):
"""infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
value = self.value.infer(context).next()
value = next(self.value.infer(context))
if value is YES:
yield YES
return
index = self.slice.infer(context).next()
index = next(self.slice.infer(context))
if index is YES:
yield YES
return
......@@ -287,6 +283,12 @@ def infer_subscript(self, context=None):
except (IndexError, TypeError):
yield YES
return
# Prevent inferring if the infered subscript
# is the same as the original subscripted object.
if self is assigned:
yield YES
return
for infered in assigned.infer(context):
yield infered
else:
......@@ -347,11 +349,10 @@ def infer_binop(self, context=None):
nodes.BinOp._infer = path_wrapper(infer_binop)
def infer_arguments(self, context=None):
name = context.lookupname
if name is None:
def infer_arguments(self, context=None, lookupname=None):
if lookupname is None:
raise InferenceError()
return _arguments_infer_argname(self, name, context)
return _arguments_infer_argname(self, lookupname, context)
nodes.Arguments._infer = infer_arguments
......
This diff is collapsed.
......@@ -18,16 +18,18 @@
"""This module contains some mixins for the different nodes.
"""
from logilab.common.decorators import cachedproperty
from astroid.exceptions import (AstroidBuildingException, InferenceError,
NotFoundError)
class BlockRangeMixIn(object):
"""override block range """
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self._blockstart_toline()
@cachedproperty
def blockstart_tolineno(self):
return self.lineno
def _elsed_block_range(self, lineno, orelse, last=None):
"""handle block line numbers range for try/finally, for, if and while
......@@ -105,7 +107,7 @@ class FromImportMixIn(FilterStmtsMixin):
return mymodule.import_module(modname, level=level)
except AstroidBuildingException:
raise InferenceError(modname)
except SyntaxError, ex:
except SyntaxError as ex:
raise InferenceError(str(ex))
def real_name(self, asname):
......
This diff is collapsed.
......@@ -20,6 +20,9 @@
import sys
import six
from logilab.common.decorators import cachedproperty
from astroid.exceptions import NoDefault
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
_infer_stmts, YES, BUILTINS)
......@@ -39,7 +42,7 @@ def unpack_infer(stmt, context=None):
yield infered_elt
return
# if infered is a final node, return it and stop
infered = stmt.infer(context).next()
infered = next(stmt.infer(context))
if infered is stmt:
yield infered
return
......@@ -127,8 +130,7 @@ class LookupMixIn(object):
the lookup method
"""
frame, stmts = self.lookup(name)
context = InferenceContext()
return _infer_stmts(stmts, context, frame)
return _infer_stmts(stmts, None, frame)
def _filter_stmts(self, stmts, frame, offset):
"""filter statements to remove ignorable statements.
......@@ -146,6 +148,20 @@ class LookupMixIn(object):
myframe = self.frame().parent.frame()
else:
myframe = self.frame()
# If the frame of this node is the same as the statement
# of this node, then the node is part of a class or
# a function definition and the frame of this node should be the
# the upper frame, not the frame of the definition.
# For more information why this is important,
# see Pylint issue #295.
# For example, for 'b', the statement is the same
# as the frame / scope:
#
# def test(b=1):
# ...
if self.statement() is myframe and myframe.parent:
myframe = myframe.parent.frame()
if not myframe is frame or self is frame:
return stmts
mystmt = self.statement()
......@@ -289,6 +305,11 @@ class Arguments(NodeNG, AssignTypeMixin):
return name
return None
@cachedproperty
def fromlineno(self):
lineno = super(Arguments, self).fromlineno
return max(lineno, self.parent.fromlineno)
def format_args(self):
"""return arguments formatted as string"""
result = []
......@@ -475,7 +496,7 @@ class Const(NodeNG, Instance):
self.value = value
def getitem(self, index, context=None):
if isinstance(self.value, basestring):
if isinstance(self.value, six.string_types):
return Const(self.value[index])
raise TypeError('%r (value=%s)' % (self, self.value))
......@@ -483,7 +504,7 @@ class Const(NodeNG, Instance):
return False
def itered(self):
if isinstance(self.value, basestring):
if isinstance(self.value, six.string_types):
return self.value
raise TypeError()
......@@ -528,7 +549,7 @@ class Dict(NodeNG, Instance):
self.items = []
else:
self.items = [(const_factory(k), const_factory(v))
for k, v in items.iteritems()]
for k, v in items.items()]
def pytype(self):
return '%s.dict' % BUILTINS
......@@ -583,7 +604,8 @@ class ExceptHandler(Statement, AssignTypeMixin):
name = None
body = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
if self.name:
return self.name.tolineno
elif self.type:
......@@ -591,11 +613,6 @@ class ExceptHandler(Statement, AssignTypeMixin):
else:
return self.lineno
def set_line_info(self, lastchild):
self.fromlineno = self.lineno
self.tolineno = lastchild.tolineno
self.blockstart_tolineno = self._blockstart_toline()
def catch(self, exceptions):
if self.type is None or exceptions is None:
return True
......@@ -626,7 +643,8 @@ class For(BlockRangeMixIn, AssignTypeMixin, Statement):
orelse = None
optional_assign = True
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.iter.tolineno
......@@ -661,7 +679,8 @@ class If(BlockRangeMixIn, Statement):
body = None
orelse = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno
def block_range(self, lineno):
......@@ -812,9 +831,6 @@ class TryExcept(BlockRangeMixIn, Statement):
def _infer_name(self, frame, name):
return name
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno):
"""handle block line numbers range for try/except statements"""
last = None
......@@ -834,9 +850,6 @@ class TryFinally(BlockRangeMixIn, Statement):
body = None
finalbody = None
def _blockstart_toline(self):
return self.lineno
def block_range(self, lineno):
"""handle block line numbers range for try/finally statements"""
child = self.body[0]
......@@ -880,7 +893,8 @@ class While(BlockRangeMixIn, Statement):
body = None
orelse = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.test.tolineno
def block_range(self, lineno):
......@@ -894,7 +908,8 @@ class With(BlockRangeMixIn, AssignTypeMixin, Statement):
items = None
body = None
def _blockstart_toline(self):
@cachedproperty
def blockstart_tolineno(self):
return self.items[-1][0].tolineno
def get_children(self):
......
......@@ -23,7 +23,7 @@ __doctype__ = "restructuredtext en"
from astroid.exceptions import InferenceError, NoDefault, NotFoundError
from astroid.node_classes import unpack_infer
from astroid.bases import copy_context, \
from astroid.bases import InferenceContext, \
raise_if_nothing_infered, yes_if_nothing_infered, Instance, YES
from astroid.nodes import const_factory
from astroid import nodes
......@@ -91,7 +91,7 @@ BIN_OP_IMPL = {'+': lambda a, b: a + b,
'<<': lambda a, b: a << b,
'>>': lambda a, b: a >> b,
}
for key, impl in BIN_OP_IMPL.items():
for key, impl in list(BIN_OP_IMPL.items()):
BIN_OP_IMPL[key+'='] = impl
def const_infer_binary_op(self, operator, other, context):
......@@ -282,7 +282,8 @@ def _arguments_infer_argname(self, name, context):
# if there is a default value, yield it. And then yield YES to reflect
# we can't guess given argument value
try:
context = copy_context(context)
if context is None:
context = InferenceContext()
for infered in self.default_value(name).infer(context):
yield infered
yield YES
......@@ -294,13 +295,8 @@ def arguments_assigned_stmts(self, node, context, asspath=None):
if context.callcontext:
# reset call context/name
callcontext = context.callcontext
context = copy_context(context)
context.callcontext = None
for infered in callcontext.infer_argument(self.parent, node.name, context):
yield infered
return
for infered in _arguments_infer_argname(self, node.name, context):
yield infered
return callcontext.infer_argument(self.parent, node.name, context)
return _arguments_infer_argname(self, node.name, context)
nodes.Arguments.assigned_stmts = arguments_assigned_stmts
......
......@@ -25,6 +25,7 @@ import sys
from os.path import abspath
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
ismethoddescriptor, isclass, isbuiltin, ismodule)
import six
from astroid.node_classes import CONST_CLS
from astroid.nodes import (Module, Class, Const, const_factory, From,
......@@ -57,7 +58,10 @@ def attach_dummy_node(node, name, object=_marker):
enode.object = object
_attach_local_node(node, enode, name)
EmptyNode.has_underlying_object = lambda self: self.object is not _marker
def _has_underlying_object(self):
return hasattr(self, 'object') and self.object is not _marker
EmptyNode.has_underlying_object = _has_underlying_object
def attach_const_node(node, name, value):
"""create a Const node and register it in the locals of the given
......@@ -247,10 +251,11 @@ class InspectBuilder(object):
attach_dummy_node(node, name)
continue
if ismethod(member):
member = member.im_func
member = six.get_method_function(member)
if isfunction(member):
# verify this is not an imported function
filename = getattr(member.func_code, 'co_filename', None)
filename = getattr(six.get_function_code(member),
'co_filename', None)
if filename is None:
assert isinstance(member, object)
object_build_methoddescriptor(node, member, name)
......@@ -261,8 +266,6 @@ class InspectBuilder(object):
elif isbuiltin(member):
if (not _io_discrepancy(member) and
self.imported_member(node, member, name)):
#if obj is object:
# print 'skippp', obj, name, member
continue
object_build_methoddescriptor(node, member, name)
elif isclass(member):
......@@ -299,7 +302,7 @@ class InspectBuilder(object):
modname = getattr(member, '__module__', None)
except:
# XXX use logging
print 'unexpected error while building astroid from living object'
print('unexpected error while building astroid from living object')
import traceback
traceback.print_exc()
modname = None
......
This diff is collapsed.
This diff is collapsed.
......@@ -18,6 +18,7 @@
"""this module contains some utilities to navigate in the tree or to
extract information from it
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
......@@ -109,22 +110,22 @@ def _check_children(node):
for child in node.get_children():
ok = False
if child is None:
print "Hm, child of %s is None" % node
print("Hm, child of %s is None" % node)
continue
if not hasattr(child, 'parent'):
print " ERROR: %s has child %s %x with no parent" % (
node, child, id(child))
print(" ERROR: %s has child %s %x with no parent" % (
node, child, id(child)))
elif not child.parent:
print " ERROR: %s has child %s %x with parent %r" % (
node, child, id(child), child.parent)
print(" ERROR: %s has child %s %x with parent %r" % (
node, child, id(child), child.parent))
elif child.parent is not node:
print " ERROR: %s %x has child %s %x with wrong parent %s" % (
node, id(node), child, id(child), child.parent)
print(" ERROR: %s %x has child %s %x with wrong parent %s" % (
node, id(node), child, id(child), child.parent))
else:
ok = True
if not ok:
print "lines;", node.lineno, child.lineno
print "of module", node.root(), node.root().name
print("lines;", node.lineno, child.lineno)
print("of module", node.root(), node.root().name)
raise AstroidBuildingException
_check_children(child)
......
URL: http://www.logilab.org/project/logilab-common
Version: 0.63.1
Version: 0.63.2
License: GPL
License File: LICENSE.txt
......
......@@ -25,7 +25,7 @@ modname = 'common'
subpackage_of = 'logilab'
subpackage_master = True
numversion = (0, 63, 0)
numversion = (0, 63, 2)
version = '.'.join([str(num) for num in numversion])
license = 'LGPL' # 2.1 or later
......@@ -43,6 +43,8 @@ include_dirs = [join('test', 'data')]
install_requires = [
'six >= 1.4.0',
]
test_require = ['pytz']
if sys.version_info < (2, 7):
install_requires.append('unittest2 >= 0.5.1')
if os.name == 'nt':
......
......@@ -33,15 +33,17 @@ Example::
help_do_pionce = ("pionce", "pionce duree", _("met ton corps en veille"))
def do_pionce(self):
print 'nap is good'
print('nap is good')
help_do_ronfle = ("ronfle", "ronfle volume", _("met les autres en veille"))
def do_ronfle(self):
print 'fuuuuuuuuuuuu rhhhhhrhrhrrh'
print('fuuuuuuuuuuuu rhhhhhrhrhrrh')
cl = BookShell()
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
from six.moves import builtins, input
......@@ -66,7 +68,7 @@ def init_readline(complete_method, histfile=None):
import atexit
atexit.register(readline.write_history_file, histfile)
except:
print 'readline is not available :-('
print('readline is not available :-(')
class Completer :
......@@ -157,10 +159,10 @@ class CLIHelper:
return self.commands.keys()
def _print_help(self, cmd, syntax, explanation):
print _('Command %s') % cmd
print _('Syntax: %s') % syntax
print '\t', explanation
print
print(_('Command %s') % cmd)
print(_('Syntax: %s') % syntax)
print('\t', explanation)
print()
# predefined commands #####################################################
......@@ -170,20 +172,20 @@ class CLIHelper:
if command in self._command_help:
self._print_help(*self._command_help[command])
elif command is None or command not in self._topics:
print _("Use help <topic> or help <command>.")
print _("Available topics are:")
print(_("Use help <topic> or help <command>."))
print(_("Available topics are:"))
topics = sorted(self._topics.keys())
for topic in topics:
print '\t', topic
print
print _("Available commands are:")
print('\t', topic)
print()
print(_("Available commands are:"))
commands = self.commands.keys()
commands.sort()
for command in commands:
print '\t', command[len(self.CMD_PREFIX):]
print('\t', command[len(self.CMD_PREFIX):])
else:
print _('Available commands about %s:') % command
print(_('Available commands about %s:') % command)
print
for command_help_method in self._topics[command]:
try:
......@@ -194,8 +196,8 @@ class CLIHelper:
except:
import traceback
traceback.print_exc()
print 'ERROR in help method %s'% (
command_help_method.__name__)
print('ERROR in help method %s'% (
command_help_method.__name__))
help_do_help = ("help", "help [topic|command]",
_("print help message for the given topic/command or \
......
......@@ -409,21 +409,20 @@ def rest_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using as ReST formatted output"""
encoding = _get_encoding(encoding, stream)
if section:
print >> stream, '%s\n%s' % (section, "'"*len(section))
print('%s\n%s' % (section, "'"*len(section)), file=stream)
if doc:
print >> stream, _encode(normalize_text(doc, line_len=79, indent=''),
encoding)
print >> stream
print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
print(file=stream)
for optname, optdict, value in options:
help = optdict.get('help')
print >> stream, ':%s:' % optname
print(':%s:' % optname, file=stream)
if help:
help = normalize_text(help, line_len=79, indent=' ')
print >> stream, _encode(help, encoding)
print(_encode(help, encoding), file=stream)
if value:
value = _encode(format_option_value(optdict, value), encoding)
print >> stream, ''
print >> stream, ' Default: ``%s``' % value.replace("`` ", "```` ``")
print(file=stream)
print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
# Options Manager ##############################################################
......
......@@ -51,7 +51,7 @@ def setugid(user):
os.environ['HOME'] = passwd.pw_dir
def daemonize(pidfile=None, uid=None, umask=077):
def daemonize(pidfile=None, uid=None, umask=0o77):
"""daemonize a Unix process. Set paranoid umask by default.
Return 1 in the original process, 2 in the first fork, and None for the
......@@ -71,9 +71,6 @@ def daemonize(pidfile=None, uid=None, umask=077):
return 2
# move to the root to avoit mount pb
os.chdir('/')
# set umask if specified
if umask is not None:
os.umask(umask)
# redirect standard descriptors
null = os.open('/dev/null', os.O_RDWR)
for i in range(3):
......@@ -95,7 +92,9 @@ def daemonize(pidfile=None, uid=None, umask=077):
f = file(pidfile, 'w')
f.write(str(os.getpid()))
f.close()
os.chmod(pidfile, 0644)
# set umask if specified
if umask is not None:
os.umask(umask)
# change process uid
if uid:
setugid(uid)
......
......@@ -314,7 +314,7 @@ def ustrftime(somedate, fmt='%Y-%m-%d'):
def utcdatetime(dt):
if dt.tzinfo is None:
return dt
return datetime(*dt.utctimetuple()[:7])
return (dt.replace(tzinfo=None) - dt.utcoffset())
def utctime(dt):
if dt.tzinfo is None:
......
......@@ -30,6 +30,7 @@ Usage:
http://www.physics.ox.ac.uk/users/santoso/Software.Repository.html
page says code is "available as is without any warranty or support".
"""
from __future__ import print_function
import struct
import os, os.path
......@@ -79,7 +80,7 @@ class Dbase:
def open(self, db_name):
filesize = os.path.getsize(db_name)
if filesize <= 68:
raise IOError, 'The file is not large enough to be a dbf file'
raise IOError('The file is not large enough to be a dbf file')
self.fdb = open(db_name, 'rb')
......@@ -152,7 +153,7 @@ class Dbase:
This function accept record number from 0 to N-1
"""
if rec_no < 0 or rec_no > self.num_records:
raise Exception, 'Unable to extract data outside the range'
raise Exception('Unable to extract data outside the range')
offset = self.header['Record Size'] * rec_no
data = self.db_data[offset:offset+self.row_len]
......@@ -227,4 +228,4 @@ def readDbf(filename):
if __name__=='__main__':
rec = readDbf('dbf/sptable.dbf')
for line in rec:
print '%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip())
print('%s %s' % (line['GENUS'].strip(), line['SPECIES'].strip()))
......@@ -125,11 +125,12 @@ class DeprecationManager(object):
return self.class_deprecated(version)(old_name, (new_class,), clsdict)
except (NameError, TypeError):
# old-style class
warn = self.warn
class DeprecatedClass(new_class):
"""FIXME: There might be a better way to handle old/new-style class
"""
def __init__(self, *args, **kwargs):
self.warn(version, message, stacklevel=3)
warn(version, message, stacklevel=3)
new_class.__init__(self, *args, **kwargs)
return DeprecatedClass
......
......@@ -29,6 +29,8 @@ Example:
With mymod.build that defines two functions run and add_options
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
from warnings import warn
......@@ -55,9 +57,9 @@ class OptionParser(optparse.OptionParser):
def print_main_help(self):
optparse.OptionParser.print_help(self)
print '\ncommands:'
print('\ncommands:')
for cmdname, (_, help) in self._commands.items():
print '% 10s - %s' % (cmdname, help)
print('% 10s - %s' % (cmdname, help))
def parse_command(self, args):
if len(args) == 0:
......@@ -78,7 +80,7 @@ class OptionParser(optparse.OptionParser):
# optparse inserts self.description between usage and options help
self.description = help
if isinstance(mod_or_f, str):
exec 'from %s import run, add_options' % mod_or_f
exec('from %s import run, add_options' % mod_or_f)
else:
run, add_options = mod_or_f
add_options(self)
......
......@@ -119,12 +119,14 @@ from time import time, clock
import warnings
import types
from inspect import isgeneratorfunction, isclass
from contextlib import contextmanager
from logilab.common.fileutils import abspath_listdir
from logilab.common import textutils
from logilab.common import testlib, STD_BLACKLIST
# use the same unittest module as testlib
from logilab.common.testlib import unittest, start_interactive_mode
from logilab.common.deprecation import deprecated
import doctest
import unittest as unittest_legacy
......@@ -145,28 +147,41 @@ except ImportError:
CONF_FILE = 'pytestconf.py'
## coverage hacks, do not read this, do not read this, do not read this
## coverage pausing tools
@contextmanager
def replace_trace(trace=None):
"""A context manager that temporary replaces the trace function"""
oldtrace = sys.gettrace()
sys.settrace(trace)
try:
yield
finally:
# specific hack to work around a bug in pycoverage, see
# https://bitbucket.org/ned/coveragepy/issue/123
if (oldtrace is not None and not callable(oldtrace) and
hasattr(oldtrace, 'pytrace')):
oldtrace = oldtrace.pytrace
sys.settrace(oldtrace)
def pause_trace():
"""A context manager that temporary pauses any tracing"""
return replace_trace()
# hey, but this is an aspect, right ?!!!
class TraceController(object):
nesting = 0
ctx_stack = []
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def pause_tracing(cls):
if not cls.nesting:
cls.tracefunc = staticmethod(getattr(sys, '__settrace__', sys.settrace))
cls.oldtracer = getattr(sys, '__tracer__', None)
sys.__notrace__ = True
cls.tracefunc(None)
cls.nesting += 1
pause_tracing = classmethod(pause_tracing)
cls.ctx_stack.append(pause_trace())
cls.ctx_stack[-1].__enter__()
@classmethod
@deprecated('[lgc 0.63.1] Use the pause_trace() context manager')
def resume_tracing(cls):
cls.nesting -= 1
assert cls.nesting >= 0
if not cls.nesting:
cls.tracefunc(cls.oldtracer)
delattr(sys, '__notrace__')
resume_tracing = classmethod(resume_tracing)
cls.ctx_stack.pop().__exit__(None, None, None)
pause_tracing = TraceController.pause_tracing
......@@ -174,20 +189,18 @@ resume_tracing = TraceController.resume_tracing
def nocoverage(func):
"""Function decorator that pauses tracing functions"""
if hasattr(func, 'uncovered'):
return func
func.uncovered = True
def not_covered(*args, **kwargs):
pause_tracing()
try:
with pause_trace():
return func(*args, **kwargs)
finally:
resume_tracing()
not_covered.uncovered = True
return not_covered
## end of coverage hacks
## end of coverage pausing tools
TESTFILE_RE = re.compile("^((unit)?test.*|smoketest)\.py$")
......@@ -1082,8 +1095,14 @@ class NonStrictTestLoader(unittest.TestLoader):
testCaseClass)
return [testname for testname in testnames if not is_skipped(testname)]
# The 2 functions below are modified versions of the TestSuite.run method
# that is provided with unittest2 for python 2.6, in unittest2/suite.py
# It is used to monkeypatch the original implementation to support
# extra runcondition and options arguments (see in testlib.py)
def _ts_run(self, result, runcondition=None, options=None):
self._wrapped_run(result,runcondition=runcondition, options=options)
self._wrapped_run(result, runcondition=runcondition, options=options)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
......@@ -1097,10 +1116,17 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
# --- modifications to deal with _wrapped_run ---
# original code is:
#
# if not debug:
# test(result)
# else:
# test.debug()
if hasattr(test, '_wrapped_run'):
try:
test._wrapped_run(result, debug, runcondition=runcondition, options=options)
......@@ -1113,6 +1139,25 @@ def _ts_wrapped_run(self, result, debug=False, runcondition=None, options=None):
test(result)
else:
test.debug()
# --- end of modifications to deal with _wrapped_run ---
return result
if sys.version_info >= (2, 7):
# The function below implements a modified version of the
# TestSuite.run method that is provided with python 2.7, in
# unittest/suite.py
def _ts_run(self, result, debug=False, runcondition=None, options=None):
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
self._wrapped_run(result, debug, runcondition, options)
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
def enable_dbc(*args):
......
......@@ -42,13 +42,13 @@ def layout_title(layout):
"""
for child in layout.children:
if isinstance(child, Title):
return ' '.join([node.data for node in get_nodes(child, Text)])
return u' '.join([node.data for node in get_nodes(child, Text)])
def build_summary(layout, level=1):
"""make a summary for the report, including X level"""
assert level > 0
level -= 1
summary = List(klass='summary')
summary = List(klass=u'summary')
for child in layout.children:
if not isinstance(child, Section):
continue
......@@ -57,7 +57,7 @@ def build_summary(layout, level=1):
continue
if not child.id:
child.id = label.replace(' ', '-')
node = Link('#'+child.id, label=label or child.id)
node = Link(u'#'+child.id, label=label or child.id)
# FIXME: Three following lines produce not very compliant
# docbook: there are some useless <para><para>. They might be
# replaced by the three commented lines but this then produces
......@@ -99,7 +99,7 @@ class BaseWriter(object):
for child in getattr(layout, 'children', ()):
child.accept(self)
def writeln(self, string=''):
def writeln(self, string=u''):
"""write a line in the output buffer"""
self.write(string + linesep)
......@@ -132,7 +132,7 @@ class BaseWriter(object):
result[-1].append(cell)
# fill missing cells
while len(result[-1]) < cols:
result[-1].append('')
result[-1].append(u'')
return result
def compute_content(self, layout):
......@@ -147,7 +147,7 @@ class BaseWriter(object):
stream.write(data)
except UnicodeEncodeError:
stream.write(data.encode(self.encoding))
def writeln(data=''):
def writeln(data=u''):
try:
stream.write(data+linesep)
except UnicodeEncodeError:
......
......@@ -27,8 +27,8 @@ from logilab.common.textutils import linesep
from logilab.common.ureports import BaseWriter
TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
BULLETS = ['*', '-']
TITLE_UNDERLINES = [u'', u'=', u'-', u'`', u'.', u'~', u'^']
BULLETS = [u'*', u'-']
class TextWriter(BaseWriter):
"""format layouts as text
......@@ -48,13 +48,13 @@ class TextWriter(BaseWriter):
if self.pending_urls:
self.writeln()
for label, url in self.pending_urls:
self.writeln('.. _`%s`: %s' % (label, url))
self.writeln(u'.. _`%s`: %s' % (label, url))
self.pending_urls = []
self.section -= 1
self.writeln()
def visit_title(self, layout):
title = ''.join(list(self.compute_content(layout)))
title = u''.join(list(self.compute_content(layout)))
self.writeln(title)
try:
self.writeln(TITLE_UNDERLINES[self.section] * len(title))
......@@ -88,19 +88,19 @@ class TextWriter(BaseWriter):
def default_table(self, layout, table_content, cols_width):
"""format a table"""
cols_width = [size+1 for size in cols_width]
format_strings = ' '.join(['%%-%ss'] * len(cols_width))
format_strings = u' '.join([u'%%-%ss'] * len(cols_width))
format_strings = format_strings % tuple(cols_width)
format_strings = format_strings.split(' ')
table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
table_linesep = u'\n+' + u'+'.join([u'-'*w for w in cols_width]) + u'+\n'
headsep = u'\n+' + u'+'.join([u'='*w for w in cols_width]) + u'+\n'
# FIXME: layout.cheaders
self.write(table_linesep)
for i in range(len(table_content)):
self.write('|')
self.write(u'|')
line = table_content[i]
for j in range(len(line)):
self.write(format_strings[j] % line[j])
self.write('|')
self.write(u'|')
if i == 0 and layout.rheaders:
self.write(headsep)
else:
......@@ -109,7 +109,7 @@ class TextWriter(BaseWriter):
def field_table(self, layout, table_content, cols_width):
"""special case for field table"""
assert layout.cols == 2
format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0])
format_string = u'%s%%-%ss: %%s' % (linesep, cols_width[0])
for field, value in table_content:
self.write(format_string % (field, value))
......@@ -120,14 +120,14 @@ class TextWriter(BaseWriter):
indent = ' ' * self.list_level
self.list_level += 1
for child in layout.children:
self.write('%s%s%s ' % (linesep, indent, bullet))
self.write(u'%s%s%s ' % (linesep, indent, bullet))
child.accept(self)
self.list_level -= 1
def visit_link(self, layout):
"""add a hyperlink"""
if layout.label != layout.url:
self.write('`%s`_' % layout.label)
self.write(u'`%s`_' % layout.label)
self.pending_urls.append( (layout.label, layout.url) )
else:
self.write(layout.url)
......@@ -135,11 +135,11 @@ class TextWriter(BaseWriter):
def visit_verbatimtext(self, layout):
"""display a verbatim layout as text (so difficult ;)
"""
self.writeln('::\n')
self.writeln(u'::\n')
for line in layout.data.splitlines():
self.writeln(' ' + line)
self.writeln(u' ' + line)
self.writeln()
def visit_text(self, layout):
"""add some text"""
self.write(layout.data)
self.write(u'%s' % layout.data)
from __future__ import print_function
import logging
import urllib2
......@@ -84,4 +86,4 @@ if __name__ == '__main__':
# test with url sys.argv[1]
h = HTTPGssapiAuthHandler()
response = urllib2.build_opener(h, ch).open(sys.argv[1])
print '\nresponse: %s\n--------------\n' % response.code, response.info()
print('\nresponse: %s\n--------------\n' % response.code, response.info())
URL: http://www.pylint.org/
Version: 1.3.1
Version: 1.4.0
License: GPL
License File: LICENSE.txt
......@@ -7,4 +7,4 @@ Description:
This directory contains the pylint module.
Local Modifications:
None
- applied upstream fix https://bitbucket.org/logilab/pylint/commits/5df347467ee0
......@@ -15,6 +15,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
from .__pkginfo__ import version as __version__
def run_pylint():
"""run pylint"""
from pylint.lint import Run
......
......@@ -15,18 +15,14 @@
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""pylint packaging information"""
import sys
from __future__ import absolute_import
modname = distname = 'pylint'
numversion = (1, 3, 1)
numversion = (1, 4, 0)
version = '.'.join([str(num) for num in numversion])
if sys.version_info < (2, 6):
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1',
'StringFormat']
else:
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.2.1']
install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.2', 'six']
license = 'GPL'
description = "python code static checker"
......
......@@ -30,7 +30,9 @@ Base id of standard checkers (used in msg and report ids):
12: logging
13: string_format
14: string_constant
15-50: not yet used: reserved for future internal checkers.
15: stdlib
16: python3
17-50: not yet used: reserved for future internal checkers.
51-99: perhaps used: reserved for external checkers
The raw_metrics checker has no number associated since it doesn't emit any
......@@ -46,6 +48,8 @@ from logilab.common.configuration import OptionsProviderMixIn
from pylint.reporters import diff_string
from pylint.utils import register_plugins
from pylint.interfaces import UNDEFINED
def table_lines_from_stats(stats, old_stats, columns):
"""get values listed in <columns> from <stats> and <old_stats>,
......@@ -55,7 +59,7 @@ def table_lines_from_stats(stats, old_stats, columns):
lines = []
for m_type in columns:
new = stats[m_type]
format = str
format = str # pylint: disable=redefined-builtin
if isinstance(new, float):
format = lambda num: '%.3f' % num
old = old_stats.get(m_type)
......@@ -80,6 +84,8 @@ class BaseChecker(OptionsProviderMixIn):
msgs = {}
# reports issued by this checker
reports = ()
# mark this checker as enabled or not.
enabled = True
def __init__(self, linter=None):
"""checker instances should have the linter as argument
......@@ -90,9 +96,9 @@ class BaseChecker(OptionsProviderMixIn):
OptionsProviderMixIn.__init__(self)
self.linter = linter
def add_message(self, msg_id, line=None, node=None, args=None):
def add_message(self, msg_id, line=None, node=None, args=None, confidence=UNDEFINED):
"""add a message of a given type"""
self.linter.add_message(msg_id, line, node, args)
self.linter.add_message(msg_id, line, node, args, confidence)
# dummy methods implementing the IChecker interface
......@@ -103,31 +109,6 @@ class BaseChecker(OptionsProviderMixIn):
"""called after visiting project (i.e set of modules)"""
class BaseRawChecker(BaseChecker):
"""base class for raw checkers"""
def process_module(self, node):
"""process a module
the module's content is accessible via the stream object
stream must implement the readline method
"""
warnings.warn("Modules that need access to the tokens should "
"use the ITokenChecker interface.",
DeprecationWarning)
stream = node.file_stream
stream.seek(0) # XXX may be removed with astroid > 0.23
if sys.version_info <= (3, 0):
self.process_tokens(tokenize.generate_tokens(stream.readline))
else:
self.process_tokens(tokenize.tokenize(stream.readline))
def process_tokens(self, tokens):
"""should be overridden by subclasses"""
raise NotImplementedError()
class BaseTokenChecker(BaseChecker):
"""Base class for checkers that want to have access to the token stream."""
......
This diff is collapsed.
This diff is collapsed.
......@@ -15,14 +15,15 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""check for signs of poor design"""
import re
from collections import defaultdict
from astroid import Function, If, InferenceError
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
import re
# regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile('_.*')
......@@ -174,7 +175,7 @@ class MisdesignChecker(BaseChecker):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branches = []
self._branches = defaultdict(int)
self._used_abstracts = {}
self._used_ifaces = {}
self._abstracts = []
......@@ -200,7 +201,6 @@ class MisdesignChecker(BaseChecker):
def visit_class(self, node):
"""check size of inheritance hierarchy and number of instance attributes
"""
self._inc_branch()
# Is the total inheritance hierarchy is 7 or less?
nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents:
......@@ -241,12 +241,9 @@ class MisdesignChecker(BaseChecker):
def leave_class(self, node):
"""check number of public methods"""
nb_public_methods = 0
special_methods = set()
for method in node.methods():
for method in node.mymethods():
if not method.name.startswith('_'):
nb_public_methods += 1
if method.name.startswith("__"):
special_methods.add(method.name)
# Does the class contain less than 20 public methods ?
if nb_public_methods > self.config.max_public_methods:
self.add_message('too-many-public-methods', node=node,
......@@ -257,20 +254,19 @@ class MisdesignChecker(BaseChecker):
return
# Does the class contain more than 5 public methods ?
if nb_public_methods < self.config.min_public_methods:
self.add_message('R0903', node=node,
self.add_message('too-few-public-methods', node=node,
args=(nb_public_methods,
self.config.min_public_methods))
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals', 'too-many-statements')
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def visit_function(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
self._inc_branch()
# init branch and returns counters
self._returns.append(0)
self._branches.append(0)
# check number of arguments
args = node.args.args
if args is not None:
......@@ -291,7 +287,9 @@ class MisdesignChecker(BaseChecker):
# init statements counter
self._stmts = 1
@check_messages('too-many-return-statements', 'too-many-branches', 'too-many-arguments', 'too-many-locals', 'too-many-statements')
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def leave_function(self, node):
"""most of the work is done here on close:
checks for max returns, branch, return in __init__
......@@ -300,7 +298,7 @@ class MisdesignChecker(BaseChecker):
if returns > self.config.max_returns:
self.add_message('too-many-return-statements', node=node,
args=(returns, self.config.max_returns))
branches = self._branches.pop()
branches = self._branches[node]
if branches > self.config.max_branches:
self.add_message('too-many-branches', node=node,
args=(branches, self.config.max_branches))
......@@ -327,12 +325,12 @@ class MisdesignChecker(BaseChecker):
branches = len(node.handlers)
if node.orelse:
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
self._stmts += branches
def visit_tryfinally(self, _):
def visit_tryfinally(self, node):
"""increments the branches counter"""
self._inc_branch(2)
self._inc_branch(node, 2)
self._stmts += 2
def visit_if(self, node):
......@@ -342,7 +340,7 @@ class MisdesignChecker(BaseChecker):
if node.orelse and (len(node.orelse) > 1 or
not isinstance(node.orelse[0], If)):
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
self._stmts += branches
def visit_while(self, node):
......@@ -350,15 +348,13 @@ class MisdesignChecker(BaseChecker):
branches = 1
if node.orelse:
branches += 1
self._inc_branch(branches)
self._inc_branch(node, branches)
visit_for = visit_while
def _inc_branch(self, branchesnum=1):
def _inc_branch(self, node, branchesnum=1):
"""increments the branches counter"""
branches = self._branches
for i in xrange(len(branches)):
branches[i] += branchesnum
self._branches[node.scope()] += branchesnum
# FIXME: make a nice report...
......
This diff is collapsed.
This diff is collapsed.
......@@ -16,6 +16,10 @@
"""imports checkers for Python code"""
import sys
from collections import defaultdict
import six
from six.moves import map # pylint: disable=redefined-builtin
from logilab.common.graph import get_cycles, DotBackend
from logilab.common.ureports import VerbatimText, Paragraph
......@@ -27,8 +31,16 @@ from astroid.modutils import get_module_part, is_standard_module
from pylint.interfaces import IAstroidChecker
from pylint.utils import EmptyReport
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from pylint.checkers.utils import check_messages, is_import_error
def _except_import_error(node):
    """Tell whether *node* is a try/except that catches ImportError.

    Returns a truthy value when at least one exception handler on the
    TryExcept node is inferred to handle ImportError; returns a falsy
    value (None) when *node* is not a TryExcept node at all.
    """
    if isinstance(node, astroid.TryExcept):
        # Check every handler; one ImportError catcher is enough.
        return any(is_import_error(handler) for handler in node.handlers)
    return None
def get_first_import(node, context, name, base, level):
"""return the node where [base.]<name> is imported or None if not found
......@@ -98,14 +110,14 @@ def dependencies_graph(filename, dep_info):
done = {}
printer = DotBackend(filename[:-4], rankdir='LR')
printer.emit('URL="." node[shape="box"]')
for modname, dependencies in sorted(dep_info.iteritems()):
for modname, dependencies in sorted(six.iteritems(dep_info)):
done[modname] = 1
printer.emit_node(modname)
for modname in dependencies:
if modname not in done:
done[modname] = 1
printer.emit_node(modname)
for depmodname, dependencies in sorted(dep_info.iteritems()):
for depmodname, dependencies in sorted(six.iteritems(dep_info)):
for modname in dependencies:
printer.emit_edge(modname, depmodname)
printer.generate(filename)
......@@ -220,20 +232,21 @@ given file (report RP0402 must not be disabled)'}
self.linter.add_stats(dependencies={})
self.linter.add_stats(cycles=[])
self.stats = self.linter.stats
self.import_graph = {}
self.import_graph = defaultdict(set)
def close(self):
"""called before visiting project (i.e set of modules)"""
# don't try to compute cycles if the associated message is disabled
if self.linter.is_message_enabled('cyclic-import'):
for cycle in get_cycles(self.import_graph):
vertices = list(self.import_graph)
for cycle in get_cycles(self.import_graph, vertices=vertices):
self.add_message('cyclic-import', args=' -> '.join(cycle))
def visit_import(self, node):
"""triggered when an import statement is seen"""
modnode = node.root()
for name, _ in node.names:
importedmodnode = self.get_imported_module(modnode, node, name)
importedmodnode = self.get_imported_module(node, name)
if importedmodnode is None:
continue
self._check_relative_import(modnode, node, importedmodnode, name)
......@@ -260,7 +273,7 @@ given file (report RP0402 must not be disabled)'}
if name == '*':
self.add_message('wildcard-import', args=basename, node=node)
modnode = node.root()
importedmodnode = self.get_imported_module(modnode, node, basename)
importedmodnode = self.get_imported_module(node, basename)
if importedmodnode is None:
return
self._check_relative_import(modnode, node, importedmodnode, basename)
......@@ -270,15 +283,16 @@ given file (report RP0402 must not be disabled)'}
self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
self._check_reimport(node, name, basename, node.level)
def get_imported_module(self, modnode, importnode, modname):
def get_imported_module(self, importnode, modname):
try:
return importnode.do_import_module(modname)
except astroid.InferenceError, ex:
except astroid.InferenceError as ex:
if str(ex) != modname:
args = '%r (%s)' % (modname, ex)
else:
args = repr(modname)
self.add_message("import-error", args=args, node=importnode)
if not _except_import_error(importnode.parent):
self.add_message("import-error", args=args, node=importnode)
def _check_relative_import(self, modnode, importnode, importedmodnode,
importedasname):
......@@ -295,7 +309,8 @@ given file (report RP0402 must not be disabled)'}
return False
if importedmodnode.name != importedasname:
# this must be a relative import...
self.add_message('relative-import', args=(importedasname, importedmodnode.name),
self.add_message('relative-import',
args=(importedasname, importedmodnode.name),
node=importnode)
def _add_imported_module(self, node, importedmodname):
......@@ -315,8 +330,8 @@ given file (report RP0402 must not be disabled)'}
if not context_name in importedmodnames:
importedmodnames.add(context_name)
# update import graph
mgraph = self.import_graph.setdefault(context_name, set())
if not importedmodname in mgraph:
mgraph = self.import_graph[context_name]
if importedmodname not in mgraph:
mgraph.add(importedmodname)
def _check_deprecated_module(self, node, mod_path):
......@@ -343,7 +358,7 @@ given file (report RP0402 must not be disabled)'}
def report_external_dependencies(self, sect, _, dummy):
"""return a verbatim layout for displaying dependencies"""
dep_info = make_tree_defs(self._external_dependencies_info().iteritems())
dep_info = make_tree_defs(six.iteritems(self._external_dependencies_info()))
if not dep_info:
raise EmptyReport()
tree_str = repr_tree_defs(dep_info)
......@@ -375,7 +390,7 @@ given file (report RP0402 must not be disabled)'}
if self.__ext_dep_info is None:
package = self.linter.current_name
self.__ext_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems():
for importee, importers in six.iteritems(self.stats['dependencies']):
if not importee.startswith(package):
result[importee] = importers
return self.__ext_dep_info
......@@ -387,7 +402,7 @@ given file (report RP0402 must not be disabled)'}
if self.__int_dep_info is None:
package = self.linter.current_name
self.__int_dep_info = result = {}
for importee, importers in self.stats['dependencies'].iteritems():
for importee, importers in six.iteritems(self.stats['dependencies']):
if importee.startswith(package):
result[importee] = importers
return self.__int_dep_info
......
This diff is collapsed.
......@@ -21,6 +21,7 @@ import re
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker
import six
MSGS = {
......@@ -72,8 +73,8 @@ class EncodingChecker(BaseChecker):
def _check_encoding(self, lineno, line, file_encoding):
try:
return unicode(line, file_encoding)
except UnicodeDecodeError, ex:
return six.text_type(line, file_encoding)
except UnicodeDecodeError as ex:
self.add_message('invalid-encoded-data', line=lineno,
args=(file_encoding, ex.args[2]))
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -17,6 +17,7 @@
* pylint.d (PYLINTHOME)
"""
from __future__ import with_statement
from __future__ import print_function
import pickle
import os
......@@ -52,7 +53,7 @@ def load_results(base):
try:
with open(data_file, _PICK_LOAD) as stream:
return pickle.load(stream)
except:
except Exception: # pylint: disable=broad-except
return {}
if sys.version_info < (3, 0):
......@@ -66,13 +67,13 @@ def save_results(results, base):
try:
os.mkdir(PYLINT_HOME)
except OSError:
print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME
print('Unable to create directory %s' % PYLINT_HOME, file=sys.stderr)
data_file = get_pdata_path(base, 1)
try:
with open(data_file, _PICK_DUMP) as stream:
pickle.dump(results, stream)
except (IOError, OSError), ex:
print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex)
except (IOError, OSError) as ex:
print('Unable to create file %s: %s' % (data_file, ex), file=sys.stderr)
# location of the configuration file ##########################################
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment