diff -Nru astroid-1.4.9/astroid/arguments.py astroid-1.5.3/astroid/arguments.py --- astroid-1.4.9/astroid/arguments.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/arguments.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,11 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +import six from astroid import bases from astroid import context as contextmod @@ -22,7 +13,6 @@ from astroid import nodes from astroid import util -import six class CallSite(object): @@ -44,11 +34,11 @@ self.positional_arguments = [ arg for arg in self._unpacked_args - if arg is not util.YES + if arg is not util.Uninferable ] self.keyword_arguments = { key: value for key, value in self._unpacked_kwargs.items() - if value is not util.YES + if value is not util.Uninferable } @classmethod @@ -87,29 +77,29 @@ try: inferred = next(value.infer(context=context)) except exceptions.InferenceError: - values[name] = util.YES + values[name] = util.Uninferable continue if not isinstance(inferred, nodes.Dict): # Not something we can work with. - values[name] = util.YES + values[name] = util.Uninferable continue for dict_key, dict_value in inferred.items: try: dict_key = next(dict_key.infer(context=context)) except exceptions.InferenceError: - values[name] = util.YES + values[name] = util.Uninferable continue if not isinstance(dict_key, nodes.Const): - values[name] = util.YES + values[name] = util.Uninferable continue if not isinstance(dict_key.value, six.string_types): - values[name] = util.YES + values[name] = util.Uninferable continue if dict_key.value in values: # The name is already in the dictionary - values[dict_key.value] = util.YES + values[dict_key.value] = util.Uninferable self.duplicated_keywords.add(dict_key.value) continue values[dict_key.value] = dict_value @@ -126,14 +116,14 @@ try: inferred = next(arg.value.infer(context=context)) except exceptions.InferenceError: - values.append(util.YES) + values.append(util.Uninferable) continue - if inferred is util.YES: - values.append(util.YES) + if inferred is util.Uninferable: + values.append(util.Uninferable) continue if not hasattr(inferred, 'elts'): - values.append(util.YES) + values.append(util.Uninferable) continue values.extend(inferred.elts) else: @@ -141,9 +131,18 @@ return values def infer_argument(self, funcnode, name, context): - """infer a function argument value according to the call context""" + """infer a function argument value according to the call context + + Arguments: + funcnode: The function being called. 
+ name: The name of the argument whose value is being inferred. + context: TODO + """ if name in self.duplicated_keywords: - raise exceptions.InferenceError(name) + raise exceptions.InferenceError('The arguments passed to {func!r} ' + ' have duplicate keywords.', + call_site=self, func=funcnode, + arg=name, context=context) # Look into the keywords first, maybe it's already there. try: @@ -154,7 +153,11 @@ # Too many arguments given and no variable arguments. if len(self.positional_arguments) > len(funcnode.args.args): if not funcnode.args.vararg: - raise exceptions.InferenceError(name) + raise exceptions.InferenceError('Too many positional arguments ' + 'passed to {func!r} that does ' + 'not have *args.', + call_site=self, func=funcnode, + arg=name, context=context) positional = self.positional_arguments[:len(funcnode.args.args)] vararg = self.positional_arguments[len(funcnode.args.args):] @@ -183,6 +186,16 @@ else: # XXX can do better ? boundnode = funcnode.parent.frame() + + if isinstance(boundnode, nodes.ClassDef): + # Verify that we're accessing a method + # of the metaclass through a class, as in + # `cls.metaclass_method`. In this case, the + # first argument is always the class. + method_scope = funcnode.parent.scope() + if method_scope is boundnode.metaclass(): + return iter((boundnode, )) + if funcnode.type == 'method': if not isinstance(boundnode, bases.Instance): boundnode = bases.Instance(boundnode) @@ -204,25 +217,34 @@ # It wants all the keywords that were passed into # the call site. if self.has_invalid_keywords(): - raise exceptions.InferenceError - kwarg = nodes.Dict() - kwarg.lineno = funcnode.args.lineno - kwarg.col_offset = funcnode.args.col_offset - kwarg.parent = funcnode.args - items = [(nodes.const_factory(key), value) - for key, value in kwargs.items()] - kwarg.items = items + raise exceptions.InferenceError( + "Inference failed to find values for all keyword arguments " + "to {func!r}: {unpacked_kwargs!r} doesn't correspond to " + "{keyword_arguments!r}.", + keyword_arguments=self.keyword_arguments, + unpacked_kwargs=self._unpacked_kwargs, + call_site=self, func=funcnode, arg=name, context=context) + kwarg = nodes.Dict(lineno=funcnode.args.lineno, + col_offset=funcnode.args.col_offset, + parent=funcnode.args) + kwarg.postinit([(nodes.const_factory(key), value) + for key, value in kwargs.items()]) return iter((kwarg, )) elif funcnode.args.vararg == name: # It wants all the args that were passed into # the call site. if self.has_invalid_arguments(): - raise exceptions.InferenceError - args = nodes.Tuple() - args.lineno = funcnode.args.lineno - args.col_offset = funcnode.args.col_offset - args.parent = funcnode.args - args.elts = vararg + raise exceptions.InferenceError( + "Inference failed to find values for all positional " + "arguments to {func!r}: {unpacked_args!r} doesn't " + "correspond to {positional_arguments!r}.", + positional_arguments=self.positional_arguments, + unpacked_args=self._unpacked_args, + call_site=self, func=funcnode, arg=name, context=context) + args = nodes.Tuple(lineno=funcnode.args.lineno, + col_offset=funcnode.args.col_offset, + parent=funcnode.args) + args.postinit(vararg) return iter((args, )) # Check if it's a default parameter. 
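# --- Editor's note (annotation, not part of the upstream diff) ---------------
# A minimal sketch of what CallSite.infer_argument() enables through the
# public API: when a call site omits an argument, the default value from the
# function signature is resolved (the default-parameter branch handled in this
# hunk), and the structured InferenceError messages added above are raised only
# when no value can be found.  Names below are illustrative; assumes astroid 1.5.x.
import astroid

call = astroid.extract_node('''
def greet(name, punctuation="!"):
    return punctuation
greet("hi")  #@
''')
# `punctuation` is not passed at the call site, so its default is used.
print(next(call.infer()).value)   # -> '!'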
@@ -230,4 +252,6 @@ return funcnode.args.default_value(name).infer(context) except exceptions.NoDefault: pass - raise exceptions.InferenceError(name) + raise exceptions.InferenceError('No value found for argument {name} to ' + '{func!r}', call_site=self, + func=funcnode, arg=name, context=context) diff -Nru astroid-1.4.9/astroid/as_string.py astroid-1.5.3/astroid/as_string.py --- astroid-1.4.9/astroid/as_string.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/as_string.py 2017-03-11 13:48:20.000000000 +0000 @@ -1,23 +1,14 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2013-2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """This module renders Astroid nodes as string: -* :func:`to_code` function return equivalent (hopefuly valid) python string +* :func:`to_code` function return equivalent (hopefully valid) python string * :func:`dump` function return an internal representation of nodes found in the tree, useful for debugging or understanding the tree structure @@ -26,54 +17,16 @@ import six -INDENT = ' ' # 4 spaces ; keep indentation variable - -def dump(node, ids=False): - """print a nice astroid tree representation. - - :param ids: if true, we also print the ids (usefull for debugging) - """ - result = [] - _repr_tree(node, result, ids=ids) - return "\n".join(result) - -def _repr_tree(node, result, indent='', _done=None, ids=False): - """built a tree representation of a node as a list of lines""" - if _done is None: - _done = set() - if not hasattr(node, '_astroid_fields'): # not a astroid node - return - if node in _done: - result.append(indent + 'loop in tree: %s' % node) - return - _done.add(node) - node_str = str(node) - if ids: - node_str += ' . 
\t%x' % id(node) - result.append(indent + node_str) - indent += INDENT - for field in node._astroid_fields: - value = getattr(node, field) - if isinstance(value, (list, tuple)): - result.append(indent + field + " = [") - for child in value: - if isinstance(child, (list, tuple)): - # special case for Dict # FIXME - _repr_tree(child[0], result, indent, _done, ids) - _repr_tree(child[1], result, indent, _done, ids) - result.append(indent + ',') - else: - _repr_tree(child, result, indent, _done, ids) - result.append(indent + "]") - else: - result.append(indent + field + " = ") - _repr_tree(value, result, indent, _done, ids) +# pylint: disable=unused-argument class AsStringVisitor(object): """Visitor to render an Astroid node as a valid python code string""" + def __init__(self, indent): + self.indent = indent + def __call__(self, node): """Makes this visitor behave as a simple function""" return node.accept(self) @@ -81,7 +34,7 @@ def _stmt_list(self, stmts): """return a list of nodes to string""" stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr]) - return INDENT + stmts.replace('\n', '\n'+INDENT) + return self.indent + stmts.replace('\n', '\n'+self.indent) ## visit_ methods ########################################### @@ -114,6 +67,15 @@ """return an astroid.AugAssign node as string""" return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self)) + def visit_annassign(self, node): + """Return an astroid.AugAssign node as string""" + + target = node.target.accept(self) + annotation = node.annotation.accept(self) + if node.value is None: + return '%s: %s' % (target, annotation) + return '%s: %s = %s' % (target, annotation, node.value.accept(self)) + def visit_repr(self, node): """return an astroid.Repr node as string""" return '`%s`' % node.value.accept(self) @@ -145,10 +107,10 @@ def visit_classdef(self, node): """return an astroid.ClassDef node as string""" - decorate = node.decorators and node.decorators.accept(self) or '' + decorate = node.decorators.accept(self) if node.decorators else '' bases = ', '.join([n.accept(self) for n in node.bases]) if sys.version_info[0] == 2: - bases = bases and '(%s)' % bases or '' + bases = '(%s)' % bases if bases else '' else: metaclass = node.metaclass() if metaclass and not node.has_metaclass_hack(): @@ -157,8 +119,8 @@ else: bases = '(metaclass=%s)' % metaclass.name else: - bases = bases and '(%s)' % bases or '' - docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' + bases = '(%s)' % bases if bases else '' + docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else '' return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs, self._stmt_list(node.body)) @@ -205,13 +167,13 @@ def _visit_dict(self, node): for key, value in node.items: - key = key.accept(self) - value = value.accept(self) - if key == '**': - # It can only be a DictUnpack node. - yield key + value - else: - yield '%s: %s' % (key, value) + key = key.accept(self) + value = value.accept(self) + if key == '**': + # It can only be a DictUnpack node. 
+ yield key + value + else: + yield '%s: %s' % (key, value) def visit_dictunpack(self, node): return '**' @@ -279,8 +241,8 @@ def visit_functiondef(self, node): """return an astroid.Function node as string""" - decorate = node.decorators and node.decorators.accept(self) or '' - docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or '' + decorate = node.decorators.accept(self) if node.decorators else '' + docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else '' return_annotation = '' if six.PY3 and node.returns: return_annotation = '->' + node.returns.as_string() @@ -345,7 +307,7 @@ def visit_module(self, node): """return an astroid.Module node as string""" - docs = node.doc and '"""%s"""\n\n' % node.doc or '' + docs = '"""%s"""\n\n' % node.doc if node.doc else '' return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n' def visit_name(self, node): @@ -382,8 +344,8 @@ """return an astroid.Return node as string""" if node.value: return 'return %s' % node.value.accept(self) - else: - return 'return' + + return 'return' def visit_index(self, node): """return a astroid.Index node as string""" @@ -400,9 +362,9 @@ def visit_slice(self, node): """return a astroid.Slice node as string""" - lower = node.lower and node.lower.accept(self) or '' - upper = node.upper and node.upper.accept(self) or '' - step = node.step and node.step.accept(self) or '' + lower = node.lower.accept(self) if node.lower else '' + upper = node.upper.accept(self) if node.upper else'' + step = node.step.accept(self) if node.step else '' if step: return '%s:%s:%s' % (lower, upper, step) return '%s:%s' % (lower, upper) @@ -456,18 +418,17 @@ def visit_yield(self, node): """yield an ast.Yield node as string""" - yi_val = node.value and (" " + node.value.accept(self)) or "" + yi_val = (" " + node.value.accept(self)) if node.value else "" expr = 'yield' + yi_val if node.parent.is_statement: return expr - else: - return "(%s)" % (expr,) + + return "(%s)" % (expr,) def visit_starred(self, node): """return Starred node as string""" return "*" + node.value.accept(self) - # These aren't for real AST nodes, but for inference objects. def visit_frozenset(self, node): @@ -476,12 +437,12 @@ def visit_super(self, node): return node.parent.accept(self) - def visit_yes(self, node): - return "Uninferable" + def visit_uninferable(self, node): + return str(node) -class AsStringVisitor3k(AsStringVisitor): - """AsStringVisitor3k overwrites some AsStringVisitor methods""" +class AsStringVisitor3(AsStringVisitor): + """AsStringVisitor3 overwrites some AsStringVisitor methods""" def visit_excepthandler(self, node): if node.type: @@ -509,15 +470,15 @@ def visit_yieldfrom(self, node): """ Return an astroid.YieldFrom node as string. 
""" - yi_val = node.value and (" " + node.value.accept(self)) or "" + yi_val = (" " + node.value.accept(self)) if node.value else "" expr = 'yield from' + yi_val if node.parent.is_statement: return expr - else: - return "(%s)" % (expr,) + + return "(%s)" % (expr,) def visit_asyncfunctiondef(self, node): - function = super(AsStringVisitor3k, self).visit_functiondef(node) + function = super(AsStringVisitor3, self).visit_functiondef(node) return 'async ' + function.strip() def visit_await(self, node): @@ -529,6 +490,24 @@ def visit_asyncfor(self, node): return 'async %s' % self.visit_for(node) + def visit_joinedstr(self, node): + # Special treatment for constants, + # as we want to join literals not reprs + string = ''.join( + value.value if type(value).__name__ == 'Const' + else value.accept(self) + for value in node.values + ) + return "f'%s'" % string + + def visit_formattedvalue(self, node): + return '{%s}' % node.value.accept(self) + + def visit_comprehension(self, node): + """return an astroid.Comprehension node as string""" + return '%s%s' % ('async ' if node.is_async else '', + super(AsStringVisitor3, self).visit_comprehension(node)) + def _import_string(names): """return a list of (name, asname) formatted as a string""" @@ -538,11 +517,11 @@ _names.append('%s as %s' % (name, asname)) else: _names.append(name) - return ', '.join(_names) + return ', '.join(_names) if sys.version_info >= (3, 0): - AsStringVisitor = AsStringVisitor3k + AsStringVisitor = AsStringVisitor3 -# this visitor is stateless, thus it can be reused -to_code = AsStringVisitor() +# This sets the default indent to 4 spaces. +to_code = AsStringVisitor(' ') diff -Nru astroid-1.4.9/astroid/astpeephole.py astroid-1.5.3/astroid/astpeephole.py --- astroid-1.4.9/astroid/astpeephole.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/astpeephole.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,8 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Small AST optimizations.""" import _ast @@ -34,7 +22,7 @@ class ASTPeepholeOptimizer(object): """Class for applying small optimizations to generate new AST.""" - def optimize_binop(self, node): + def optimize_binop(self, node, parent=None): """Optimize BinOps with string Const nodes on the lhs. This fixes an infinite recursion crash, where multiple @@ -77,10 +65,10 @@ # If we have inconsistent types, bail out. 
known = type(ast_nodes[0]) - if any(type(element) is not known + if any(not isinstance(element, known) for element in ast_nodes[1:]): return value = known().join(reversed(ast_nodes)) - newnode = nodes.Const(value) + newnode = nodes.Const(value, node.lineno, node.col_offset, parent) return newnode diff -Nru astroid-1.4.9/astroid/bases.py astroid-1.5.3/astroid/bases.py --- astroid-1.4.9/astroid/bases.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/bases.py 2017-04-20 10:37:44.000000000 +0000 @@ -1,39 +1,35 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """This module contains base classes and functions for the nodes and some inference utils. """ -import functools + +import collections import sys -import warnings -import wrapt +import six from astroid import context as contextmod -from astroid import decorators as decoratorsmod from astroid import exceptions from astroid import util +objectmodel = util.lazy_import('interpreter.objectmodel') +BUILTINS = six.moves.builtins.__name__ +manager = util.lazy_import('manager') +MANAGER = manager.AstroidManager() if sys.version_info >= (3, 0): BUILTINS = 'builtins' + BOOL_SPECIAL_METHOD = '__bool__' else: BUILTINS = '__builtin__' + BOOL_SPECIAL_METHOD = '__nonzero__' PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'} # List of possible property names. We use this list in order # to see if a method is a property or not. 
This should be @@ -54,7 +50,7 @@ if PROPERTIES.intersection(meth.decoratornames()): return True stripped = {name.split(".")[-1] for name in meth.decoratornames() - if name is not util.YES} + if name is not util.Uninferable} return any(name in stripped for name in POSSIBLE_PROPERTIES) @@ -90,7 +86,7 @@ context = contextmod.InferenceContext() for stmt in stmts: - if stmt is util.YES: + if stmt is util.Uninferable: yield stmt inferred = True continue @@ -99,36 +95,64 @@ for inferred in stmt.infer(context=context): yield inferred inferred = True - except exceptions.UnresolvableName: + except exceptions.NameInferenceError: continue except exceptions.InferenceError: - yield util.YES + yield util.Uninferable inferred = True if not inferred: - raise exceptions.InferenceError(str(stmt)) + raise exceptions.InferenceError( + 'Inference failed for all members of {stmts!r}.', + stmts=stmts, frame=frame, context=context) + + +def _infer_method_result_truth(instance, method_name, context): + # Get the method from the instance and try to infer + # its return's truth value. + meth = next(instance.igetattr(method_name, context=context), None) + if meth and hasattr(meth, 'infer_call_result'): + if not meth.callable(): + return util.Uninferable + for value in meth.infer_call_result(instance, context=context): + if value is util.Uninferable: + return value + + inferred = next(value.infer(context=context)) + return inferred.bool_value() + return util.Uninferable + +class BaseInstance(Proxy): + """An instance base class, which provides lookup methods for potential instances.""" + + special_attributes = None + + def display_type(self): + return 'Instance of' -class Instance(Proxy): - """a special node representing a class instance""" def getattr(self, name, context=None, lookupclass=True): try: values = self._proxied.instance_attr(name, context) - except exceptions.NotFoundError: - if name == '__class__': - return [self._proxied] + except exceptions.AttributeInferenceError: + if self.special_attributes and name in self.special_attributes: + return [self.special_attributes.lookup(name)] + if lookupclass: - # class attributes not available through the instance - # unless they are explicitly defined - if name in ('__name__', '__bases__', '__mro__', '__subclasses__'): - return self._proxied.local_attr(name) - return self._proxied.getattr(name, context) - raise exceptions.NotFoundError(name) + # Class attributes not available through the instance + # unless they are explicitly defined. + return self._proxied.getattr(name, context, + class_context=False) + + util.reraise(exceptions.AttributeInferenceError(target=self, + attribute=name, + context=context)) # since we've no context information, return matching class members as # well if lookupclass: try: - return values + self._proxied.getattr(name, context) - except exceptions.NotFoundError: + return values + self._proxied.getattr(name, context, + class_context=False) + except exceptions.AttributeInferenceError: pass return values @@ -138,22 +162,23 @@ context = contextmod.InferenceContext() try: # avoid recursively inferring the same attr on the same class - context.push((self._proxied, name)) + if context.push((self._proxied, name)): + return + # XXX frame should be self._proxied, or not ? 
get_attr = self.getattr(name, context, lookupclass=False) - return _infer_stmts( - self._wrap_attr(get_attr, context), - context, - frame=self, - ) - except exceptions.NotFoundError: + for stmt in _infer_stmts(self._wrap_attr(get_attr, context), + context, frame=self): + yield stmt + except exceptions.AttributeInferenceError: try: - # fallback to class'igetattr since it has some logic to handle + # fallback to class.igetattr since it has some logic to handle # descriptors - return self._wrap_attr(self._proxied.igetattr(name, context), - context) - except exceptions.NotFoundError: - raise exceptions.InferenceError(name) + attrs = self._proxied.igetattr(name, context, class_context=False) + for stmt in self._wrap_attr(attrs, context): + yield stmt + except exceptions.AttributeInferenceError as error: + util.reraise(exceptions.InferenceError(**vars(error))) def _wrap_attr(self, attrs, context=None): """wrap bound methods of attrs in a InstanceMethod proxies""" @@ -182,13 +207,21 @@ """infer what a class instance is returning when called""" inferred = False for node in self._proxied.igetattr('__call__', context): - if node is util.YES or not node.callable(): + if node is util.Uninferable or not node.callable(): continue for res in node.infer_call_result(caller, context): inferred = True yield res if not inferred: - raise exceptions.InferenceError() + raise exceptions.InferenceError(node=self, caller=caller, + context=context) + + +class Instance(BaseInstance): + """A special node representing a class instance.""" + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel()) def __repr__(self): return '' % (self._proxied.root().name, @@ -200,9 +233,9 @@ def callable(self): try: - self._proxied.getattr('__call__') + self._proxied.getattr('__call__', class_context=False) return True - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError: return False def pytype(self): @@ -211,15 +244,45 @@ def display_type(self): return 'Instance of' + def bool_value(self): + """Infer the truth value for an Instance + + The truth value of an instance is determined by these conditions: + + * if it implements __bool__ on Python 3 or __nonzero__ + on Python 2, then its bool value will be determined by + calling this special method and checking its result. + * when this method is not defined, __len__() is called, if it + is defined, and the object is considered true if its result is + nonzero. If a class defines neither __len__() nor __bool__(), + all its instances are considered true. + """ + context = contextmod.InferenceContext() + context.callcontext = contextmod.CallContext(args=[]) + context.boundnode = self + + try: + result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context) + except (exceptions.InferenceError, exceptions.AttributeInferenceError): + # Fallback to __len__. + try: + result = _infer_method_result_truth(self, '__len__', context) + except (exceptions.AttributeInferenceError, exceptions.InferenceError): + return True + return result # TODO(cpopa): this is set in inference.py # The circular dependency hell goes deeper and deeper. 
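# --- Editor's note (annotation, not part of the upstream diff) ---------------
# A small sketch of the Instance.bool_value() protocol documented above:
# __bool__ (or __nonzero__ on Python 2) is consulted first, then __len__,
# and instances defining neither are considered true.  Assumes astroid 1.5.x.
import astroid

instance = next(astroid.extract_node('''
class Empty(object):
    def __len__(self):
        return 0
Empty()  #@
''').infer())
print(instance.bool_value())   # -> False, derived from the inferred __len__() result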
- # pylint: disable=unused-argument def getitem(self, index, context=None): pass + class UnboundMethod(Proxy): """a special node representing a method not bound to an instance""" + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel()) + def __repr__(self): frame = self._proxied.parent.frame() return '<%s %s of %s at 0x%s' % (self.__class__.__name__, @@ -230,13 +293,13 @@ return False def getattr(self, name, context=None): - if name == 'im_func': - return [self._proxied] + if name in self.special_attributes: + return [self.special_attributes.lookup(name)] return self._proxied.getattr(name, context) def igetattr(self, name, context=None): - if name == 'im_func': - return iter((self._proxied,)) + if name in self.special_attributes: + return iter((self.special_attributes.lookup(name), )) return self._proxied.igetattr(name, context) def infer_call_result(self, caller, context): @@ -245,12 +308,19 @@ if (self._proxied.name == '__new__' and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): infer = caller.args[0].infer() if caller.args else [] - return ((x is util.YES and x or Instance(x)) for x in infer) + return (Instance(x) if x is not util.Uninferable else x for x in infer) return self._proxied.infer_call_result(caller, context) + def bool_value(self): + return True + class BoundMethod(UnboundMethod): """a special node representing a method bound to an instance""" + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel()) + def __init__(self, proxy, bound): UnboundMethod.__init__(self, proxy) self.bound = bound @@ -258,20 +328,109 @@ def is_bound(self): return True - def infer_call_result(self, caller, context=None): + def _infer_type_new_call(self, caller, context): + """Try to infer what type.__new__(mcs, name, bases, attrs) returns. + + In order for such call to be valid, the metaclass needs to be + a subtype of ``type``, the name needs to be a string, the bases + needs to be a tuple of classes and the attributes a dictionary + of strings to values. + """ + from astroid import node_classes + # Verify the metaclass + mcs = next(caller.args[0].infer(context=context)) + if mcs.__class__.__name__ != 'ClassDef': + # Not a valid first argument. + return + if not mcs.is_subtype_of("%s.type" % BUILTINS): + # Not a valid metaclass. + return + + # Verify the name + name = next(caller.args[1].infer(context=context)) + if name.__class__.__name__ != 'Const': + # Not a valid name, needs to be a const. + return + if not isinstance(name.value, str): + # Needs to be a string. + return + + # Verify the bases + bases = next(caller.args[2].infer(context=context)) + if bases.__class__.__name__ != 'Tuple': + # Needs to be a tuple. + return + inferred_bases = [next(elt.infer(context=context)) + for elt in bases.elts] + if any(base.__class__.__name__ != 'ClassDef' + for base in inferred_bases): + # All the bases needs to be Classes + return + + # Verify the attributes. + attrs = next(caller.args[3].infer(context=context)) + if attrs.__class__.__name__ != 'Dict': + # Needs to be a dictionary. + return + cls_locals = collections.defaultdict(list) + for key, value in attrs.items: + key = next(key.infer(context=context)) + value = next(value.infer(context=context)) + if key.__class__.__name__ != 'Const': + # Something invalid as an attribute. + return + if not isinstance(key.value, str): + # Not a proper attribute. 
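# --- Editor's note (annotation, not part of the upstream diff) ---------------
# The objectmodel-backed special_attributes introduced in this hunk sit behind
# attribute access on the method proxies.  A tiny, illustrative sketch of the
# proxies themselves:
import astroid

method = next(astroid.extract_node('''
class A(object):
    def method(self):
        return 42
A().method  #@
''').infer())
print(type(method).__name__)   # -> 'BoundMethod'
print(method.is_bound())       # -> True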
+ return + cls_locals[key.value].append(value) + + # Build the class from now. + cls = mcs.__class__(name=name.value, lineno=caller.lineno, + col_offset=caller.col_offset, + parent=caller) + empty = node_classes.Pass() + cls.postinit(bases=bases.elts, body=[empty], decorators=[], + newstyle=True, metaclass=mcs, keywords=[]) + cls.locals = cls_locals + return cls + def infer_call_result(self, caller, context=None): if context is None: context = contextmod.InferenceContext() context = context.clone() context.boundnode = self.bound + + if (self.bound.__class__.__name__ == 'ClassDef' + and self.bound.name == 'type' + and self.name == '__new__' + and len(caller.args) == 4 + # TODO(cpopa): this check shouldn't be needed. + and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS): + + # Check if we have an ``type.__new__(mcs, name, bases, attrs)`` call. + new_cls = self._infer_type_new_call(caller, context) + if new_cls: + return iter((new_cls, )) + return super(BoundMethod, self).infer_call_result(caller, context) + def bool_value(self): + return True + -class Generator(Instance): +class Generator(BaseInstance): """a special node representing a generator. Proxied class is set once for all in raw_building. """ + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel()) + + # pylint: disable=super-init-not-called + def __init__(self, parent=None): + self.parent = parent + def callable(self): return False @@ -281,356 +440,11 @@ def display_type(self): return 'Generator' + def bool_value(self): + return True + def __repr__(self): return '' % (self._proxied.name, self.lineno, id(self)) def __str__(self): return 'Generator(%s)' % (self._proxied.name) - - -# decorators ################################################################## - -def path_wrapper(func): - """return the given infer function wrapped to handle the path""" - @functools.wraps(func) - def wrapped(node, context=None, _func=func, **kwargs): - """wrapper function handling context""" - if context is None: - context = contextmod.InferenceContext() - context.push(node) - yielded = set() - for res in _func(node, context, **kwargs): - # unproxy only true instance, not const, tuple, dict... - if res.__class__ is Instance: - ares = res._proxied - else: - ares = res - if ares not in yielded: - yield res - yielded.add(ares) - return wrapped - -@wrapt.decorator -def yes_if_nothing_inferred(func, instance, args, kwargs): - inferred = False - for node in func(*args, **kwargs): - inferred = True - yield node - if not inferred: - yield util.YES - -@wrapt.decorator -def raise_if_nothing_inferred(func, instance, args, kwargs): - inferred = False - for node in func(*args, **kwargs): - inferred = True - yield node - if not inferred: - raise exceptions.InferenceError() - - -# Node ###################################################################### - -class NodeNG(object): - """Base Class for all Astroid node classes. - - It represents a node of the new abstract syntax tree. 
- """ - is_statement = False - optional_assign = False # True for For (and for Comprehension if py <3.0) - is_function = False # True for FunctionDef nodes - # attributes below are set by the builder module or by raw factories - lineno = None - fromlineno = None - tolineno = None - col_offset = None - # parent node in the tree - parent = None - # attributes containing child node(s) redefined in most concrete classes: - _astroid_fields = () - # instance specific inference function infer(node, context) - _explicit_inference = None - - def infer(self, context=None, **kwargs): - """main interface to the interface system, return a generator on infered - values. - - If the instance has some explicit inference function set, it will be - called instead of the default interface. - """ - if self._explicit_inference is not None: - # explicit_inference is not bound, give it self explicitly - try: - return self._explicit_inference(self, context, **kwargs) - except exceptions.UseInferenceDefault: - pass - - if not context: - return self._infer(context, **kwargs) - - key = (self, context.lookupname, - context.callcontext, context.boundnode) - if key in context.inferred: - return iter(context.inferred[key]) - - return context.cache_generator(key, self._infer(context, **kwargs)) - - def _repr_name(self): - """return self.name or self.attrname or '' for nice representation""" - return getattr(self, 'name', getattr(self, 'attrname', '')) - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, self._repr_name()) - - def __repr__(self): - return '<%s(%s) l.%s [%s] at 0x%x>' % (self.__class__.__name__, - self._repr_name(), - self.fromlineno, - self.root().name, - id(self)) - - - def accept(self, visitor): - func = getattr(visitor, "visit_" + self.__class__.__name__.lower()) - return func(self) - - def get_children(self): - for field in self._astroid_fields: - attr = getattr(self, field) - if attr is None: - continue - if isinstance(attr, (list, tuple)): - for elt in attr: - yield elt - else: - yield attr - - def last_child(self): - """an optimized version of list(get_children())[-1]""" - for field in self._astroid_fields[::-1]: - attr = getattr(self, field) - if not attr: # None or empty listy / tuple - continue - if isinstance(attr, (list, tuple)): - return attr[-1] - else: - return attr - return None - - def parent_of(self, node): - """return true if i'm a parent of the given node""" - parent = node.parent - while parent is not None: - if self is parent: - return True - parent = parent.parent - return False - - def statement(self): - """return the first parent node marked as statement node""" - if self.is_statement: - return self - return self.parent.statement() - - def frame(self): - """return the first parent frame node (i.e. Module, FunctionDef or - ClassDef) - - """ - return self.parent.frame() - - def scope(self): - """return the first node defining a new scope (i.e. Module, - FunctionDef, ClassDef, Lambda but also GenExpr) - - """ - return self.parent.scope() - - def root(self): - """return the root node of the tree, (i.e. 
a Module)""" - if self.parent: - return self.parent.root() - return self - - def child_sequence(self, child): - """search for the right sequence where the child lies in""" - for field in self._astroid_fields: - node_or_sequence = getattr(self, field) - if node_or_sequence is child: - return [node_or_sequence] - # /!\ compiler.ast Nodes have an __iter__ walking over child nodes - if (isinstance(node_or_sequence, (tuple, list)) - and child in node_or_sequence): - return node_or_sequence - - msg = 'Could not find %s in %s\'s children' - raise exceptions.AstroidError(msg % (repr(child), repr(self))) - - def locate_child(self, child): - """return a 2-uple (child attribute name, sequence or node)""" - for field in self._astroid_fields: - node_or_sequence = getattr(self, field) - # /!\ compiler.ast Nodes have an __iter__ walking over child nodes - if child is node_or_sequence: - return field, child - if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: - return field, node_or_sequence - msg = 'Could not find %s in %s\'s children' - raise exceptions.AstroidError(msg % (repr(child), repr(self))) - # FIXME : should we merge child_sequence and locate_child ? locate_child - # is only used in are_exclusive, child_sequence one time in pylint. - - def next_sibling(self): - """return the next sibling statement""" - return self.parent.next_sibling() - - def previous_sibling(self): - """return the previous sibling statement""" - return self.parent.previous_sibling() - - def nearest(self, nodes): - """return the node which is the nearest before this one in the - given list of nodes - """ - myroot = self.root() - mylineno = self.fromlineno - nearest = None, 0 - for node in nodes: - assert node.root() is myroot, \ - 'nodes %s and %s are not from the same module' % (self, node) - lineno = node.fromlineno - if node.fromlineno > mylineno: - break - if lineno > nearest[1]: - nearest = node, lineno - # FIXME: raise an exception if nearest is None ? - return nearest[0] - - # these are lazy because they're relatively expensive to compute for every - # single node, and they rarely get looked at - - @decoratorsmod.cachedproperty - def fromlineno(self): - if self.lineno is None: - return self._fixed_source_line() - else: - return self.lineno - - @decoratorsmod.cachedproperty - def tolineno(self): - if not self._astroid_fields: - # can't have children - lastchild = None - else: - lastchild = self.last_child() - if lastchild is None: - return self.fromlineno - else: - return lastchild.tolineno - - # TODO / FIXME: - assert self.fromlineno is not None, self - assert self.tolineno is not None, self - - def _fixed_source_line(self): - """return the line number where the given node appears - - we need this method since not all nodes have the lineno attribute - correctly set... 
- """ - line = self.lineno - _node = self - try: - while line is None: - _node = next(_node.get_children()) - line = _node.lineno - except StopIteration: - _node = self.parent - while _node and line is None: - line = _node.lineno - _node = _node.parent - return line - - def block_range(self, lineno): - """handle block line numbers range for non block opening statements - """ - return lineno, self.tolineno - - def set_local(self, name, stmt): - """delegate to a scoped parent handling a locals dictionary""" - self.parent.set_local(name, stmt) - - def nodes_of_class(self, klass, skip_klass=None): - """return an iterator on nodes which are instance of the given class(es) - - klass may be a class object or a tuple of class objects - """ - if isinstance(self, klass): - yield self - for child_node in self.get_children(): - if skip_klass is not None and isinstance(child_node, skip_klass): - continue - for matching in child_node.nodes_of_class(klass, skip_klass): - yield matching - - def _infer_name(self, frame, name): - # overridden for ImportFrom, Import, Global, TryExcept and Arguments - return None - - def _infer(self, context=None): - """we don't know how to resolve a statement by default""" - # this method is overridden by most concrete classes - raise exceptions.InferenceError(self.__class__.__name__) - - def inferred(self): - '''return list of inferred values for a more simple inference usage''' - return list(self.infer()) - - def infered(self): - warnings.warn('%s.infered() is deprecated and slated for removal ' - 'in astroid 2.0, use %s.inferred() instead.' - % (type(self).__name__, type(self).__name__), - PendingDeprecationWarning, stacklevel=2) - return self.inferred() - - def instanciate_class(self): - """instanciate a node if it is a ClassDef node, else return self""" - return self - - def has_base(self, node): - return False - - def callable(self): - return False - - def eq(self, value): - return False - - def as_string(self): - from astroid.as_string import to_code - return to_code(self) - - def repr_tree(self, ids=False): - from astroid.as_string import dump - return dump(self) - - -class Statement(NodeNG): - """Statement node adding a few attributes""" - is_statement = True - - def next_sibling(self): - """return the next sibling statement""" - stmts = self.parent.child_sequence(self) - index = stmts.index(self) - try: - return stmts[index +1] - except IndexError: - pass - - def previous_sibling(self): - """return the previous sibling statement""" - stmts = self.parent.child_sequence(self) - index = stmts.index(self) - if index >= 1: - return stmts[index -1] diff -Nru astroid-1.4.9/astroid/brain/brain_builtin_inference.py astroid-1.5.3/astroid/brain/brain_builtin_inference.py --- astroid-1.4.9/astroid/brain/brain_builtin_inference.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_builtin_inference.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,16 +1,24 @@ +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for various builtins.""" -import sys from functools import partial +import sys from textwrap import dedent import six -from astroid import (MANAGER, UseInferenceDefault, - inference_tip, YES, InferenceError, UnresolvableName) +from astroid import (MANAGER, UseInferenceDefault, AttributeInferenceError, + inference_tip, InferenceError, 
NameInferenceError) from astroid import arguments +from astroid.builder import AstroidBuilder +from astroid import helpers from astroid import nodes from astroid import objects -from astroid.builder import AstroidBuilder +from astroid import scoped_nodes from astroid import util def _extend_str(class_node, rvalue): @@ -62,7 +70,7 @@ code = code.format(rvalue=rvalue) fake = AstroidBuilder(MANAGER).string_build(code)['whatever'] for method in fake.mymethods(): - class_node._locals[method.name] = [method] + class_node.locals[method.name] = [method] method.parent = class_node def extend_builtins(class_transforms): @@ -118,10 +126,10 @@ inferred = next(arg.infer(context=context)) except (InferenceError, StopIteration): raise UseInferenceDefault() - if inferred is util.YES: + if inferred is util.Uninferable: raise UseInferenceDefault() transformed = transform(inferred) - if not transformed or transformed is util.YES: + if not transformed or transformed is util.Uninferable: raise UseInferenceDefault() return transformed @@ -146,7 +154,7 @@ elts = arg.value else: return - return klass(elts=build_elts(elts)) + return klass.from_constants(elts=build_elts(elts)) def _infer_builtin(node, context, @@ -164,25 +172,31 @@ infer_tuple = partial( _infer_builtin, klass=nodes.Tuple, - iterables=(nodes.List, nodes.Set), + iterables=(nodes.List, nodes.Set, objects.FrozenSet, + objects.DictItems, objects.DictKeys, + objects.DictValues), build_elts=tuple) infer_list = partial( _infer_builtin, klass=nodes.List, - iterables=(nodes.Tuple, nodes.Set), + iterables=(nodes.Tuple, nodes.Set, objects.FrozenSet, + objects.DictItems, objects.DictKeys, + objects.DictValues), build_elts=list) infer_set = partial( _infer_builtin, klass=nodes.Set, - iterables=(nodes.List, nodes.Tuple), + iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, + objects.DictKeys), build_elts=set) infer_frozenset = partial( _infer_builtin, klass=objects.FrozenSet, - iterables=(nodes.List, nodes.Tuple, nodes.Set), + iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, + objects.DictKeys), build_elts=frozenset) @@ -191,7 +205,7 @@ (nodes.List, nodes.Tuple, nodes.Set)) try: inferred = next(arg.infer(context)) - except (InferenceError, UnresolvableName): + except (InferenceError, NameInferenceError): raise UseInferenceDefault() if isinstance(inferred, nodes.Dict): items = inferred.items @@ -251,19 +265,11 @@ else: raise UseInferenceDefault() - empty = nodes.Dict() - empty.items = items - return empty - - -def _node_class(node): - klass = node.frame() - while klass is not None and not isinstance(klass, nodes.ClassDef): - if klass.parent is None: - klass = None - else: - klass = klass.parent.frame() - return klass + value = nodes.Dict(col_offset=node.col_offset, + lineno=node.lineno, + parent=node.parent) + value.postinit(items) + return value def infer_super(node, context=None): @@ -276,7 +282,7 @@ * if the super call is not inside a function (classmethod or method), then the default inference will be used. - * if the super arguments can't be infered, the default inference + * if the super arguments can't be inferred, the default inference will be used. """ if len(node.args) == 1: @@ -291,7 +297,7 @@ # Not interested in staticmethods. 
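# --- Editor's note (annotation, not part of the upstream diff) ---------------
# A hedged sketch of what infer_super() above produces for a zero-argument
# super() call on Python 3: an astroid.objects.Super proxy whose attribute
# lookups walk the MRO past the current class.
import astroid

call = astroid.extract_node('''
class Base(object):
    def greet(self):
        return "hello"

class Child(Base):
    def greet(self):
        super()  #@
''')
super_proxy = next(call.infer())
print(type(super_proxy).__name__)   # -> 'Super'
# next(super_proxy.igetattr('greet')) should resolve to Base.greet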
raise UseInferenceDefault - cls = _node_class(scope) + cls = scoped_nodes.get_wrapping_class(scope) if not len(node.args): mro_pointer = cls # In we are in a classmethod, the interpreter will fill @@ -311,7 +317,7 @@ except InferenceError: raise UseInferenceDefault - if mro_pointer is YES or mro_type is YES: + if mro_pointer is util.Uninferable or mro_type is util.Uninferable: # No way we could understand this. raise UseInferenceDefault @@ -320,17 +326,171 @@ self_class=cls, scope=scope) super_obj.parent = node - return iter([super_obj]) + return super_obj -# Builtins inference -MANAGER.register_transform(nodes.Call, - inference_tip(infer_super), - lambda n: (isinstance(n.func, nodes.Name) and - n.func.name == 'super')) +def _infer_getattr_args(node, context): + if len(node.args) not in (2, 3): + # Not a valid getattr call. + raise UseInferenceDefault + + try: + # TODO(cpopa): follow all the values of the first argument? + obj = next(node.args[0].infer(context=context)) + attr = next(node.args[1].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + + if obj is util.Uninferable or attr is util.Uninferable: + # If one of the arguments is something we can't infer, + # then also make the result of the getattr call something + # which is unknown. + return util.Uninferable, util.Uninferable + + is_string = (isinstance(attr, nodes.Const) and + isinstance(attr.value, six.string_types)) + if not is_string: + raise UseInferenceDefault + + return obj, attr.value + +def infer_getattr(node, context=None): + """Understand getattr calls + + If one of the arguments is an Uninferable object, then the + result will be an Uninferable object. Otherwise, the normal attribute + lookup will be done. + """ + obj, attr = _infer_getattr_args(node, context) + if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'igetattr'): + return util.Uninferable + + try: + return next(obj.igetattr(attr, context=context)) + except (StopIteration, InferenceError, AttributeInferenceError): + if len(node.args) == 3: + # Try to infer the default and return it instead. + try: + return next(node.args[2].infer(context=context)) + except InferenceError: + raise UseInferenceDefault + + raise UseInferenceDefault + + +def infer_hasattr(node, context=None): + """Understand hasattr calls + + This always guarantees three possible outcomes for calling + hasattr: Const(False) when we are sure that the object + doesn't have the intended attribute, Const(True) when + we know that the object has the attribute and Uninferable + when we are unsure of the outcome of the function call. + """ + try: + obj, attr = _infer_getattr_args(node, context) + if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'getattr'): + return util.Uninferable + obj.getattr(attr, context=context) + except UseInferenceDefault: + # Can't infer something from this function call. + return util.Uninferable + except AttributeInferenceError: + # Doesn't have it. + return nodes.Const(False) + return nodes.Const(True) + + +def infer_callable(node, context=None): + """Understand callable calls + + This follows Python's semantics, where an object + is callable if it provides an attribute __call__, + even though that attribute is something which can't be + called. + """ + if len(node.args) != 1: + # Invalid callable call. 
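# --- Editor's note (annotation, not part of the upstream diff) ---------------
# A sketch of the builtin transforms documented above: hasattr() and callable()
# calls are folded to Const(True)/Const(False) when the answer is known, and to
# Uninferable when it is not.
import astroid

has_it, not_callable = astroid.extract_node('''
hasattr([], "append")  #@
callable(42)           #@
''')
print(next(has_it.infer()).value)        # -> True
print(next(not_callable.infer()).value)  # -> False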
+ raise UseInferenceDefault + + argument = node.args[0] + try: + inferred = next(argument.infer(context=context)) + except InferenceError: + return util.Uninferable + if inferred is util.Uninferable: + return util.Uninferable + return nodes.Const(inferred.callable()) + + +def infer_bool(node, context=None): + """Understand bool calls.""" + if len(node.args) > 1: + # Invalid bool call. + raise UseInferenceDefault + + if not node.args: + return nodes.Const(False) + + argument = node.args[0] + try: + inferred = next(argument.infer(context=context)) + except InferenceError: + return util.Uninferable + if inferred is util.Uninferable: + return util.Uninferable + + bool_value = inferred.bool_value() + if bool_value is util.Uninferable: + return util.Uninferable + return nodes.Const(bool_value) + + +def infer_type(node, context=None): + """Understand the one-argument form of *type*.""" + if len(node.args) != 1: + raise UseInferenceDefault + + return helpers.object_type(node.args[0], context) + + +def infer_slice(node, context=None): + """Understand `slice` calls.""" + args = node.args + if not 0 < len(args) <= 3: + raise UseInferenceDefault + + args = list(map(helpers.safe_infer, args)) + for arg in args: + if not arg or arg is util.Uninferable: + raise UseInferenceDefault + if not isinstance(arg, nodes.Const): + raise UseInferenceDefault + if not isinstance(arg.value, (type(None), int)): + raise UseInferenceDefault + + if len(args) < 3: + # Make sure we have 3 arguments. + args.extend([None] * (3 - len(args))) + + slice_node = nodes.Slice(lineno=node.lineno, + col_offset=node.col_offset, + parent=node.parent) + slice_node.postinit(*args) + return slice_node + + +# Builtins inference +register_builtin_transform(infer_bool, 'bool') +register_builtin_transform(infer_super, 'super') +register_builtin_transform(infer_callable, 'callable') +register_builtin_transform(infer_getattr, 'getattr') +register_builtin_transform(infer_hasattr, 'hasattr') register_builtin_transform(infer_tuple, 'tuple') register_builtin_transform(infer_set, 'set') register_builtin_transform(infer_list, 'list') register_builtin_transform(infer_dict, 'dict') register_builtin_transform(infer_frozenset, 'frozenset') +register_builtin_transform(infer_type, 'type') +register_builtin_transform(infer_slice, 'slice') diff -Nru astroid-1.4.9/astroid/brain/brain_collections.py astroid-1.5.3/astroid/brain/brain_collections.py --- astroid-1.4.9/astroid/brain/brain_collections.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_collections.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,43 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import astroid + + +def _collections_transform(): + return astroid.parse(''' + class defaultdict(dict): + default_factory = None + def __missing__(self, key): pass + def __getitem__(self, key): return default_factory + + class deque(object): + maxlen = 0 + def __init__(self, iterable=None, maxlen=None): + self.iterable = iterable + def append(self, x): pass + def appendleft(self, x): pass + def clear(self): pass + def count(self, x): return 0 + def extend(self, iterable): pass + def extendleft(self, iterable): pass + def pop(self): pass + def popleft(self): pass + def remove(self, value): pass + def reverse(self): pass + def rotate(self, n): pass + def __iter__(self): return self + def __reversed__(self): return 
self.iterable[::-1] + def __getitem__(self, index): pass + def __setitem__(self, index, value): pass + def __delitem__(self, index): pass + + def OrderedDict(dict): + def __reversed__(self): return self[::-1] + ''') + + +astroid.register_module_extender(astroid.MANAGER, 'collections', _collections_transform) + diff -Nru astroid-1.4.9/astroid/brain/brain_dateutil.py astroid-1.5.3/astroid/brain/brain_dateutil.py --- astroid-1.4.9/astroid/brain/brain_dateutil.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_dateutil.py 2016-12-18 10:31:38.000000000 +0000 @@ -1,15 +1,21 @@ -"""Astroid hooks for dateutil""" - -import textwrap - -from astroid import MANAGER, register_module_extender -from astroid.builder import AstroidBuilder - -def dateutil_transform(): - return AstroidBuilder(MANAGER).string_build(textwrap.dedent(''' - import datetime - def parse(timestr, parserinfo=None, **kwargs): - return datetime.datetime() - ''')) - -register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform) +# Copyright (c) 2015-2016 Claudiu Popa +# Copyright (c) 2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +"""Astroid hooks for dateutil""" + +import textwrap + +from astroid import MANAGER, register_module_extender +from astroid.builder import AstroidBuilder + +def dateutil_transform(): + return AstroidBuilder(MANAGER).string_build(textwrap.dedent(''' + import datetime + def parse(timestr, parserinfo=None, **kwargs): + return datetime.datetime() + ''')) + +register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_fstrings.py astroid-1.5.3/astroid/brain/brain_fstrings.py --- astroid-1.4.9/astroid/brain/brain_fstrings.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_fstrings.py 2017-03-13 07:37:37.000000000 +0000 @@ -0,0 +1,61 @@ +# Copyright (c) 2017 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import sys + +import astroid + + +def _clone_node_with_lineno(node, parent, lineno): + cls = node.__class__ + other_fields = node._other_fields + _astroid_fields = node._astroid_fields + init_params = { + 'lineno': lineno, + 'col_offset': node.col_offset, + 'parent': parent + } + postinit_params = { + param: getattr(node, param) + for param in _astroid_fields + } + if other_fields: + init_params.update({ + param: getattr(node, param) + for param in other_fields + }) + new_node = cls(**init_params) + if hasattr(node, 'postinit'): + new_node.postinit(**postinit_params) + return new_node + + +def _transform_formatted_value(node): + if node.value and node.value.lineno == 1: + if node.lineno != node.value.lineno: + new_node = astroid.FormattedValue( + lineno=node.lineno, + col_offset=node.col_offset, + parent=node.parent + ) + new_value = _clone_node_with_lineno( + node=node.value, + lineno=node.lineno, + parent=new_node + ) + new_node.postinit(value=new_value, + format_spec=node.format_spec) + return new_node + + +if sys.version_info[:2] >= (3, 6): + # TODO: this fix tries to *patch* http://bugs.python.org/issue29051 + # The problem is that FormattedValue.value, which is a Name node, + # has wrong line numbers, usually 1. This creates problems for pylint, + # which expects correct line numbers for things such as message control. 
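# --- Editor's note (annotation, not part of the upstream diff) ---------------
# An illustrative check of the f-string fix explained above (Python 3.6+ only):
# after the transform registered below runs, the expression nested inside an
# f-string reports the f-string's own line instead of line 1.
import astroid

module = astroid.parse('x = 1\ny = 2\nvalue = f"{x + y}"\n')
formatted_value = module.body[-1].value.values[0]   # the FormattedValue node
print(formatted_value.value.lineno)                 # expected: 3, not 1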
+ astroid.MANAGER.register_transform( + astroid.FormattedValue, + _transform_formatted_value) + diff -Nru astroid-1.4.9/astroid/brain/brain_functools.py astroid-1.5.3/astroid/brain/brain_functools.py --- astroid-1.4.9/astroid/brain/brain_functools.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_functools.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,75 @@ +# Copyright (c) 2016 Claudiu Popa + +"""Astroid hooks for understanding functools library module.""" + +import astroid +from astroid import BoundMethod +from astroid import extract_node +from astroid import helpers +from astroid.interpreter import objectmodel +from astroid import MANAGER + + +LRU_CACHE = 'functools.lru_cache' + + +class LruWrappedModel(objectmodel.FunctionModel): + """Special attribute model for functions decorated with functools.lru_cache. + + The said decorators patches at decoration time some functions onto + the decorated function. + """ + + @property + def py__wrapped__(self): + return self._instance + + @property + def pycache_info(self): + cache_info = extract_node(''' + from functools import _CacheInfo + _CacheInfo(0, 0, 0, 0) + ''') + class CacheInfoBoundMethod(BoundMethod): + def infer_call_result(self, caller, context=None): + yield helpers.safe_infer(cache_info) + + return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance) + + @property + def pycache_clear(self): + node = extract_node('''def cache_clear(self): pass''') + return BoundMethod(proxy=node, bound=self._instance.parent.scope()) + + +def _transform_lru_cache(node, context=None): + # TODO: this is not ideal, since the node should be immutable, + # but due to https://github.com/PyCQA/astroid/issues/354, + # there's not much we can do now. + # Replacing the node would work partially, because, + # in pylint, the old node would still be available, leading + # to spurious false positives. + node.special_attributes = LruWrappedModel()(node) + return + + +def _looks_like_lru_cache(node): + """Check if the given function node is decorated with lru_cache.""" + if not node.decorators: + return False + + for decorator in node.decorators.nodes: + if not isinstance(decorator, astroid.Call): + continue + + func = helpers.safe_infer(decorator.func) + if func in (None, astroid.Uninferable): + continue + + if isinstance(func, astroid.FunctionDef) and func.qname() == LRU_CACHE: + return True + return False + + +MANAGER.register_transform(astroid.FunctionDef, _transform_lru_cache, + _looks_like_lru_cache) diff -Nru astroid-1.4.9/astroid/brain/brain_gi.py astroid-1.5.3/astroid/brain/brain_gi.py --- astroid-1.4.9/astroid/brain/brain_gi.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_gi.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,3 +1,8 @@ +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for the Python 2 GObject introspection bindings. 
Helps with understanding everything imported from 'gi.repository' @@ -9,7 +14,7 @@ import re import warnings -from astroid import MANAGER, AstroidBuildingException, nodes +from astroid import MANAGER, AstroidBuildingError, nodes from astroid.builder import AstroidBuilder @@ -64,7 +69,7 @@ ret = "" if constants: - ret += "# %s contants\n\n" % parent.__name__ + ret += "# %s constants\n\n" % parent.__name__ for name in sorted(constants): if name[0].isdigit(): # GDK has some busted constant names like @@ -114,7 +119,7 @@ def _import_gi_module(modname): # we only consider gi.repository submodules if not modname.startswith('gi.repository.'): - raise AstroidBuildingException() + raise AstroidBuildingError(modname=modname) # build astroid representation unless we already tried so if modname not in _inspected_modules: modnames = [modname] @@ -133,16 +138,17 @@ modcode = '' for m in itertools.chain(modnames, optional_modnames): try: - __import__(m) with warnings.catch_warnings(): # Just inspecting the code can raise gi deprecation # warnings, so ignore them. try: - from gi import PyGIDeprecationWarning + from gi import PyGIDeprecationWarning, PyGIWarning warnings.simplefilter("ignore", PyGIDeprecationWarning) + warnings.simplefilter("ignore", PyGIWarning) except Exception: pass + __import__(m) modcode += _gi_build_stub(sys.modules[m]) except ImportError: if m not in optional_modnames: @@ -155,7 +161,7 @@ else: astng = _inspected_modules[modname] if astng is None: - raise AstroidBuildingException('Failed to import module %r' % modname) + raise AstroidBuildingError(modname=modname) return astng def _looks_like_require_version(node): diff -Nru astroid-1.4.9/astroid/brain/brain_hashlib.py astroid-1.5.3/astroid/brain/brain_hashlib.py --- astroid-1.4.9/astroid/brain/brain_hashlib.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_hashlib.py 2016-12-18 10:31:38.000000000 +0000 @@ -0,0 +1,40 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import six + +import astroid + + +def _hashlib_transform(): + template = ''' + class %(name)s(object): + def __init__(self, value=''): pass + def digest(self): + return %(digest)s + def copy(self): + return self + def update(self, value): pass + def hexdigest(self): + return '' + @property + def name(self): + return %(name)r + @property + def block_size(self): + return 1 + @property + def digest_size(self): + return 1 + ''' + algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + classes = "".join( + template % {'name': hashfunc, 'digest': 'b""' if six.PY3 else '""'} + for hashfunc in algorithms) + return astroid.parse(classes) + + +astroid.register_module_extender(astroid.MANAGER, 'hashlib', _hashlib_transform) + diff -Nru astroid-1.4.9/astroid/brain/brain_io.py astroid-1.5.3/astroid/brain/brain_io.py --- astroid-1.4.9/astroid/brain/brain_io.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_io.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,43 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +'''Astroid brain hints for some of the _io C objects.''' + +import astroid + + +BUFFERED = {'BufferedWriter', 'BufferedReader'} +TextIOWrapper = 'TextIOWrapper' +FileIO = 'FileIO' +BufferedWriter = 
'BufferedWriter' + + +def _generic_io_transform(node, name, cls): + '''Transform the given name, by adding the given *class* as a member of the node.''' + + io_module = astroid.MANAGER.ast_from_module_name('_io') + attribute_object = io_module[cls] + instance = attribute_object.instantiate_class() + node.locals[name] = [instance] + + +def _transform_text_io_wrapper(node): + # This is not always correct, since it can vary with the type of the descriptor, + # being stdout, stderr or stdin. But we cannot get access to the name of the + # stream, which is why we are using the BufferedWriter class as a default + # value + return _generic_io_transform(node, name='buffer', cls=BufferedWriter) + + +def _transform_buffered(node): + return _generic_io_transform(node, name='raw', cls=FileIO) + + +astroid.MANAGER.register_transform(astroid.ClassDef, + _transform_buffered, + lambda node: node.name in BUFFERED) +astroid.MANAGER.register_transform(astroid.ClassDef, + _transform_text_io_wrapper, + lambda node: node.name == TextIOWrapper) diff -Nru astroid-1.4.9/astroid/brain/brain_mechanize.py astroid-1.5.3/astroid/brain/brain_mechanize.py --- astroid-1.4.9/astroid/brain/brain_mechanize.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_mechanize.py 2016-12-18 10:31:38.000000000 +0000 @@ -1,3 +1,8 @@ +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder diff -Nru astroid-1.4.9/astroid/brain/brain_multiprocessing.py astroid-1.5.3/astroid/brain/brain_multiprocessing.py --- astroid-1.4.9/astroid/brain/brain_multiprocessing.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_multiprocessing.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,104 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import sys + +import astroid +from astroid import exceptions + + +PY34 = sys.version_info >= (3, 4) + + +def _multiprocessing_transform(): + module = astroid.parse(''' + from multiprocessing.managers import SyncManager + def Manager(): + return SyncManager() + ''') + if not PY34: + return module + + # On Python 3.4, multiprocessing uses a getattr lookup inside contexts, + # in order to get the attributes they need. Since it's extremely + # dynamic, we use this approach to fake it. + node = astroid.parse(''' + from multiprocessing.context import DefaultContext, BaseContext + default = DefaultContext() + base = BaseContext() + ''') + try: + context = next(node['default'].infer()) + base = next(node['base'].infer()) + except exceptions.InferenceError: + return module + + for node in (context, base): + for key, value in node.locals.items(): + if key.startswith("_"): + continue + + value = value[0] + if isinstance(value, astroid.FunctionDef): + # We need to rebound this, since otherwise + # it will have an extra argument (self). 
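A rough sketch of what the multiprocessing extenders defined in this file provide to consumers such as pylint. This is illustrative only, not taken from the patch, and assumes a Python 3 interpreter where the stdlib multiprocessing module is importable.

import astroid

# multiprocessing.Manager() has no statically visible return type, so the
# extender's fake "def Manager(): return SyncManager()" definition is what
# makes this call inferable at all.
call = astroid.extract_node('''
import multiprocessing
multiprocessing.Manager()  #@
''')
manager = next(call.infer())
print(manager.pytype())  # should name multiprocessing.managers.SyncManager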
+ value = astroid.BoundMethod(value, node) + module[key] = value + return module + + +def _multiprocessing_managers_transform(): + return astroid.parse(''' + import array + import threading + import multiprocessing.pool as pool + + import six + + class Namespace(object): + pass + + class Value(object): + def __init__(self, typecode, value, lock=True): + self._typecode = typecode + self._value = value + def get(self): + return self._value + def set(self, value): + self._value = value + def __repr__(self): + return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value) + value = property(get, set) + + def Array(typecode, sequence, lock=True): + return array.array(typecode, sequence) + + class SyncManager(object): + Queue = JoinableQueue = six.moves.queue.Queue + Event = threading.Event + RLock = threading.RLock + BoundedSemaphore = threading.BoundedSemaphore + Condition = threading.Condition + Barrier = threading.Barrier + Pool = pool.Pool + list = list + dict = dict + Value = Value + Array = Array + Namespace = Namespace + __enter__ = lambda self: self + __exit__ = lambda *args: args + + def start(self, initializer=None, initargs=None): + pass + def shutdown(self): + pass + ''') + + +astroid.register_module_extender(astroid.MANAGER, 'multiprocessing.managers', + _multiprocessing_managers_transform) +astroid.register_module_extender(astroid.MANAGER, 'multiprocessing', + _multiprocessing_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_namedtuple_enum.py astroid-1.5.3/astroid/brain/brain_namedtuple_enum.py --- astroid-1.4.9/astroid/brain/brain_namedtuple_enum.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_namedtuple_enum.py 2017-04-12 05:57:16.000000000 +0000 @@ -0,0 +1,233 @@ +# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +"""Astroid hooks for the Python standard library.""" + +import functools +import sys +import keyword +from textwrap import dedent + +from astroid import ( + MANAGER, UseInferenceDefault, inference_tip, + InferenceError) +from astroid import arguments +from astroid import exceptions +from astroid import nodes +from astroid.builder import AstroidBuilder, extract_node +from astroid import util + +PY3K = sys.version_info > (3, 0) +PY33 = sys.version_info >= (3, 3) +PY34 = sys.version_info >= (3, 4) + +# general function + +def infer_func_form(node, base_type, context=None, enum=False): + """Specific inference function for namedtuple or Python 3 enum. 
""" + def infer_first(node): + if node is util.Uninferable: + raise UseInferenceDefault + try: + value = next(node.infer(context=context)) + if value is util.Uninferable: + raise UseInferenceDefault() + else: + return value + except StopIteration: + raise InferenceError() + + # node is a Call node, class name as first argument and generated class + # attributes as second argument + if len(node.args) != 2: + # something weird here, go back to class implementation + raise UseInferenceDefault() + # namedtuple or enums list of attributes can be a list of strings or a + # whitespace-separate string + try: + name = infer_first(node.args[0]).value + names = infer_first(node.args[1]) + try: + attributes = names.value.replace(',', ' ').split() + except AttributeError: + if not enum: + attributes = [infer_first(const).value for const in names.elts] + else: + # Enums supports either iterator of (name, value) pairs + # or mappings. + # TODO: support only list, tuples and mappings. + if hasattr(names, 'items') and isinstance(names.items, list): + attributes = [infer_first(const[0]).value + for const in names.items + if isinstance(const[0], nodes.Const)] + elif hasattr(names, 'elts'): + # Enums can support either ["a", "b", "c"] + # or [("a", 1), ("b", 2), ...], but they can't + # be mixed. + if all(isinstance(const, nodes.Tuple) + for const in names.elts): + attributes = [infer_first(const.elts[0]).value + for const in names.elts + if isinstance(const, nodes.Tuple)] + else: + attributes = [infer_first(const).value + for const in names.elts] + else: + raise AttributeError + if not attributes: + raise AttributeError + except (AttributeError, exceptions.InferenceError): + raise UseInferenceDefault() + + # If we can't infer the name of the class, don't crash, up to this point + # we know it is a namedtuple anyway. 
+ name = name or 'Uninferable' + # we want to return a Class node instance with proper attributes set + class_node = nodes.ClassDef(name, 'docstring') + class_node.parent = node.parent + # set base class=tuple + class_node.bases.append(base_type) + # XXX add __init__(*attributes) method + for attr in attributes: + fake_node = nodes.EmptyNode() + fake_node.parent = class_node + fake_node.attrname = attr + class_node.instance_attrs[attr] = [fake_node] + return class_node, name, attributes + + +def _looks_like(node, name): + func = node.func + if isinstance(func, nodes.Attribute): + return func.attrname == name + if isinstance(func, nodes.Name): + return func.name == name + return False + +_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple') +_looks_like_enum = functools.partial(_looks_like, name='Enum') + + +def infer_named_tuple(node, context=None): + """Specific inference function for namedtuple Call node""" + class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, + context=context) + call_site = arguments.CallSite.from_call(node) + func = next(extract_node('import collections; collections.namedtuple').infer()) + try: + rename = next(call_site.infer_argument(func, 'rename', context)).bool_value() + except InferenceError: + rename = False + + if rename: + attributes = _get_renamed_namedtuple_atributes(attributes) + + field_def = (" {name} = property(lambda self: self[{index:d}], " + "doc='Alias for field number {index:d}')") + field_defs = '\n'.join(field_def.format(name=name, index=index) + for index, name in enumerate(attributes)) + fake = AstroidBuilder(MANAGER).string_build(''' +class %(name)s(tuple): + __slots__ = () + _fields = %(fields)r + def _asdict(self): + return self.__dict__ + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + return new(cls, iterable) + def _replace(self, **kwds): + return self + def __getnewargs__(self): + return tuple(self) +%(field_defs)s + ''' % {'name': name, 'fields': attributes, 'field_defs': field_defs}) + class_node.locals['_asdict'] = fake.body[0].locals['_asdict'] + class_node.locals['_make'] = fake.body[0].locals['_make'] + class_node.locals['_replace'] = fake.body[0].locals['_replace'] + class_node.locals['_fields'] = fake.body[0].locals['_fields'] + for attr in attributes: + class_node.locals[attr] = fake.body[0].locals[attr] + # we use UseInferenceDefault, we can't be a generator so return an iterator + return iter([class_node]) + + +def _get_renamed_namedtuple_atributes(field_names): + names = list(field_names) + seen = set() + for i, name in enumerate(field_names): + if (not all(c.isalnum() or c == '_' for c in name) or keyword.iskeyword(name) + or not name or name[0].isdigit() or name.startswith('_') or name in seen): + names[i] = '_%d' % i + seen.add(name) + return tuple(names) + + +def infer_enum(node, context=None): + """ Specific inference function for enum Call node. """ + enum_meta = extract_node(''' + class EnumMeta(object): + 'docstring' + def __call__(self, node): + class EnumAttribute(object): + name = '' + value = 0 + return EnumAttribute() + ''') + class_node = infer_func_form(node, enum_meta, + context=context, enum=True)[0] + return iter([class_node.instantiate_class()]) + + +def infer_enum_class(node): + """ Specific inference for enums. """ + names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) + for basename in node.basenames: + # TODO: doesn't handle subclasses yet. This implementation + # is a hack to support enums. 
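To make the effect of this class-level rewrite concrete, a hedged sketch (not part of the patch; it assumes the stdlib enum module is available and astroid's transforms are active):

import astroid

# Each enum member assignment is replaced by an instance of a mocked class
# exposing .name and .value, so attribute access on members stays inferable.
attr = astroid.extract_node('''
import enum

class Colour(enum.Enum):
    RED = 1

Colour.RED.name  #@
''')
print(next(attr.infer()).value)  # should infer to the string 'RED'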
+ if basename not in names: + continue + if node.root().name == 'enum': + # Skip if the class is directly from enum module. + break + for local, values in node.locals.items(): + if any(not isinstance(value, nodes.AssignName) + for value in values): + continue + + stmt = values[0].statement() + if isinstance(stmt.targets[0], nodes.Tuple): + targets = stmt.targets[0].itered() + else: + targets = stmt.targets + + new_targets = [] + for target in targets: + # Replace all the assignments with our mocked class. + classdef = dedent(''' + class %(name)s(%(types)s): + @property + def value(self): + # Not the best return. + return None + @property + def name(self): + return %(name)r + ''' % {'name': target.name, 'types': ', '.join(node.basenames)}) + fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] + fake.parent = target.parent + for method in node.mymethods(): + fake.locals[method.name] = [method] + new_targets.append(fake.instantiate_class()) + node.locals[local] = new_targets + break + return node + + +MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple), + _looks_like_namedtuple) +MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), + _looks_like_enum) +MANAGER.register_transform(nodes.ClassDef, infer_enum_class) diff -Nru astroid-1.4.9/astroid/brain/brain_nose.py astroid-1.5.3/astroid/brain/brain_nose.py --- astroid-1.4.9/astroid/brain/brain_nose.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_nose.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,8 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Hooks for nose library.""" @@ -55,7 +43,7 @@ def _nose_tools_transform(node): for method_name, method in _nose_tools_functions(): - node._locals[method_name] = [method] + node.locals[method_name] = [method] def _nose_tools_trivial_transform(): diff -Nru astroid-1.4.9/astroid/brain/brain_numpy.py astroid-1.5.3/astroid/brain/brain_numpy.py --- astroid-1.4.9/astroid/brain/brain_numpy.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_numpy.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,8 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. 
-# -# astroid is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for numpy.""" diff -Nru astroid-1.4.9/astroid/brain/brain_pkg_resources.py astroid-1.5.3/astroid/brain/brain_pkg_resources.py --- astroid-1.4.9/astroid/brain/brain_pkg_resources.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_pkg_resources.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,70 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +import astroid +from astroid import parse +from astroid import inference_tip +from astroid import register_module_extender +from astroid import MANAGER + + +def pkg_resources_transform(): + return parse(''' +def require(*requirements): + return pkg_resources.working_set.require(*requirements) + +def run_script(requires, script_name): + return pkg_resources.working_set.run_script(requires, script_name) + +def iter_entry_points(group, name=None): + return pkg_resources.working_set.iter_entry_points(group, name) + +def resource_exists(package_or_requirement, resource_name): + return get_provider(package_or_requirement).has_resource(resource_name) + +def resource_isdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_isdir( + resource_name) + +def resource_filename(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name) + +def resource_stream(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name) + +def resource_string(package_or_requirement, resource_name): + return get_provider(package_or_requirement).get_resource_string( + self, resource_name) + +def resource_listdir(package_or_requirement, resource_name): + return get_provider(package_or_requirement).resource_listdir( + resource_name) + +def extraction_error(): + pass + +def get_cache_path(archive_name, names=()): + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + return target_path + +def postprocess(tempname, filename): + pass + +def set_extraction_path(path): + pass + +def cleanup_resources(force=False): + pass + +def get_distribution(dist): + return Distribution(dist) + +''') + +register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_pytest.py astroid-1.5.3/astroid/brain/brain_pytest.py --- astroid-1.4.9/astroid/brain/brain_pytest.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_pytest.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,3 +1,9 @@ +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: 
https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for pytest.""" from __future__ import absolute_import from astroid import MANAGER, register_module_extender diff -Nru astroid-1.4.9/astroid/brain/brain_qt.py astroid-1.5.3/astroid/brain/brain_qt.py --- astroid-1.4.9/astroid/brain/brain_qt.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_qt.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,3 +1,8 @@ +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for the PyQT library.""" from astroid import MANAGER, register_module_extender @@ -7,9 +12,13 @@ def _looks_like_signal(node, signal_name='pyqtSignal'): - if '__class__' in node._instance_attrs: - cls = node._instance_attrs['__class__'][0] - return cls.name == signal_name + if '__class__' in node.instance_attrs: + try: + cls = node.instance_attrs['__class__'][0] + return cls.name == signal_name + except AttributeError: + # return False if the cls does not have a name attribute + pass return False @@ -24,9 +33,9 @@ pass ''') signal_cls = module['pyqtSignal'] - node._instance_attrs['emit'] = signal_cls['emit'] - node._instance_attrs['disconnect'] = signal_cls['disconnect'] - node._instance_attrs['connect'] = signal_cls['connect'] + node.instance_attrs['emit'] = signal_cls['emit'] + node.instance_attrs['disconnect'] = signal_cls['disconnect'] + node.instance_attrs['connect'] = signal_cls['connect'] def pyqt4_qtcore_transform(): diff -Nru astroid-1.4.9/astroid/brain/brain_re.py astroid-1.5.3/astroid/brain/brain_re.py --- astroid-1.4.9/astroid/brain/brain_re.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_re.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,34 @@ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER +import sys +import astroid + +PY36 = sys.version_info >= (3, 6) + +if PY36: + # Since Python 3.6 there is the RegexFlag enum + # where every entry will be exposed via updating globals() + + def _re_transform(): + return astroid.parse(''' + import sre_compile + ASCII = sre_compile.SRE_FLAG_ASCII + IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE + LOCALE = sre_compile.SRE_FLAG_LOCALE + UNICODE = sre_compile.SRE_FLAG_UNICODE + MULTILINE = sre_compile.SRE_FLAG_MULTILINE + DOTALL = sre_compile.SRE_FLAG_DOTALL + VERBOSE = sre_compile.SRE_FLAG_VERBOSE + A = ASCII + I = IGNORECASE + L = LOCALE + U = UNICODE + M = MULTILINE + S = DOTALL + X = VERBOSE + TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE + T = TEMPLATE + DEBUG = sre_compile.SRE_FLAG_DEBUG + ''') + + astroid.register_module_extender(astroid.MANAGER, 're', _re_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_six.py astroid-1.5.3/astroid/brain/brain_six.py --- astroid-1.4.9/astroid/brain/brain_six.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_six.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,29 +1,17 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. 
-# -# astroid is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# astroid is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2014-2016 Claudiu Popa -"""Astroid hooks for six.moves.""" +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +"""Astroid hooks for six module.""" import sys from textwrap import dedent from astroid import MANAGER, register_module_extender from astroid.builder import AstroidBuilder -from astroid.exceptions import AstroidBuildingException, InferenceError +from astroid.exceptions import AstroidBuildingError, InferenceError from astroid import nodes @@ -113,7 +101,7 @@ splitquery = _urllib.splitquery splittag = _urllib.splittag splituser = _urllib.splituser - uses_fragment = _urlparse.uses_fragment + uses_fragment = _urlparse.uses_fragment uses_netloc = _urlparse.uses_netloc uses_params = _urlparse.uses_params uses_query = _urlparse.uses_query @@ -254,7 +242,7 @@ def _six_fail_hook(modname): if modname != 'six.moves': - raise AstroidBuildingException + raise AstroidBuildingError(modname=modname) module = AstroidBuilder(MANAGER).string_build(_IMPORTS) module.name = 'six.moves' return module diff -Nru astroid-1.4.9/astroid/brain/brain_ssl.py astroid-1.5.3/astroid/brain/brain_ssl.py --- astroid-1.4.9/astroid/brain/brain_ssl.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_ssl.py 2016-12-18 10:31:38.000000000 +0000 @@ -1,3 +1,8 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Astroid hooks for the ssl library.""" from astroid import MANAGER, register_module_extender diff -Nru astroid-1.4.9/astroid/brain/brain_stdlib.py astroid-1.5.3/astroid/brain/brain_stdlib.py --- astroid-1.4.9/astroid/brain/brain_stdlib.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_stdlib.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,473 +0,0 @@ - -"""Astroid hooks for the Python 2 standard library. - -Currently help understanding of : - -* hashlib.md5 and hashlib.sha1 -""" - -import functools -import sys -from textwrap import dedent - -from astroid import ( - MANAGER, UseInferenceDefault, inference_tip, BoundMethod, - InferenceError, register_module_extender) -from astroid import exceptions -from astroid import nodes -from astroid.builder import AstroidBuilder -from astroid import util -from astroid import test_utils - -PY3K = sys.version_info > (3, 0) -PY33 = sys.version_info >= (3, 3) -PY34 = sys.version_info >= (3, 4) - -# general function - -def infer_func_form(node, base_type, context=None, enum=False): - """Specific inference function for namedtuple or Python 3 enum. 
""" - def infer_first(node): - if node is util.YES: - raise UseInferenceDefault - try: - value = next(node.infer(context=context)) - if value is util.YES: - raise UseInferenceDefault() - else: - return value - except StopIteration: - raise InferenceError() - - # node is a Call node, class name as first argument and generated class - # attributes as second argument - if len(node.args) != 2: - # something weird here, go back to class implementation - raise UseInferenceDefault() - # namedtuple or enums list of attributes can be a list of strings or a - # whitespace-separate string - try: - name = infer_first(node.args[0]).value - names = infer_first(node.args[1]) - try: - attributes = names.value.replace(',', ' ').split() - except AttributeError: - if not enum: - attributes = [infer_first(const).value for const in names.elts] - else: - # Enums supports either iterator of (name, value) pairs - # or mappings. - # TODO: support only list, tuples and mappings. - if hasattr(names, 'items') and isinstance(names.items, list): - attributes = [infer_first(const[0]).value - for const in names.items - if isinstance(const[0], nodes.Const)] - elif hasattr(names, 'elts'): - # Enums can support either ["a", "b", "c"] - # or [("a", 1), ("b", 2), ...], but they can't - # be mixed. - if all(isinstance(const, nodes.Tuple) - for const in names.elts): - attributes = [infer_first(const.elts[0]).value - for const in names.elts - if isinstance(const, nodes.Tuple)] - else: - attributes = [infer_first(const).value - for const in names.elts] - else: - raise AttributeError - if not attributes: - raise AttributeError - except (AttributeError, exceptions.InferenceError): - raise UseInferenceDefault() - - # If we can't iner the name of the class, don't crash, up to this point - # we know it is a namedtuple anyway. 
- name = name or 'Uninferable' - # we want to return a Class node instance with proper attributes set - class_node = nodes.ClassDef(name, 'docstring') - class_node.parent = node.parent - # set base class=tuple - class_node.bases.append(base_type) - # XXX add __init__(*attributes) method - for attr in attributes: - fake_node = nodes.EmptyNode() - fake_node.parent = class_node - fake_node.attrname = attr - class_node._instance_attrs[attr] = [fake_node] - return class_node, name, attributes - - -# module specific transformation functions ##################################### - -def hashlib_transform(): - template = ''' - -class %(name)s(object): - def __init__(self, value=''): pass - def digest(self): - return %(digest)s - def copy(self): - return self - def update(self, value): pass - def hexdigest(self): - return '' - @property - def name(self): - return %(name)r - @property - def block_size(self): - return 1 - @property - def digest_size(self): - return 1 -''' - algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') - classes = "".join( - template % {'name': hashfunc, 'digest': 'b""' if PY3K else '""'} - for hashfunc in algorithms) - return AstroidBuilder(MANAGER).string_build(classes) - - -def collections_transform(): - return AstroidBuilder(MANAGER).string_build(''' - -class defaultdict(dict): - default_factory = None - def __missing__(self, key): pass - -class deque(object): - maxlen = 0 - def __init__(self, iterable=None, maxlen=None): - self.iterable = iterable - def append(self, x): pass - def appendleft(self, x): pass - def clear(self): pass - def count(self, x): return 0 - def extend(self, iterable): pass - def extendleft(self, iterable): pass - def pop(self): pass - def popleft(self): pass - def remove(self, value): pass - def reverse(self): pass - def rotate(self, n): pass - def __iter__(self): return self - def __reversed__(self): return self.iterable[::-1] - def __getitem__(self, index): pass - def __setitem__(self, index, value): pass - def __delitem__(self, index): pass -''') - - -def pkg_resources_transform(): - return AstroidBuilder(MANAGER).string_build(''' -def require(*requirements): - return pkg_resources.working_set.require(*requirements) - -def run_script(requires, script_name): - return pkg_resources.working_set.run_script(requires, script_name) - -def iter_entry_points(group, name=None): - return pkg_resources.working_set.iter_entry_points(group, name) - -def resource_exists(package_or_requirement, resource_name): - return get_provider(package_or_requirement).has_resource(resource_name) - -def resource_isdir(package_or_requirement, resource_name): - return get_provider(package_or_requirement).resource_isdir( - resource_name) - -def resource_filename(package_or_requirement, resource_name): - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name) - -def resource_stream(package_or_requirement, resource_name): - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name) - -def resource_string(package_or_requirement, resource_name): - return get_provider(package_or_requirement).get_resource_string( - self, resource_name) - -def resource_listdir(package_or_requirement, resource_name): - return get_provider(package_or_requirement).resource_listdir( - resource_name) - -def extraction_error(): - pass - -def get_cache_path(archive_name, names=()): - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - return target_path 
- -def postprocess(tempname, filename): - pass - -def set_extraction_path(path): - pass - -def cleanup_resources(force=False): - pass - -''') - - -def subprocess_transform(): - if PY3K: - communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) - communicate_signature = 'def communicate(self, input=None, timeout=None)' - init = """ - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0, restore_signals=True, - start_new_session=False, pass_fds=()): - pass - """ - else: - communicate = ('string', 'string') - communicate_signature = 'def communicate(self, input=None)' - init = """ - def __init__(self, args, bufsize=0, executable=None, - stdin=None, stdout=None, stderr=None, - preexec_fn=None, close_fds=False, shell=False, - cwd=None, env=None, universal_newlines=False, - startupinfo=None, creationflags=0): - pass - """ - if PY33: - wait_signature = 'def wait(self, timeout=None)' - else: - wait_signature = 'def wait(self)' - if PY3K: - ctx_manager = ''' - def __enter__(self): return self - def __exit__(self, *args): pass - ''' - else: - ctx_manager = '' - code = dedent(''' - - class Popen(object): - returncode = pid = 0 - stdin = stdout = stderr = file() - - %(init)s - - %(communicate_signature)s: - return %(communicate)r - %(wait_signature)s: - return self.returncode - def poll(self): - return self.returncode - def send_signal(self, signal): - pass - def terminate(self): - pass - def kill(self): - pass - %(ctx_manager)s - ''' % {'init': init, - 'communicate': communicate, - 'communicate_signature': communicate_signature, - 'wait_signature': wait_signature, - 'ctx_manager': ctx_manager}) - return AstroidBuilder(MANAGER).string_build(code) - - -# namedtuple support ########################################################### - -def _looks_like(node, name): - func = node.func - if isinstance(func, nodes.Attribute): - return func.attrname == name - if isinstance(func, nodes.Name): - return func.name == name - return False - -_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple') -_looks_like_enum = functools.partial(_looks_like, name='Enum') - - -def infer_named_tuple(node, context=None): - """Specific inference function for namedtuple Call node""" - class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied, - context=context) - fake = AstroidBuilder(MANAGER).string_build(''' -class %(name)s(tuple): - _fields = %(fields)r - def _asdict(self): - return self.__dict__ - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - return new(cls, iterable) - def _replace(self, **kwds): - return self - ''' % {'name': name, 'fields': attributes}) - class_node._locals['_asdict'] = fake.body[0]._locals['_asdict'] - class_node._locals['_make'] = fake.body[0]._locals['_make'] - class_node._locals['_replace'] = fake.body[0]._locals['_replace'] - class_node._locals['_fields'] = fake.body[0]._locals['_fields'] - # we use UseInferenceDefault, we can't be a generator so return an iterator - return iter([class_node]) - - -def infer_enum(node, context=None): - """ Specific inference function for enum Call node. 
""" - enum_meta = test_utils.extract_node(''' - class EnumMeta(object): - 'docstring' - def __call__(self, node): - class EnumAttribute(object): - name = '' - value = 0 - return EnumAttribute() - ''') - class_node = infer_func_form(node, enum_meta, - context=context, enum=True)[0] - return iter([class_node.instantiate_class()]) - - -def infer_enum_class(node): - """ Specific inference for enums. """ - names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum')) - for basename in node.basenames: - # TODO: doesn't handle subclasses yet. This implementation - # is a hack to support enums. - if basename not in names: - continue - if node.root().name == 'enum': - # Skip if the class is directly from enum module. - break - for local, values in node._locals.items(): - if any(not isinstance(value, nodes.AssignName) - for value in values): - continue - - stmt = values[0].statement() - if isinstance(stmt.targets[0], nodes.Tuple): - targets = stmt.targets[0].itered() - else: - targets = stmt.targets - - new_targets = [] - for target in targets: - # Replace all the assignments with our mocked class. - classdef = dedent(''' - class %(name)s(%(types)s): - @property - def value(self): - # Not the best return. - return None - @property - def name(self): - return %(name)r - ''' % {'name': target.name, 'types': ', '.join(node.basenames)}) - fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name] - fake.parent = target.parent - for method in node.mymethods(): - fake._locals[method.name] = [method] - new_targets.append(fake.instantiate_class()) - node._locals[local] = new_targets - break - return node - -def multiprocessing_transform(): - module = AstroidBuilder(MANAGER).string_build(dedent(''' - from multiprocessing.managers import SyncManager - def Manager(): - return SyncManager() - ''')) - if not PY34: - return module - - # On Python 3.4, multiprocessing uses a getattr lookup inside contexts, - # in order to get the attributes they need. Since it's extremely - # dynamic, we use this approach to fake it. - node = AstroidBuilder(MANAGER).string_build(dedent(''' - from multiprocessing.context import DefaultContext, BaseContext - default = DefaultContext() - base = BaseContext() - ''')) - try: - context = next(node['default'].infer()) - base = next(node['base'].infer()) - except InferenceError: - return module - - for node in (context, base): - for key, value in node._locals.items(): - if key.startswith("_"): - continue - - value = value[0] - if isinstance(value, nodes.FunctionDef): - # We need to rebound this, since otherwise - # it will have an extra argument (self). 
- value = BoundMethod(value, node) - module[key] = value - return module - -def multiprocessing_managers_transform(): - return AstroidBuilder(MANAGER).string_build(dedent(''' - import array - import threading - import multiprocessing.pool as pool - - import six - - class Namespace(object): - pass - - class Value(object): - def __init__(self, typecode, value, lock=True): - self._typecode = typecode - self._value = value - def get(self): - return self._value - def set(self, value): - self._value = value - def __repr__(self): - return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value) - value = property(get, set) - - def Array(typecode, sequence, lock=True): - return array.array(typecode, sequence) - - class SyncManager(object): - Queue = JoinableQueue = six.moves.queue.Queue - Event = threading.Event - RLock = threading.RLock - BoundedSemaphore = threading.BoundedSemaphore - Condition = threading.Condition - Barrier = threading.Barrier - Pool = pool.Pool - list = list - dict = dict - Value = Value - Array = Array - Namespace = Namespace - __enter__ = lambda self: self - __exit__ = lambda *args: args - - def start(self, initializer=None, initargs=None): - pass - def shutdown(self): - pass - ''')) - - -MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple), - _looks_like_namedtuple) -MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), - _looks_like_enum) -MANAGER.register_transform(nodes.ClassDef, infer_enum_class) -register_module_extender(MANAGER, 'hashlib', hashlib_transform) -register_module_extender(MANAGER, 'collections', collections_transform) -register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform) -register_module_extender(MANAGER, 'subprocess', subprocess_transform) -register_module_extender(MANAGER, 'multiprocessing.managers', - multiprocessing_managers_transform) -register_module_extender(MANAGER, 'multiprocessing', multiprocessing_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_subprocess.py astroid-1.5.3/astroid/brain/brain_subprocess.py --- astroid-1.4.9/astroid/brain/brain_subprocess.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_subprocess.py 2017-04-20 10:22:05.000000000 +0000 @@ -0,0 +1,94 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import sys +import textwrap + +import six + +import astroid + + +PY33 = sys.version_info >= (3, 3) +PY36 = sys.version_info >= (3, 6) + + +def _subprocess_transform(): + if six.PY3: + communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) + communicate_signature = 'def communicate(self, input=None, timeout=None)' + if PY36: + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0, restore_signals=True, + start_new_session=False, pass_fds=(), *, + encoding=None, errors=None): + pass + """ + else: + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0, restore_signals=True, + start_new_session=False, pass_fds=()): + pass + """ + else: + communicate = ('string', 'string') + communicate_signature = 'def 
communicate(self, input=None)' + init = """ + def __init__(self, args, bufsize=0, executable=None, + stdin=None, stdout=None, stderr=None, + preexec_fn=None, close_fds=False, shell=False, + cwd=None, env=None, universal_newlines=False, + startupinfo=None, creationflags=0): + pass + """ + if PY33: + wait_signature = 'def wait(self, timeout=None)' + else: + wait_signature = 'def wait(self)' + if six.PY3: + ctx_manager = ''' + def __enter__(self): return self + def __exit__(self, *args): pass + ''' + else: + ctx_manager = '' + code = textwrap.dedent(''' + class Popen(object): + returncode = pid = 0 + stdin = stdout = stderr = file() + + %(communicate_signature)s: + return %(communicate)r + %(wait_signature)s: + return self.returncode + def poll(self): + return self.returncode + def send_signal(self, signal): + pass + def terminate(self): + pass + def kill(self): + pass + %(ctx_manager)s + ''' % {'communicate': communicate, + 'communicate_signature': communicate_signature, + 'wait_signature': wait_signature, + 'ctx_manager': ctx_manager}) + + init_lines = textwrap.dedent(init).splitlines() + indented_init = '\n'.join([' ' * 4 + line for line in init_lines]) + code += indented_init + return astroid.parse(code) + + +astroid.register_module_extender(astroid.MANAGER, 'subprocess', _subprocess_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_threading.py astroid-1.5.3/astroid/brain/brain_threading.py --- astroid-1.4.9/astroid/brain/brain_threading.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_threading.py 2017-06-01 22:07:25.000000000 +0000 @@ -0,0 +1,26 @@ +# Copyright (c) 2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import astroid + + +def _thread_transform(): + return astroid.parse(''' + class lock(object): + def acquire(self, blocking=True): + pass + def release(self): + pass + def __enter__(self): + return True + def __exit__(self, *args): + pass + + def Lock(): + return lock() + ''') + + +astroid.register_module_extender(astroid.MANAGER, 'threading', _thread_transform) diff -Nru astroid-1.4.9/astroid/brain/brain_typing.py astroid-1.5.3/astroid/brain/brain_typing.py --- astroid-1.4.9/astroid/brain/brain_typing.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/brain/brain_typing.py 2017-04-14 09:52:41.000000000 +0000 @@ -0,0 +1,89 @@ +# Copyright (c) 2016 David Euresti + +"""Astroid hooks for typing.py support.""" +import textwrap + +from astroid import ( + MANAGER, UseInferenceDefault, extract_node, inference_tip, + nodes, InferenceError) +from astroid.nodes import List, Tuple + + +TYPING_NAMEDTUPLE_BASENAMES = { + 'NamedTuple', + 'typing.NamedTuple' +} + + +def infer_typing_namedtuple(node, context=None): + """Infer a typing.NamedTuple(...) call.""" + # This is essentially a namedtuple with different arguments + # so we extract the args and infer a named tuple. 
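A short sketch of the call form this hook targets (illustrative, not from the patch; it assumes Python 3 with a typing module whose NamedTuple resolves to typing.NamedTuple):

import astroid

# typing.NamedTuple("Point", [...]) is rewritten into an equivalent
# collections.namedtuple call and inferred as a class with the listed fields.
call = astroid.extract_node('''
import typing
typing.NamedTuple("Point", [("x", int), ("y", int)])  #@
''')
klass = next(call.infer())
print(klass.name)           # should be 'Point'
print('x' in klass.locals)  # should be True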
+ try: + func = next(node.func.infer()) + except InferenceError: + raise UseInferenceDefault + + if func.qname() != 'typing.NamedTuple': + raise UseInferenceDefault + + if len(node.args) != 2: + raise UseInferenceDefault + + if not isinstance(node.args[1], (List, Tuple)): + raise UseInferenceDefault + + names = [] + for elt in node.args[1].elts: + if not isinstance(elt, (List, Tuple)): + raise UseInferenceDefault + if len(elt.elts) != 2: + raise UseInferenceDefault + names.append(elt.elts[0].as_string()) + + typename = node.args[0].as_string() + node = extract_node('namedtuple(%(typename)s, (%(fields)s,)) ' % + {'typename': typename, 'fields': ",".join(names)}) + return node.infer(context=context) + + +def infer_typing_namedtuple_class(node, context=None): + """Infer a subclass of typing.NamedTuple""" + + # Check if it has the corresponding bases + if not set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES: + raise UseInferenceDefault + + annassigns_fields = [ + annassign.target.name for annassign in node.body + if isinstance(annassign, nodes.AnnAssign) + ] + code = textwrap.dedent(''' + from collections import namedtuple + namedtuple({typename!r}, {fields!r}) + ''').format( + typename=node.name, + fields=",".join(annassigns_fields) + ) + node = extract_node(code) + return node.infer(context=context) + + +def looks_like_typing_namedtuple(node): + func = node.func + if isinstance(func, nodes.Attribute): + return func.attrname == 'NamedTuple' + if isinstance(func, nodes.Name): + return func.name == 'NamedTuple' + return False + + +MANAGER.register_transform( + nodes.Call, + inference_tip(infer_typing_namedtuple), + looks_like_typing_namedtuple +) +MANAGER.register_transform( + nodes.ClassDef, + inference_tip(infer_typing_namedtuple_class) +) diff -Nru astroid-1.4.9/astroid/builder.py astroid-1.5.3/astroid/builder.py --- astroid-1.4.9/astroid/builder.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/builder.py 2017-06-01 22:07:19.000000000 +0000 @@ -1,31 +1,22 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2015 Google, Inc. +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """The AstroidBuilder makes astroid from living object and / or from _ast The builder is not thread safe and can't be used to parse different sources at the same time. 
""" -from __future__ import with_statement -import _ast +import re import os import sys import textwrap +import _ast from astroid import bases from astroid import exceptions @@ -33,32 +24,34 @@ from astroid import modutils from astroid import raw_building from astroid import rebuilder +from astroid import nodes from astroid import util +# The name of the transient function that is used to +# wrap expressions to be extracted when calling +# extract_node. +_TRANSIENT_FUNCTION = '__' + +# The comment used to select a statement to be extracted +# when calling extract_node. +_STATEMENT_SELECTOR = '#@' + def _parse(string): return compile(string, "", 'exec', _ast.PyCF_ONLY_AST) if sys.version_info >= (3, 0): - # pylint: disable=no-name-in-module; We don't understand flows yet. from tokenize import detect_encoding def open_source_file(filename): with open(filename, 'rb') as byte_stream: encoding = detect_encoding(byte_stream.readline)[0] stream = open(filename, 'r', newline=None, encoding=encoding) - try: - data = stream.read() - except UnicodeError: # wrong encoding - # detect_encoding returns utf-8 if no encoding specified - msg = 'Wrong (%s) or no encoding specified' % encoding - raise exceptions.AstroidBuildingException(msg) + data = stream.read() return stream, encoding, data else: - import re - _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)") def _guess_encoding(string): @@ -83,6 +76,17 @@ MANAGER = manager.AstroidManager() +def _can_assign_attr(node, attrname): + try: + slots = node.slots() + except NotImplementedError: + pass + else: + if slots and attrname not in set(slot.value for slot in slots): + return False + return True + + class AstroidBuilder(raw_building.InspectBuilder): """Class for building an astroid tree from source code or from a live module. @@ -92,7 +96,7 @@ applied after the tree was built from source or from a live object, by default being True. 
""" - + # pylint: disable=redefined-outer-name def __init__(self, manager=None, apply_transforms=True): super(AstroidBuilder, self).__init__() self._manager = manager or MANAGER @@ -124,12 +128,18 @@ try: stream, encoding, data = open_source_file(path) except IOError as exc: - msg = 'Unable to load file %r (%s)' % (path, exc) - raise exceptions.AstroidBuildingException(msg) - except SyntaxError as exc: # py3k encoding specification error - raise exceptions.AstroidBuildingException(exc) - except LookupError as exc: # unknown encoding - raise exceptions.AstroidBuildingException(exc) + util.reraise(exceptions.AstroidBuildingError( + 'Unable to load file {path}:\n{error}', + modname=modname, path=path, error=exc)) + except (SyntaxError, LookupError) as exc: + util.reraise(exceptions.AstroidSyntaxError( + 'Python 3 encoding specification error or unknown encoding:\n' + '{error}', modname=modname, path=path, error=exc)) + except UnicodeError: # wrong encoding + # detect_encoding returns utf-8 if no encoding specified + util.reraise(exceptions.AstroidBuildingError( + 'Wrong or no encoding specified for {filename}.', + filename=path)) with stream: # get module name if necessary if modname is None: @@ -144,7 +154,7 @@ def string_build(self, data, modname='', path=None): """Build astroid from source code string.""" module = self._data_build(data, modname, path) - module.source_code = data.encode('utf-8') + module.file_bytes = data.encode('utf-8') return self._post_build(module, 'utf-8') def _post_build(self, module, encoding): @@ -155,7 +165,7 @@ for from_node in module._import_from_nodes: if from_node.modname == '__future__': for symbol, _ in from_node.names: - module._future_imports.add(symbol) + module.future_imports.add(symbol) self.add_from_names_to_locals(from_node) # handle delayed assattr nodes for delayed in module._delayed_assattr: @@ -171,7 +181,9 @@ try: node = _parse(data + '\n') except (TypeError, ValueError, SyntaxError) as exc: - raise exceptions.AstroidBuildingException(exc) + util.reraise(exceptions.AstroidSyntaxError( + 'Parsing Python code failed:\n{error}', + source=data, modname=modname, path=path, error=exc)) if path is not None: node_file = os.path.abspath(path) else: @@ -180,7 +192,7 @@ modname = modname[:-9] package = True else: - package = path and path.find('__init__.py') > -1 or False + package = path is not None and os.path.splitext(os.path.basename(path))[0] == '__init__' builder = rebuilder.TreeRebuilder(self._manager) module = builder.visit_module(node, modname, node_file, package) module._import_from_nodes = builder._import_from_nodes @@ -200,14 +212,14 @@ if name == '*': try: imported = node.do_import_module() - except exceptions.InferenceError: + except exceptions.AstroidBuildingError: continue - for name in imported._public_names(): + for name in imported.public_names(): node.parent.set_local(name, node) - sort_locals(node.parent.scope()._locals[name]) + sort_locals(node.parent.scope().locals[name]) else: node.parent.set_local(asname or name, node) - sort_locals(node.parent.scope()._locals[asname or name]) + sort_locals(node.parent.scope().locals[asname or name]) def delayed_assattr(self, node): """Visit a AssAttr node @@ -217,20 +229,22 @@ try: frame = node.frame() for inferred in node.expr.infer(): - if inferred is util.YES: + if inferred is util.Uninferable: continue try: if inferred.__class__ is bases.Instance: inferred = inferred._proxied - iattrs = inferred._instance_attrs + iattrs = inferred.instance_attrs + if not _can_assign_attr(inferred, 
node.attrname): + continue elif isinstance(inferred, bases.Instance): # Const, Tuple, ... we may be wrong, may be not, but # anyway we don't want to pollute builtin's namespace continue elif inferred.is_function: - iattrs = inferred._instance_attrs + iattrs = inferred.instance_attrs else: - iattrs = inferred._locals + iattrs = inferred.locals except AttributeError: # XXX log error continue @@ -239,7 +253,7 @@ continue # get assign in __init__ first XXX useful ? if (frame.name == '__init__' and values and - not values[0].frame().name == '__init__'): + values[0].frame().name != '__init__'): values.insert(0, node) else: values.append(node) @@ -247,6 +261,10 @@ pass +def build_namespace_package_module(name, path): + return nodes.Module(name, doc='', path=path, package=True) + + def parse(code, module_name='', path=None, apply_transforms=True): """Parses a source string in order to obtain an astroid AST from it @@ -261,3 +279,159 @@ builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms) return builder.string_build(code, modname=module_name, path=path) + + +def _extract_expressions(node): + """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. + + The function walks the AST recursively to search for expressions that + are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an + expression, it completely removes the function call node from the tree, + replacing it by the wrapped expression inside the parent. + + :param node: An astroid node. + :type node: astroid.bases.NodeNG + :yields: The sequence of wrapped expressions on the modified tree + expression can be found. + """ + if (isinstance(node, nodes.Call) + and isinstance(node.func, nodes.Name) + and node.func.name == _TRANSIENT_FUNCTION): + real_expr = node.args[0] + real_expr.parent = node.parent + # Search for node in all _astng_fields (the fields checked when + # get_children is called) of its parent. Some of those fields may + # be lists or tuples, in which case the elements need to be checked. + # When we find it, replace it by real_expr, so that the AST looks + # like no call to _TRANSIENT_FUNCTION ever took place. + for name in node.parent._astroid_fields: + child = getattr(node.parent, name) + if isinstance(child, (list, tuple)): + for idx, compound_child in enumerate(child): + if compound_child is node: + child[idx] = real_expr + elif child is node: + setattr(node.parent, name, real_expr) + yield real_expr + else: + for child in node.get_children(): + for result in _extract_expressions(child): + yield result + + +def _find_statement_by_line(node, line): + """Extracts the statement on a specific line from an AST. + + If the line number of node matches line, it will be returned; + otherwise its children are iterated and the function is called + recursively. + + :param node: An astroid node. + :type node: astroid.bases.NodeNG + :param line: The line number of the statement to extract. + :type line: int + :returns: The statement on the line, or None if no statement for the line + can be found. + :rtype: astroid.bases.NodeNG or None + """ + if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)): + # This is an inaccuracy in the AST: the nodes that can be + # decorated do not carry explicit information on which line + # the actual definition (class/def), but .fromline seems to + # be close enough. 
+ node_line = node.fromlineno + else: + node_line = node.lineno + + if node_line == line: + return node + + for child in node.get_children(): + result = _find_statement_by_line(child, line) + if result: + return result + + return None + + +def extract_node(code, module_name=''): + """Parses some Python code as a module and extracts a designated AST node. + + Statements: + To extract one or more statement nodes, append #@ to the end of the line + + Examples: + >>> def x(): + >>> def y(): + >>> return 1 #@ + + The return statement will be extracted. + + >>> class X(object): + >>> def meth(self): #@ + >>> pass + + The function object 'meth' will be extracted. + + Expressions: + To extract arbitrary expressions, surround them with the fake + function call __(...). After parsing, the surrounded expression + will be returned and the whole AST (accessible via the returned + node's parent attribute) will look like the function call was + never there in the first place. + + Examples: + >>> a = __(1) + + The const node will be extracted. + + >>> def x(d=__(foo.bar)): pass + + The node containing the default argument will be extracted. + + >>> def foo(a, b): + >>> return 0 < __(len(a)) < b + + The node containing the function call 'len' will be extracted. + + If no statements or expressions are selected, the last toplevel + statement will be returned. + + If the selected statement is a discard statement, (i.e. an expression + turned into a statement), the wrapped expression is returned instead. + + For convenience, singleton lists are unpacked. + + :param str code: A piece of Python code that is parsed as + a module. Will be passed through textwrap.dedent first. + :param str module_name: The name of the module. + :returns: The designated node from the parse tree, or a list of nodes. + :rtype: astroid.bases.NodeNG, or a list of nodes. + """ + def _extract(node): + if isinstance(node, nodes.Expr): + return node.value + + return node + + requested_lines = [] + for idx, line in enumerate(code.splitlines()): + if line.strip().endswith(_STATEMENT_SELECTOR): + requested_lines.append(idx + 1) + + tree = parse(code, module_name=module_name) + extracted = [] + if requested_lines: + for line in requested_lines: + extracted.append(_find_statement_by_line(tree, line)) + + # Modifies the tree. + extracted.extend(_extract_expressions(tree)) + + if not extracted: + extracted.append(tree.body[-1]) + + extracted = [_extract(node) for node in extracted] + if len(extracted) == 1: + return extracted[0] + return extracted diff -Nru astroid-1.4.9/astroid/context.py astroid-1.5.3/astroid/context.py --- astroid-1.4.9/astroid/context.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/context.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,24 +1,13 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """Various context related utilities, including inference and call contexts.""" import contextlib +import pprint class InferenceContext(object): @@ -34,8 +23,10 @@ def push(self, node): name = self.lookupname if (node, name) in self.path: - raise StopIteration() + return True + self.path.add((node, name)) + return False def clone(self): # XXX copy lookupname/callcontext ? @@ -59,6 +50,12 @@ yield self.path = path + def __str__(self): + state = ('%s=%s' % (field, pprint.pformat(getattr(self, field), + width=80 - len(field))) + for field in self.__slots__) + return '%s(%s)' % (type(self).__name__, ',\n '.join(state)) + class CallContext(object): """Holds information for a call site.""" @@ -77,5 +74,5 @@ def copy_context(context): if context is not None: return context.clone() - else: - return InferenceContext() + + return InferenceContext() diff -Nru astroid-1.4.9/astroid/decorators.py astroid-1.5.3/astroid/decorators.py --- astroid-1.4.9/astroid/decorators.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/decorators.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,28 +1,20 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . -# -# The code in this file was originally part of logilab-common, licensed under -# the same license. 
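A note on the InferenceContext.push() change above: instead of raising StopIteration, push() now returns True when the (node, lookupname) pair is already on the inference path, and otherwise records the pair and returns False. The rewritten path_wrapper in decorators.py below relies on exactly that contract to cut inference cycles. A minimal sketch of the same guard, using only the API visible in this diff (illustrative only, not part of the patch):

    from astroid import extract_node
    from astroid import context as contextmod

    node = extract_node('1 + 2')
    ctx = contextmod.InferenceContext()

    # First visit: the (node, lookupname) pair is recorded, push() is falsy.
    assert not ctx.push(node)
    # Visiting the same node again in the same context signals a cycle.
    assert ctx.push(node)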
+# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015 Florian Bruhin +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """ A few useful function/method decorators.""" +import functools + import wrapt +from astroid import context as contextmod +from astroid import exceptions +from astroid import util + @wrapt.decorator def cached(func, instance, args, kwargs): @@ -57,8 +49,8 @@ try: wrapped.__name__ except AttributeError: - raise TypeError('%s must have a __name__ attribute' % - wrapped) + util.reraise(TypeError('%s must have a __name__ attribute' + % wrapped)) self.wrapped = wrapped @property @@ -73,3 +65,83 @@ val = self.wrapped(inst) setattr(inst, self.wrapped.__name__, val) return val + + +def path_wrapper(func): + """return the given infer function wrapped to handle the path""" + # TODO: switch this to wrapt after the monkey-patching is fixed (ceridwen) + @functools.wraps(func) + def wrapped(node, context=None, _func=func, **kwargs): + """wrapper function handling context""" + if context is None: + context = contextmod.InferenceContext() + if context.push(node): + return + + yielded = set() + generator = _func(node, context, **kwargs) + try: + while True: + res = next(generator) + # unproxy only true instance, not const, tuple, dict... + if res.__class__.__name__ == 'Instance': + ares = res._proxied + else: + ares = res + if ares not in yielded: + yield res + yielded.add(ares) + except StopIteration as error: + # Explicit StopIteration to return error information, see + # comment in raise_if_nothing_inferred. + if error.args: + raise StopIteration(error.args[0]) + else: + raise StopIteration + + return wrapped + + +@wrapt.decorator +def yes_if_nothing_inferred(func, instance, args, kwargs): + inferred = False + for node in func(*args, **kwargs): + inferred = True + yield node + if not inferred: + yield util.Uninferable + + +@wrapt.decorator +def raise_if_nothing_inferred(func, instance, args, kwargs): + '''All generators wrapped with raise_if_nothing_inferred *must* + explicitly raise StopIteration with information to create an + appropriate structured InferenceError. + + ''' + # TODO: Explicitly raising StopIteration in a generator will cause + # a RuntimeError in Python >=3.7, as per + # http://legacy.python.org/dev/peps/pep-0479/ . Before 3.7 is + # released, this code will need to use one of four possible + # solutions: a decorator that restores the current behavior as + # described in + # http://legacy.python.org/dev/peps/pep-0479/#sub-proposal-decorator-to-explicitly-request-current-behaviour + # , dynamic imports or exec to generate different code for + # different versions, drop support for all Python versions <3.3, + # or refactoring to change how these decorators work. In any + # event, after dropping support for Python <3.3 this code should + # be refactored to use `yield from`. 
+ inferred = False + try: + generator = func(*args, **kwargs) + while True: + yield next(generator) + inferred = True + except StopIteration as error: + if not inferred: + if error.args: + # pylint: disable=not-a-mapping + raise exceptions.InferenceError(**error.args[0]) + else: + raise exceptions.InferenceError( + 'StopIteration raised without any error information.') diff -Nru astroid-1.4.9/astroid/exceptions.py astroid-1.5.3/astroid/exceptions.py --- astroid-1.4.9/astroid/exceptions.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/exceptions.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,37 +1,118 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . -"""this module contains exceptions used in the astroid library +# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER +"""this module contains exceptions used in the astroid library """ +from astroid import util -__doctype__ = "restructuredtext en" class AstroidError(Exception): - """base exception class for all astroid related exceptions""" + """base exception class for all astroid related exceptions + + AstroidError and its subclasses are structured, intended to hold + objects representing state when the exception is thrown. Field + values are passed to the constructor as keyword-only arguments. + Each subclass has its own set of standard fields, but use your + best judgment to decide whether a specific exception instance + needs more or fewer fields for debugging. Field values may be + used to lazily generate the error message: self.message.format() + will be called with the field names and values supplied as keyword + arguments. + """ + def __init__(self, message='', **kws): + super(AstroidError, self).__init__(message) + self.message = message + for key, value in kws.items(): + setattr(self, key, value) + + def __str__(self): + return self.message.format(**vars(self)) + + +class AstroidBuildingError(AstroidError): + """exception class when we are unable to build an astroid representation + + Standard attributes: + modname: Name of the module that AST construction failed for. + error: Exception raised during construction. 
+ """ + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='Failed to import module {modname}.', **kws): + super(AstroidBuildingError, self).__init__(message, **kws) + + +class AstroidImportError(AstroidBuildingError): + """Exception class used when a module can't be imported by astroid.""" + + +class TooManyLevelsError(AstroidImportError): + """Exception class which is raised when a relative import was beyond the top-level. + + Standard attributes: + level: The level which was attempted. + name: the name of the module on which the relative import was attempted. + """ + level = None + name = None + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='Relative import with too many levels ' + '({level}) for module {name!r}', **kws): + super(TooManyLevelsError, self).__init__(message, **kws) + + +class AstroidSyntaxError(AstroidBuildingError): + """Exception class used when a module can't be parsed.""" + + +class NoDefault(AstroidError): + """raised by function's `default_value` method when an argument has + no default value + + Standard attributes: + func: Function node. + name: Name of argument without a default. + """ + func = None + name = None + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='{func!r} has no default for {name!r}.', **kws): + super(NoDefault, self).__init__(message, **kws) -class AstroidBuildingException(AstroidError): - """exception class when we are unable to build an astroid representation""" class ResolveError(AstroidError): - """base class of astroid resolution/inference error""" + """Base class of astroid resolution/inference error. + + ResolveError is not intended to be raised. + + Standard attributes: + context: InferenceContext object. + """ + context = None + class MroError(ResolveError): - """Error raised when there is a problem with method resolution of a class.""" + """Error raised when there is a problem with method resolution of a class. + + Standard attributes: + mros: A sequence of sequences containing ClassDef nodes. + cls: ClassDef node whose MRO resolution failed. + context: InferenceContext object. + """ + mros = () + cls = None + + def __str__(self): + mro_names = ", ".join("({})".format(", ".join(b.name for b in m)) + for m in self.mros) + return self.message.format(mros=mro_names, cls=self.cls) class DuplicateBasesError(MroError): @@ -43,29 +124,92 @@ class SuperError(ResolveError): - """Error raised when there is a problem with a super call.""" + """Error raised when there is a problem with a super call. -class SuperArgumentTypeError(SuperError): - """Error raised when the super arguments are invalid.""" + Standard attributes: + super_: The Super instance that raised the exception. + context: InferenceContext object. + """ + super_ = None + def __str__(self): + return self.message.format(**vars(self.super_)) -class NotFoundError(ResolveError): - """raised when we are unable to resolve a name""" class InferenceError(ResolveError): - """raised when we are unable to infer a node""" + """raised when we are unable to infer a node + + Standard attributes: + node: The node inference was called on. + context: InferenceContext object. 
+ """ + node = None + context = None + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='Inference failed for {node!r}.', **kws): + super(InferenceError, self).__init__(message, **kws) + + +# Why does this inherit from InferenceError rather than ResolveError? +# Changing it causes some inference tests to fail. +class NameInferenceError(InferenceError): + """Raised when a name lookup fails, corresponds to NameError. + + Standard attributes: + name: The name for which lookup failed, as a string. + scope: The node representing the scope in which the lookup occurred. + context: InferenceContext object. + """ + name = None + scope = None + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='{name!r} not found in {scope!r}.', **kws): + super(NameInferenceError, self).__init__(message, **kws) + + +class AttributeInferenceError(ResolveError): + """Raised when an attribute lookup fails, corresponds to AttributeError. + + Standard attributes: + target: The node for which lookup failed. + attribute: The attribute for which lookup failed, as a string. + context: InferenceContext object. + """ + target = None + attribute = None + + # pylint: disable=useless-super-delegation; https://github.com/PyCQA/pylint/issues/1085 + def __init__(self, message='{attribute!r} not found on {target!r}.', **kws): + super(AttributeInferenceError, self).__init__(message, **kws) + class UseInferenceDefault(Exception): """exception to be raised in custom inference function to indicate that it should go back to the default behaviour """ -class UnresolvableName(InferenceError): - """raised when we are unable to resolve a name""" -class NoDefault(AstroidError): - """raised by function's `default_value` method when an argument has - no default value - """ +class _NonDeducibleTypeHierarchy(Exception): + """Raised when is_subtype / is_supertype can't deduce the relation between two types.""" + + +class AstroidIndexError(AstroidError): + """Raised when an Indexable / Mapping does not have an index / key.""" + + +class AstroidTypeError(AstroidError): + """Raised when a TypeError would be expected in Python code.""" + + +# Backwards-compatibility aliases +OperationError = util.BadOperationMessage +UnaryOperationError = util.BadUnaryOperationMessage +BinaryOperationError = util.BadBinaryOperationMessage +SuperArgumentTypeError = SuperError +UnresolvableName = NameInferenceError +NotFoundError = AttributeInferenceError +AstroidBuildingException = AstroidBuildingError diff -Nru astroid-1.4.9/astroid/helpers.py astroid-1.5.3/astroid/helpers.py --- astroid-1.4.9/astroid/helpers.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/helpers.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,173 @@ +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +""" +Various helper utilities. 
+""" + +import six + +from astroid import bases +from astroid import context as contextmod +from astroid import exceptions +from astroid import manager +from astroid import nodes +from astroid import raw_building +from astroid import scoped_nodes +from astroid import util + + +BUILTINS = six.moves.builtins.__name__ + + +def _build_proxy_class(cls_name, builtins): + proxy = raw_building.build_class(cls_name) + proxy.parent = builtins + return proxy + + +def _function_type(function, builtins): + if isinstance(function, scoped_nodes.Lambda): + if function.root().name == BUILTINS: + cls_name = 'builtin_function_or_method' + else: + cls_name = 'function' + elif isinstance(function, bases.BoundMethod): + if six.PY2: + cls_name = 'instancemethod' + else: + cls_name = 'method' + elif isinstance(function, bases.UnboundMethod): + if six.PY2: + cls_name = 'instancemethod' + else: + cls_name = 'function' + return _build_proxy_class(cls_name, builtins) + + +def _object_type(node, context=None): + astroid_manager = manager.AstroidManager() + builtins = astroid_manager.astroid_cache[BUILTINS] + context = context or contextmod.InferenceContext() + + for inferred in node.infer(context=context): + if isinstance(inferred, scoped_nodes.ClassDef): + if inferred.newstyle: + metaclass = inferred.metaclass() + if metaclass: + yield metaclass + continue + yield builtins.getattr('type')[0] + elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)): + yield _function_type(inferred, builtins) + elif isinstance(inferred, scoped_nodes.Module): + yield _build_proxy_class('module', builtins) + else: + yield inferred._proxied + + +def object_type(node, context=None): + """Obtain the type of the given node + + This is used to implement the ``type`` builtin, which means that it's + used for inferring type calls, as well as used in a couple of other places + in the inference. + The node will be inferred first, so this function can support all + sorts of objects, as long as they support inference. + """ + + try: + types = set(_object_type(node, context)) + except exceptions.InferenceError: + return util.Uninferable + if len(types) > 1 or not types: + return util.Uninferable + return list(types)[0] + + +def safe_infer(node, context=None): + """Return the inferred value for the given node. + + Return None if inference failed or if there is some ambiguity (more than + one node has been inferred). + """ + try: + inferit = node.infer(context=context) + value = next(inferit) + except exceptions.InferenceError: + return + try: + next(inferit) + return # None if there is ambiguity on the inferred node + except exceptions.InferenceError: + return # there is some kind of ambiguity + except StopIteration: + return value + + +def has_known_bases(klass, context=None): + """Return true if all base classes of a class could be inferred.""" + try: + return klass._all_bases_known + except AttributeError: + pass + for base in klass.bases: + result = safe_infer(base, context=context) + # TODO: check for A->B->A->B pattern in class structure too? 
+ if (not isinstance(result, scoped_nodes.ClassDef) or + result is klass or + not has_known_bases(result, context=context)): + klass._all_bases_known = False + return False + klass._all_bases_known = True + return True + + +def _type_check(type1, type2): + if not all(map(has_known_bases, (type1, type2))): + raise exceptions._NonDeducibleTypeHierarchy + + if not all([type1.newstyle, type2.newstyle]): + return False + try: + return type1 in type2.mro()[:-1] + except exceptions.MroError: + # The MRO is invalid. + raise exceptions._NonDeducibleTypeHierarchy + + +def is_subtype(type1, type2): + """Check if *type1* is a subtype of *typ2*.""" + return _type_check(type2, type1) + + +def is_supertype(type1, type2): + """Check if *type2* is a supertype of *type1*.""" + return _type_check(type1, type2) + + +def class_instance_as_index(node): + """Get the value as an index for the given instance. + + If an instance provides an __index__ method, then it can + be used in some scenarios where an integer is expected, + for instance when multiplying or subscripting a list. + """ + context = contextmod.InferenceContext() + context.callcontext = contextmod.CallContext(args=[node]) + + try: + for inferred in node.igetattr('__index__', context=context): + if not isinstance(inferred, bases.BoundMethod): + continue + + for result in inferred.infer_call_result(node, context=context): + if (isinstance(result, nodes.Const) + and isinstance(result.value, int)): + return result + except exceptions.InferenceError: + pass diff -Nru astroid-1.4.9/astroid/inference.py astroid-1.5.3/astroid/inference.py --- astroid-1.4.9/astroid/inference.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/inference.py 2017-04-20 12:13:28.000000000 +0000 @@ -1,30 +1,26 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2014 Google, Inc. 
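The new helpers module above is mostly consumed through safe_infer() and object_type(), whose behaviour is spelled out in their docstrings. A minimal usage sketch combining them with extract_node() from builder.py (illustrative only, not taken from the patch):

    from astroid import extract_node, helpers

    node = extract_node('1')

    # safe_infer returns the single inferred value, or None when inference
    # fails or yields more than one candidate.
    inferred = helpers.safe_infer(node)
    assert inferred is not None

    # object_type backs the ``type`` builtin: for a literal it resolves to
    # the builtin class, and it returns Uninferable when undecidable.
    assert helpers.object_type(inferred).name == 'int'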
+# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """this module contains a set of functions to handle inference on astroid trees """ -from __future__ import print_function +import functools +import itertools +import operator from astroid import bases from astroid import context as contextmod from astroid import exceptions +from astroid import decorators +from astroid import helpers from astroid import manager from astroid import nodes +from astroid.interpreter import dunder_lookup from astroid import protocols from astroid import util @@ -46,10 +42,77 @@ nodes.FunctionDef._infer = infer_end nodes.Lambda._infer = infer_end nodes.Const._infer = infer_end -nodes.List._infer = infer_end -nodes.Tuple._infer = infer_end -nodes.Dict._infer = infer_end -nodes.Set._infer = infer_end +nodes.Slice._infer = infer_end + + +def infer_seq(self, context=None): + if not any(isinstance(e, nodes.Starred) for e in self.elts): + yield self + else: + values = _infer_seq(self, context) + new_seq = type(self)(self.lineno, self.col_offset, self.parent) + new_seq.postinit(values) + yield new_seq + + +def _infer_seq(node, context=None): + """Infer all values based on _BaseContainer.elts""" + values = [] + + for elt in node.elts: + if isinstance(elt, nodes.Starred): + starred = helpers.safe_infer(elt.value, context) + if starred in (None, util.Uninferable): + raise exceptions.InferenceError(node=node, + context=context) + if not hasattr(starred, 'elts'): + raise exceptions.InferenceError(node=node, + context=context) + values.extend(_infer_seq(starred)) + else: + values.append(elt) + return values + + +nodes.List._infer = infer_seq +nodes.Tuple._infer = infer_seq +nodes.Set._infer = infer_seq + + +def infer_map(self, context=None): + if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items): + yield self + else: + items = _infer_map(self, context) + new_seq = type(self)(self.lineno, self.col_offset, self.parent) + new_seq.postinit(list(items.items())) + yield new_seq + + +def _infer_map(node, context): + """Infer all values based on Dict.items""" + values = {} + for name, value in node.items: + if isinstance(name, nodes.DictUnpack): + double_starred = helpers.safe_infer(value, context) + if double_starred in (None, util.Uninferable): + raise exceptions.InferenceError + if not isinstance(double_starred, nodes.Dict): + raise exceptions.InferenceError(node=node, + context=context) + values.update(_infer_map(double_starred, context)) + else: + key = helpers.safe_infer(name, context=context) + value = helpers.safe_infer(value, context=context) + if key is None or value is None: + raise exceptions.InferenceError(node=node, + context=context) + values[key] = value + return values + + +nodes.Dict._infer = infer_map + def _higher_function_scope(node): """ Search for the first function which encloses the given @@ -81,16 +144,18 @@ _, stmts = parent_function.lookup(self.name) if not stmts: - raise exceptions.UnresolvableName(self.name) + raise exceptions.NameInferenceError(name=self.name, + scope=self.scope(), + context=context) context = context.clone() context.lookupname = self.name return bases._infer_stmts(stmts, context, frame) -nodes.Name._infer = bases.path_wrapper(infer_name) +nodes.Name._infer = decorators.path_wrapper(infer_name) nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper 
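The infer_seq/_infer_seq and infer_map/_infer_map handlers added above make container literals that use PEP 448-style unpacking infer to a flattened container. A short sketch of the observable effect, assuming a Python 3.5+ interpreter so that the starred literal parses (illustrative only):

    from astroid import extract_node, nodes

    node = extract_node('[1, 2, *[3, 4]]')
    inferred = next(node.infer())

    # The starred element is expanded by _infer_seq, so the inferred List
    # holds four constants instead of a Starred node.
    assert isinstance(inferred, nodes.List)
    assert len(inferred.elts) == 4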
-@bases.path_wrapper -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred +@decorators.path_wrapper def infer_call(self, context=None): """infer a Call node by trying to guess what the function returns""" callcontext = context.clone() @@ -98,7 +163,7 @@ keywords=self.keywords) callcontext.boundnode = None for callee in self.func.infer(context): - if callee is util.YES: + if callee is util.Uninferable: yield callee continue try: @@ -108,19 +173,28 @@ except exceptions.InferenceError: ## XXX log error ? continue + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, context=context)) nodes.Call._infer = infer_call -@bases.path_wrapper +@decorators.path_wrapper def infer_import(self, context=None, asname=True): """infer an Import node: return the imported module/object""" name = context.lookupname if name is None: - raise exceptions.InferenceError() - if asname: - yield self.do_import_module(self.real_name(name)) - else: - yield self.do_import_module(name) + raise exceptions.InferenceError(node=self, context=context) + + try: + if asname: + yield self.do_import_module(self.real_name(name)) + else: + yield self.do_import_module(name) + except exceptions.AstroidBuildingError as exc: + util.reraise(exceptions.InferenceError(node=self, error=exc, + context=context)) + nodes.Import._infer = infer_import @@ -131,59 +205,89 @@ nodes.Import.infer_name_module = infer_name_module -@bases.path_wrapper +@decorators.path_wrapper def infer_import_from(self, context=None, asname=True): """infer a ImportFrom node: return the imported module/object""" name = context.lookupname if name is None: - raise exceptions.InferenceError() + raise exceptions.InferenceError(node=self, context=context) if asname: name = self.real_name(name) - module = self.do_import_module() + + try: + module = self.do_import_module() + except exceptions.AstroidBuildingError as exc: + util.reraise(exceptions.InferenceError(node=self, error=exc, + context=context)) + try: context = contextmod.copy_context(context) context.lookupname = name stmts = module.getattr(name, ignore_locals=module is self.root()) return bases._infer_stmts(stmts, context) - except exceptions.NotFoundError: - raise exceptions.InferenceError(name) + except exceptions.AttributeInferenceError as error: + util.reraise(exceptions.InferenceError( + error.message, target=self, attribute=name, context=context)) nodes.ImportFrom._infer = infer_import_from -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred def infer_attribute(self, context=None): """infer an Attribute node by using getattr on the associated object""" for owner in self.expr.infer(context): - if owner is util.YES: + if owner is util.Uninferable: yield owner continue + + if context and context.boundnode: + # This handles the situation where the attribute is accessed through a subclass + # of a base class and the attribute is defined at the base class's level, + # by taking in consideration a redefinition in the subclass. + if (isinstance(owner, bases.Instance) + and isinstance(context.boundnode, bases.Instance)): + try: + if helpers.is_subtype(helpers.object_type(context.boundnode), + helpers.object_type(owner)): + owner = context.boundnode + except exceptions._NonDeducibleTypeHierarchy: + # Can't determine anything useful. 
+ pass + try: context.boundnode = owner for obj in owner.igetattr(self.attrname, context): yield obj context.boundnode = None - except (exceptions.NotFoundError, exceptions.InferenceError): + except (exceptions.AttributeInferenceError, exceptions.InferenceError): context.boundnode = None except AttributeError: # XXX method / function context.boundnode = None -nodes.Attribute._infer = bases.path_wrapper(infer_attribute) + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, context=context)) +nodes.Attribute._infer = decorators.path_wrapper(infer_attribute) nodes.AssignAttr.infer_lhs = infer_attribute # # won't work with a path wrapper -@bases.path_wrapper +@decorators.path_wrapper def infer_global(self, context=None): if context.lookupname is None: - raise exceptions.InferenceError() + raise exceptions.InferenceError(node=self, context=context) try: return bases._infer_stmts(self.root().getattr(context.lookupname), context) - except exceptions.NotFoundError: - raise exceptions.InferenceError() + except exceptions.AttributeInferenceError as error: + util.reraise(exceptions.InferenceError( + error.message, target=self, attribute=context.lookupname, + context=context)) nodes.Global._infer = infer_global -@bases.raise_if_nothing_inferred +_SUBSCRIPT_SENTINEL = object() + + +@decorators.raise_if_nothing_inferred def infer_subscript(self, context=None): """Inference for subscripts @@ -194,100 +298,461 @@ """ value = next(self.value.infer(context)) - if value is util.YES: - yield util.YES + if value is util.Uninferable: + yield util.Uninferable return index = next(self.slice.infer(context)) - if index is util.YES: - yield util.YES + if index is util.Uninferable: + yield util.Uninferable return - if isinstance(index, nodes.Const): - try: - assigned = value.getitem(index.value, context) - except AttributeError: - raise exceptions.InferenceError() - except (IndexError, TypeError): - yield util.YES - return - - # Prevent inferring if the infered subscript - # is the same as the original subscripted object. - if self is assigned or assigned is util.YES: - yield util.YES - return - for infered in assigned.infer(context): - yield infered + # Try to deduce the index value. + index_value = _SUBSCRIPT_SENTINEL + if value.__class__ == bases.Instance: + index_value = index else: - raise exceptions.InferenceError() -nodes.Subscript._infer = bases.path_wrapper(infer_subscript) + if index.__class__ == bases.Instance: + instance_as_index = helpers.class_instance_as_index(index) + if instance_as_index: + index_value = instance_as_index + else: + index_value = index + if index_value is _SUBSCRIPT_SENTINEL: + raise exceptions.InferenceError(node=self, context=context) + + try: + assigned = value.getitem(index_value, context) + except (exceptions.AstroidTypeError, + exceptions.AstroidIndexError, + exceptions.AttributeInferenceError, + AttributeError) as exc: + util.reraise(exceptions.InferenceError(node=self, error=exc, + context=context)) + + # Prevent inferring if the inferred subscript + # is the same as the original subscripted object. + if self is assigned or assigned is util.Uninferable: + yield util.Uninferable + return + for inferred in assigned.infer(context): + yield inferred + + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. 
+ raise StopIteration(dict(node=self, context=context)) + +nodes.Subscript._infer = decorators.path_wrapper(infer_subscript) nodes.Subscript.infer_lhs = infer_subscript -@bases.raise_if_nothing_inferred -def infer_unaryop(self, context=None): + +@decorators.raise_if_nothing_inferred +@decorators.path_wrapper +def _infer_boolop(self, context=None): + """Infer a boolean operation (and / or / not). + + The function will calculate the boolean operation + for all pairs generated through inference for each component + node. + """ + values = self.values + if self.op == 'or': + predicate = operator.truth + else: + predicate = operator.not_ + + try: + values = [value.infer(context=context) for value in values] + except exceptions.InferenceError: + yield util.Uninferable + return + + for pair in itertools.product(*values): + if any(item is util.Uninferable for item in pair): + # Can't infer the final result, just yield Uninferable. + yield util.Uninferable + continue + + bool_values = [item.bool_value() for item in pair] + if any(item is util.Uninferable for item in bool_values): + # Can't infer the final result, just yield Uninferable. + yield util.Uninferable + continue + + # Since the boolean operations are short circuited operations, + # this code yields the first value for which the predicate is True + # and if no value respected the predicate, then the last value will + # be returned (or Uninferable if there was no last value). + # This is conforming to the semantics of `and` and `or`: + # 1 and 0 -> 1 + # 0 and 1 -> 0 + # 1 or 0 -> 1 + # 0 or 1 -> 1 + value = util.Uninferable + for value, bool_value in zip(pair, bool_values): + if predicate(bool_value): + yield value + break + else: + yield value + + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, context=context)) + +nodes.BoolOp._infer = _infer_boolop + + +# UnaryOp, BinOp and AugAssign inferences + +def _filter_operation_errors(self, infer_callable, context, error): + for result in infer_callable(self, context): + if isinstance(result, error): + # For the sake of .infer(), we don't care about operation + # errors, which is the job of pylint. So return something + # which shows that we can't infer the result. + yield util.Uninferable + else: + yield result + + +def _infer_unaryop(self, context=None): + """Infer what an UnaryOp should return when evaluated.""" for operand in self.operand.infer(context): try: yield operand.infer_unary_op(self.op) - except TypeError: - continue - except AttributeError: + except TypeError as exc: + # The operand doesn't support this operation. + yield util.BadUnaryOperationMessage(operand, self.op, exc) + except AttributeError as exc: meth = protocols.UNARY_OP_METHOD[self.op] if meth is None: - yield util.YES + # `not node`. Determine node's boolean + # value and negate its result, unless it is + # Uninferable, which will be returned as is. + bool_value = operand.bool_value() + if bool_value is not util.Uninferable: + yield nodes.const_factory(not bool_value) + else: + yield util.Uninferable else: + if not isinstance(operand, (bases.Instance, nodes.ClassDef)): + # The operation was used on something which + # doesn't support it. 
+ yield util.BadUnaryOperationMessage(operand, self.op, exc) + continue + try: - # XXX just suppose if the type implement meth, returned type - # will be the same - operand.getattr(meth) - yield operand - except GeneratorExit: - raise - except: - yield util.YES -nodes.UnaryOp._infer = bases.path_wrapper(infer_unaryop) - -def _infer_binop(binop, operand1, operand2, context, failures=None): - if operand1 is util.YES: - yield operand1 - return - try: - for valnode in operand1.infer_binary_op(binop, operand2, context): - yield valnode - except AttributeError: + try: + methods = dunder_lookup.lookup(operand, meth) + except exceptions.AttributeInferenceError: + yield util.BadUnaryOperationMessage(operand, self.op, exc) + continue + + meth = methods[0] + inferred = next(meth.infer(context=context)) + if inferred is util.Uninferable or not inferred.callable(): + continue + + context = contextmod.copy_context(context) + context.callcontext = contextmod.CallContext(args=[operand]) + call_results = inferred.infer_call_result(self, context=context) + result = next(call_results, None) + if result is None: + # Failed to infer, return the same type. + yield operand + else: + yield result + except exceptions.AttributeInferenceError as exc: + # The unary operation special method was not found. + yield util.BadUnaryOperationMessage(operand, self.op, exc) + except exceptions.InferenceError: + yield util.Uninferable + + +@decorators.raise_if_nothing_inferred +@decorators.path_wrapper +def infer_unaryop(self, context=None): + """Infer what an UnaryOp should return when evaluated.""" + for inferred in _filter_operation_errors(self, _infer_unaryop, context, + util.BadUnaryOperationMessage): + yield inferred + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, context=context)) + +nodes.UnaryOp._infer_unaryop = _infer_unaryop +nodes.UnaryOp._infer = infer_unaryop + + +def _is_not_implemented(const): + """Check if the given const node is NotImplemented.""" + return isinstance(const, nodes.Const) and const.value is NotImplemented + + +def _invoke_binop_inference(instance, opnode, op, other, context, method_name): + """Invoke binary operation inference on the given instance.""" + methods = dunder_lookup.lookup(instance, method_name) + method = methods[0] + inferred = next(method.infer(context=context)) + return instance.infer_binary_op(opnode, op, other, context, inferred) + + +def _aug_op(instance, opnode, op, other, context, reverse=False): + """Get an inference callable for an augmented binary operation.""" + method_name = protocols.AUGMENTED_OP_METHOD[op] + return functools.partial(_invoke_binop_inference, + instance=instance, + op=op, opnode=opnode, other=other, + context=context, + method_name=method_name) + + +def _bin_op(instance, opnode, op, other, context, reverse=False): + """Get an inference callable for a normal binary operation. + + If *reverse* is True, then the reflected method will be used instead. + """ + if reverse: + method_name = protocols.REFLECTED_BIN_OP_METHOD[op] + else: + method_name = protocols.BIN_OP_METHOD[op] + return functools.partial(_invoke_binop_inference, + instance=instance, + op=op, opnode=opnode, other=other, + context=context, + method_name=method_name) + + +def _get_binop_contexts(context, left, right): + """Get contexts for binary operations. 
+ + This will return two inferrence contexts, the first one + for x.__op__(y), the other one for y.__rop__(x), where + only the arguments are inversed. + """ + # The order is important, since the first one should be + # left.__op__(right). + for arg in (right, left): + new_context = context.clone() + new_context.callcontext = contextmod.CallContext(args=[arg]) + new_context.boundnode = None + yield new_context + + +def _same_type(type1, type2): + """Check if type1 is the same as type2.""" + return type1.qname() == type2.qname() + + +def _get_binop_flow(left, left_type, binary_opnode, right, right_type, + context, reverse_context): + """Get the flow for binary operations. + + The rules are a bit messy: + + * if left and right have the same type, then only one + method will be called, left.__op__(right) + * if left and right are unrelated typewise, then first + left.__op__(right) is tried and if this does not exist + or returns NotImplemented, then right.__rop__(left) is tried. + * if left is a subtype of right, then only left.__op__(right) + is tried. + * if left is a supertype of right, then right.__rop__(left) + is first tried and then left.__op__(right) + """ + op = binary_opnode.op + if _same_type(left_type, right_type): + methods = [_bin_op(left, binary_opnode, op, right, context)] + elif helpers.is_subtype(left_type, right_type): + methods = [_bin_op(left, binary_opnode, op, right, context)] + elif helpers.is_supertype(left_type, right_type): + methods = [_bin_op(right, binary_opnode, op, left, reverse_context, reverse=True), + _bin_op(left, binary_opnode, op, right, context)] + else: + methods = [_bin_op(left, binary_opnode, op, right, context), + _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True)] + return methods + + +def _get_aug_flow(left, left_type, aug_opnode, right, right_type, + context, reverse_context): + """Get the flow for augmented binary operations. + + The rules are a bit messy: + + * if left and right have the same type, then left.__augop__(right) + is first tried and then left.__op__(right). + * if left and right are unrelated typewise, then + left.__augop__(right) is tried, then left.__op__(right) + is tried and then right.__rop__(left) is tried. + * if left is a subtype of right, then left.__augop__(right) + is tried and then left.__op__(right). 
+ * if left is a supertype of right, then left.__augop__(right) + is tried, then right.__rop__(left) and then + left.__op__(right) + """ + bin_op = aug_opnode.op.strip("=") + aug_op = aug_opnode.op + if _same_type(left_type, right_type): + methods = [_aug_op(left, aug_opnode, aug_op, right, context), + _bin_op(left, aug_opnode, bin_op, right, context)] + elif helpers.is_subtype(left_type, right_type): + methods = [_aug_op(left, aug_opnode, aug_op, right, context), + _bin_op(left, aug_opnode, bin_op, right, context)] + elif helpers.is_supertype(left_type, right_type): + methods = [_aug_op(left, aug_opnode, aug_op, right, context), + _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True), + _bin_op(left, aug_opnode, bin_op, right, context)] + else: + methods = [_aug_op(left, aug_opnode, aug_op, right, context), + _bin_op(left, aug_opnode, bin_op, right, context), + _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True)] + return methods + + +def _infer_binary_operation(left, right, binary_opnode, context, flow_factory): + """Infer a binary operation between a left operand and a right operand + + This is used by both normal binary operations and augmented binary + operations, the only difference is the flow factory used. + """ + + context, reverse_context = _get_binop_contexts(context, left, right) + left_type = helpers.object_type(left) + right_type = helpers.object_type(right) + methods = flow_factory(left, left_type, binary_opnode, right, right_type, + context, reverse_context) + for method in methods: try: - # XXX just suppose if the type implement meth, returned type - # will be the same - operand1.getattr(protocols.BIN_OP_METHOD[operator]) - yield operand1 - except: - if failures is None: - yield util.YES - else: - failures.append(operand1) + results = list(method()) + except AttributeError: + continue + except exceptions.AttributeInferenceError: + continue + except exceptions.InferenceError: + yield util.Uninferable + return + else: + if any(result is util.Uninferable for result in results): + yield util.Uninferable + return + + # TODO(cpopa): since the inference engine might return + # more values than are actually possible, we decide + # to return util.Uninferable if we have union types. + if all(map(_is_not_implemented, results)): + continue + not_implemented = sum(1 for result in results + if _is_not_implemented(result)) + if not_implemented and not_implemented != len(results): + # Can't decide yet what this is, not yet though. + yield util.Uninferable + return -@bases.yes_if_nothing_inferred + for result in results: + yield result + return + # TODO(cpopa): yield a BadBinaryOperationMessage here, + # since the operation is not supported + yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type) + + +def _infer_binop(self, context): + """Binary operation inferrence logic.""" + if context is None: + context = contextmod.InferenceContext() + left = self.left + right = self.right + + # we use two separate contexts for evaluating lhs and rhs because + # 1. evaluating lhs may leave some undesired entries in context.path + # which may not let us infer right value of rhs + lhs_context = context.clone() + rhs_context = context.clone() + + for lhs in left.infer(context=lhs_context): + if lhs is util.Uninferable: + # Don't know how to process this. + yield util.Uninferable + return + + for rhs in right.infer(context=rhs_context): + if rhs is util.Uninferable: + # Don't know how to process this. 
+ yield util.Uninferable + return + + try: + for result in _infer_binary_operation(lhs, rhs, self, + context, _get_binop_flow): + yield result + except exceptions._NonDeducibleTypeHierarchy: + yield util.Uninferable + + +@decorators.yes_if_nothing_inferred +@decorators.path_wrapper def infer_binop(self, context=None): - failures = [] - for lhs in self.left.infer(context): - for val in _infer_binop(self, lhs, self.right, context, failures): - yield val - for lhs in failures: - for rhs in self.right.infer(context): - for val in _infer_binop(self, rhs, lhs, context): - yield val -nodes.BinOp._infer = bases.path_wrapper(infer_binop) + return _filter_operation_errors(self, _infer_binop, context, + util.BadBinaryOperationMessage) + +nodes.BinOp._infer_binop = _infer_binop +nodes.BinOp._infer = infer_binop + + +def _infer_augassign(self, context=None): + """Inference logic for augmented binary operations.""" + if context is None: + context = contextmod.InferenceContext() + + for lhs in self.target.infer_lhs(context=context): + if lhs is util.Uninferable: + # Don't know how to process this. + yield util.Uninferable + return + + # TODO(cpopa): if we have A() * A(), trying to infer + # the rhs with the same context will result in an + # inference error, so we create another context for it. + # This is a bug which should be fixed in InferenceContext at some point. + rhs_context = context.clone() + rhs_context.path = set() + for rhs in self.value.infer(context=rhs_context): + if rhs is util.Uninferable: + # Don't know how to process this. + yield util.Uninferable + return + + try: + results = _infer_binary_operation(lhs, rhs, self, context, _get_aug_flow) + except exceptions._NonDeducibleTypeHierarchy: + yield util.Uninferable + else: + for result in results: + yield result + + +@decorators.path_wrapper +def infer_augassign(self, context=None): + return _filter_operation_errors(self, _infer_augassign, context, + util.BadBinaryOperationMessage) + +nodes.AugAssign._infer_augassign = _infer_augassign +nodes.AugAssign._infer = infer_augassign + +# End of binary operation inference. def infer_arguments(self, context=None): name = context.lookupname if name is None: - raise exceptions.InferenceError() + raise exceptions.InferenceError(node=self, context=context) return protocols._arguments_infer_argname(self, name, context) nodes.Arguments._infer = infer_arguments -@bases.path_wrapper +@decorators.path_wrapper def infer_assign(self, context=None): """infer a AssignName/AssignAttr: need to inspect the RHS part of the assign node @@ -301,31 +766,20 @@ nodes.AssignName._infer = infer_assign nodes.AssignAttr._infer = infer_assign -def infer_augassign(self, context=None): - failures = [] - for lhs in self.target.infer_lhs(context): - for val in _infer_binop(self, lhs, self.value, context, failures): - yield val - for lhs in failures: - for rhs in self.value.infer(context): - for val in _infer_binop(self, rhs, lhs, context): - yield val -nodes.AugAssign._infer = bases.path_wrapper(infer_augassign) - # no infer method on DelName and DelAttr (expected InferenceError) -@bases.path_wrapper +@decorators.path_wrapper def infer_empty_node(self, context=None): if not self.has_underlying_object(): - yield util.YES + yield util.Uninferable else: try: for inferred in MANAGER.infer_ast_from_something(self.object, context=context): yield inferred except exceptions.AstroidError: - yield util.YES + yield util.Uninferable nodes.EmptyNode._infer = infer_empty_node @@ -337,7 +791,6 @@ # will be solved. 
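As the comment in _filter_operation_errors above notes, .infer() deliberately swallows BadBinaryOperationMessage results and reports Uninferable instead, leaving the actual diagnostics to pylint. A small sketch of what callers observe for an unsupported operation (illustrative only, not part of the patch):

    from astroid import extract_node, util

    node = extract_node('1 + "a"')

    # Neither int.__add__ nor a reflected str method accepts the operands,
    # so public inference yields Uninferable rather than surfacing the
    # BadBinaryOperationMessage.
    assert next(node.infer()) is util.Uninferable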
def instance_getitem(self, index, context=None): # Rewrap index to Const for this case - index = nodes.Const(index) if context: new_context = context.clone() else: @@ -349,11 +802,15 @@ method = next(self.igetattr('__getitem__', context=context)) if not isinstance(method, bases.BoundMethod): - raise exceptions.InferenceError + raise exceptions.InferenceError( + 'Could not find __getitem__ for {node!r}.', + node=self, context=context) try: return next(method.infer_call_result(self, new_context)) except StopIteration: - raise exceptions.InferenceError + util.reraise(exceptions.InferenceError( + message='Inference for {node!r}[{index!s}] failed.', + node=self, index=index, context=context)) bases.Instance.getitem = instance_getitem diff -Nru astroid-1.4.9/astroid/__init__.py astroid-1.5.3/astroid/__init__.py --- astroid-1.4.9/astroid/__init__.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/__init__.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,10 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Python Abstract Syntax Tree New Generation The aim of this module is to provide a common base representation of @@ -39,14 +29,27 @@ * builder contains the class responsible to build astroid trees """ -__doctype__ = "restructuredtext en" +import os import sys import re from operator import attrgetter +import enum + + +_Context = enum.Enum('Context', 'Load Store Del') +Load = _Context.Load +Store = _Context.Store +Del = _Context.Del +del _Context + + +from .__pkginfo__ import version as __version__ # WARNING: internal imports order matters ! 
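The package-level Load/Store/Del enum members introduced just above, together with the extract_node and Uninferable re-exports added a few lines further down in this same file, keep user code on the top-level astroid namespace. A minimal sketch (illustrative only):

    import astroid

    # The new context markers are distinct enum members.
    assert astroid.Load is not astroid.Store

    # extract_node and constant folding through inference are reachable
    # from the package root.
    node = astroid.extract_node('1 + 1')
    assert next(node.infer()).value == 2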
+# pylint: disable=redefined-builtin, wildcard-import + # make all exception classes accessible from astroid package from astroid.exceptions import * @@ -58,14 +61,13 @@ # more stuff available from astroid import raw_building -from astroid.bases import Instance, BoundMethod, UnboundMethod +from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod from astroid.node_classes import are_exclusive, unpack_infer from astroid.scoped_nodes import builtin_lookup -from astroid.builder import parse -from astroid.util import YES +from astroid.builder import parse, extract_node +from astroid.util import Uninferable, YES -# make a manager instance (borg) as well as Project and Package classes -# accessible from astroid package +# make a manager instance (borg) accessible from astroid package from astroid.manager import AstroidManager MANAGER = AstroidManager() del AstroidManager @@ -73,7 +75,7 @@ # transform utilities (filters and decorator) class AsStringRegexpPredicate(object): - """Class to be used as predicate that may be given to `register_transform` + """ClassDef to be used as predicate that may be given to `register_transform` First argument is a regular expression that will be searched against the `as_string` representation of the node onto which it's applied. @@ -92,6 +94,7 @@ def __call__(self, node): if self.expression is not None: node = attrgetter(self.expression)(node) + # pylint: disable=no-member; github.com/pycqa/astroid/126 return self.regexp.search(node.as_string()) def inference_tip(infer_function): @@ -114,8 +117,8 @@ def register_module_extender(manager, module_name, get_extension_mod): def transform(node): extension_module = get_extension_mod() - for name, objs in extension_module._locals.items(): - node._locals[name] = objs + for name, objs in extension_module.locals.items(): + node.locals[name] = objs for obj in objs: if obj.parent is extension_module: obj.parent = node @@ -124,13 +127,11 @@ # load brain plugins -from os import listdir -from os.path import join, dirname -BRAIN_MODULES_DIR = join(dirname(__file__), 'brain') +BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), 'brain') if BRAIN_MODULES_DIR not in sys.path: # add it to the end of the list so user path take precedence sys.path.append(BRAIN_MODULES_DIR) # load modules in this directory -for module in listdir(BRAIN_MODULES_DIR): +for module in os.listdir(BRAIN_MODULES_DIR): if module.endswith('.py'): __import__(module[:-3]) diff -Nru astroid-1.4.9/astroid/interpreter/dunder_lookup.py astroid-1.5.3/astroid/interpreter/dunder_lookup.py --- astroid-1.4.9/astroid/interpreter/dunder_lookup.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/interpreter/dunder_lookup.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,81 @@ +# Copyright (c) 2016 Claudiu Popa +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +"""Contains logic for retrieving special methods. + +This implementation does not rely on the dot attribute access +logic, found in ``.getattr()``. The difference between these two +is that the dunder methods are looked with the type slots +(you can find more about these here +http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/) +As such, the lookup for the special methods is actually simpler than +the dot attribute access. 
+""" +import itertools + +import astroid +from astroid import exceptions + + +def _lookup_in_mro(node, name): + attrs = node.locals.get(name, []) + + nodes = itertools.chain.from_iterable( + ancestor.locals.get(name, []) + for ancestor in node.ancestors(recurs=True) + ) + values = list(itertools.chain(attrs, nodes)) + if not values: + raise exceptions.AttributeInferenceError( + attribute=name, + target=node + ) + + return values + + +def lookup(node, name): + """Lookup the given special method name in the given *node* + + If the special method was found, then a list of attributes + will be returned. Otherwise, `astroid.AttributeInferenceError` + is going to be raised. + """ + if isinstance(node, (astroid.List, + astroid.Tuple, + astroid.Const, + astroid.Dict, + astroid.Set)): + return _builtin_lookup(node, name) + elif isinstance(node, astroid.Instance): + return _lookup_in_mro(node, name) + elif isinstance(node, astroid.ClassDef): + return _class_lookup(node, name) + + raise exceptions.AttributeInferenceError( + attribute=name, + target=node + ) + + +def _class_lookup(node, name): + metaclass = node.metaclass() + if metaclass is None: + raise exceptions.AttributeInferenceError( + attribute=name, + target=node + ) + + return _lookup_in_mro(metaclass, name) + + +def _builtin_lookup(node, name): + values = node.locals.get(name, []) + if not values: + raise exceptions.AttributeInferenceError( + attribute=name, + target=node + ) + + return values diff -Nru astroid-1.4.9/astroid/interpreter/_import/spec.py astroid-1.5.3/astroid/interpreter/_import/spec.py --- astroid-1.4.9/astroid/interpreter/_import/spec.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/interpreter/_import/spec.py 2017-06-03 13:47:01.000000000 +0000 @@ -0,0 +1,287 @@ +# Copyright (c) 2016 Claudiu Popa + +import abc +import collections +import enum +import imp +import os +import sys +import zipimport +try: + import importlib.machinery + _HAS_MACHINERY = True +except ImportError: + _HAS_MACHINERY = False + +try: + from functools import lru_cache +except ImportError: + from backports.functools_lru_cache import lru_cache + +from . import util + +ModuleType = enum.Enum('ModuleType', 'C_BUILTIN C_EXTENSION PKG_DIRECTORY ' + 'PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE ' + 'PY_SOURCE PY_ZIPMODULE PY_NAMESPACE') +_ImpTypes = {imp.C_BUILTIN: ModuleType.C_BUILTIN, + imp.C_EXTENSION: ModuleType.C_EXTENSION, + imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY, + imp.PY_COMPILED: ModuleType.PY_COMPILED, + imp.PY_FROZEN: ModuleType.PY_FROZEN, + imp.PY_SOURCE: ModuleType.PY_SOURCE, + } +if hasattr(imp, 'PY_RESOURCE'): + _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE +if hasattr(imp, 'PY_CODERESOURCE'): + _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE + +def _imp_type_to_module_type(imp_type): + return _ImpTypes[imp_type] + +_ModuleSpec = collections.namedtuple('_ModuleSpec', 'name type location ' + 'origin submodule_search_locations') + +class ModuleSpec(_ModuleSpec): + """Defines a class similar to PEP 420's ModuleSpec + + A module spec defines a name of a module, its type, location + and where submodules can be found, if the module is a package. 
+ """ + + def __new__(cls, name, module_type, location=None, origin=None, + submodule_search_locations=None): + return _ModuleSpec.__new__(cls, name=name, type=module_type, + location=location, origin=origin, + submodule_search_locations=submodule_search_locations) + + +class Finder(object): + """A finder is a class which knows how to find a particular module.""" + + def __init__(self, path=None): + self._path = path or sys.path + + @abc.abstractmethod + def find_module(self, modname, module_parts, processed, submodule_path): + """Find the given module + + Each finder is responsible for each protocol of finding, as long as + they all return a ModuleSpec. + + :param str modname: The module which needs to be searched. + :param list module_parts: It should be a list of strings, + where each part contributes to the module's + namespace. + :param list processed: What parts from the module parts were processed + so far. + :param list submodule_path: A list of paths where the module + can be looked into. + :returns: A ModuleSpec, describing how and where the module was found, + None, otherwise. + """ + + def contribute_to_path(self, spec, processed): + """Get a list of extra paths where this finder can search.""" + + +class ImpFinder(Finder): + """A finder based on the imp module.""" + + def find_module(self, modname, module_parts, processed, submodule_path): + if submodule_path is not None: + submodule_path = list(submodule_path) + try: + stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path) + except ImportError: + return None + + # Close resources. + if stream: + stream.close() + + return ModuleSpec(name=modname, location=mp_filename, + module_type=_imp_type_to_module_type(mp_desc[2])) + + def contribute_to_path(self, spec, processed): + if spec.location is None: + # Builtin. 
+ return None + + if _is_setuptools_namespace(spec.location): + # extend_path is called, search sys.path for module/packages + # of this name see pkgutil.extend_path documentation + path = [os.path.join(p, *processed) for p in sys.path + if os.path.isdir(os.path.join(p, *processed))] + else: + path = [spec.location] + return path + + +class ExplicitNamespacePackageFinder(ImpFinder): + """A finder for the explicit namespace packages, generated through pkg_resources.""" + + def find_module(self, modname, module_parts, processed, submodule_path): + if util.is_namespace(modname) and modname in sys.modules: + submodule_path = sys.modules[modname].__path__ + return ModuleSpec(name=modname, location='', + origin='namespace', + module_type=ModuleType.PY_NAMESPACE, + submodule_search_locations=submodule_path) + + + def contribute_to_path(self, spec, processed): + return spec.submodule_search_locations + + +class ZipFinder(Finder): + """Finder that knows how to find a module inside zip files.""" + + def __init__(self, path): + super(ZipFinder, self).__init__(path) + self._zipimporters = _precache_zipimporters(path) + + def find_module(self, modname, module_parts, processed, submodule_path): + try: + file_type, filename, path = _search_zip(module_parts, self._zipimporters) + except ImportError: + return None + + return ModuleSpec(name=modname, location=filename, + origin='egg', module_type=file_type, + submodule_search_locations=path) + + +class PathSpecFinder(Finder): + """Finder based on importlib.machinery.PathFinder.""" + + def find_module(self, modname, module_parts, processed, submodule_path): + spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path) + if spec: + location = spec.origin if spec.origin != 'namespace' else None + module_type = ModuleType.PY_NAMESPACE if spec.origin == 'namespace' else None + spec = ModuleSpec(name=spec.name, location=location, + origin=spec.origin, module_type=module_type, + submodule_search_locations=list(spec.submodule_search_locations + or [])) + return spec + + def contribute_to_path(self, spec, processed): + if spec.type == ModuleType.PY_NAMESPACE: + return spec.submodule_search_locations + return None + + +_SPEC_FINDERS = ( + ImpFinder, + ZipFinder, +) +if _HAS_MACHINERY and sys.version_info[:2] > (3, 3): + _SPEC_FINDERS += (PathSpecFinder, ) +_SPEC_FINDERS += (ExplicitNamespacePackageFinder, ) + + +def _is_setuptools_namespace(location): + try: + with open(os.path.join(location, '__init__.py'), 'rb') as stream: + data = stream.read(4096) + except IOError: + pass + else: + extend_path = b'pkgutil' in data and b'extend_path' in data + declare_namespace = ( + b"pkg_resources" in data + and b"declare_namespace(__name__)" in data) + return extend_path or declare_namespace + + +@lru_cache() +def _cached_set_diff(left, right): + result = set(left) + result.difference_update(right) + return result + + +def _precache_zipimporters(path=None): + pic = sys.path_importer_cache + + # When measured, despite having the same complexity (O(n)), + # converting to tuples and then caching the conversion to sets + # and the set difference is faster than converting to sets + # and then only caching the set difference. 
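# A small self-contained illustration of the memoisation trick described in
# the comment above: functools.lru_cache only accepts hashable arguments, so
# the path collections are passed as tuples and the set difference is cached
# per (left, right) pair of tuples.  The name cached_diff is hypothetical,
# mirroring _cached_set_diff.
from functools import lru_cache

@lru_cache()
def cached_diff(left, right):
    result = set(left)
    result.difference_update(right)
    return result

print(cached_diff(('a', 'b', 'c'), ('b',)))     # {'a', 'c'} (set order arbitrary)
print(cached_diff.cache_info().hits)            # 0 -- the first call was a miss
cached_diff(('a', 'b', 'c'), ('b',))            # identical tuple arguments...
print(cached_diff.cache_info().hits)            # 1 -- ...are served from the cache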
+ + req_paths = tuple(path or sys.path) + cached_paths = tuple(pic) + new_paths = _cached_set_diff(req_paths, cached_paths) + for entry_path in new_paths: + try: + pic[entry_path] = zipimport.zipimporter(entry_path) + except zipimport.ZipImportError: + continue + return pic + + +def _search_zip(modpath, pic): + for filepath, importer in list(pic.items()): + if importer is not None: + found = importer.find_module(modpath[0]) + if found: + if not importer.find_module(os.path.sep.join(modpath)): + raise ImportError('No module named %s in %s/%s' % ( + '.'.join(modpath[1:]), filepath, modpath)) + #import code; code.interact(local=locals()) + return (ModuleType.PY_ZIPMODULE, + os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), + filepath) + raise ImportError('No module named %s' % '.'.join(modpath)) + + +def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path): + finders = [finder(search_path) for finder in _SPEC_FINDERS] + for finder in finders: + spec = finder.find_module(modname, module_parts, processed, submodule_path) + if spec is None: + continue + return finder, spec + + raise ImportError('No module named %s' % '.'.join(module_parts)) + + +def find_spec(modpath, path=None): + """Find a spec for the given module. + + :type modpath: list or tuple + :param modpath: + split module's name (i.e name of a module or package split + on '.'), with leading empty strings for explicit relative import + + :type path: list or None + :param path: + optional list of path where the module or package should be + searched (use sys.path if nothing or None is given) + + :rtype: ModuleSpec + :return: A module spec, which describes how the module was + found and where. + """ + _path = path or sys.path + + # Need a copy for not mutating the argument. + modpath = modpath[:] + + submodule_path = None + module_parts = modpath[:] + processed = [] + + while modpath: + modname = modpath.pop(0) + finder, spec = _find_spec_with_path(_path, modname, + module_parts, processed, + submodule_path or path) + processed.append(modname) + if modpath: + submodule_path = finder.contribute_to_path(spec, processed) + + if spec.type == ModuleType.PKG_DIRECTORY: + spec = spec._replace(submodule_search_locations=submodule_path) + + return spec diff -Nru astroid-1.4.9/astroid/interpreter/_import/util.py astroid-1.5.3/astroid/interpreter/_import/util.py --- astroid-1.4.9/astroid/interpreter/_import/util.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/interpreter/_import/util.py 2017-03-11 13:04:27.000000000 +0000 @@ -0,0 +1,12 @@ +# Copyright (c) 2016 Claudiu Popa + +try: + import pkg_resources +except ImportError: + pkg_resources = None + + +def is_namespace(modname): + # pylint: disable=no-member; astroid issue #290, modifying globals at runtime. + return (pkg_resources is not None + and modname in pkg_resources._namespace_packages) diff -Nru astroid-1.4.9/astroid/interpreter/objectmodel.py astroid-1.5.3/astroid/interpreter/objectmodel.py --- astroid-1.4.9/astroid/interpreter/objectmodel.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/interpreter/objectmodel.py 2017-04-12 14:10:18.000000000 +0000 @@ -0,0 +1,632 @@ +# Copyright (c) 2016 Claudiu Popa +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER +""" +Data object model, as per https://docs.python.org/3/reference/datamodel.html. 
+ +This module describes, at least partially, a data object model for some +of astroid's nodes. The model contains special attributes that nodes such +as functions, classes, modules etc have, such as __doc__, __class__, +__module__ etc, being used when doing attribute lookups over nodes. + +For instance, inferring `obj.__class__` will first trigger an inference +of the `obj` variable. If it was succesfully inferred, then an attribute +`__class__ will be looked for in the inferred object. This is the part +where the data model occurs. The model is attached to those nodes +and the lookup mechanism will try to see if attributes such as +`__class__` are defined by the model or not. If they are defined, +the model will be requested to return the corresponding value of that +attribute. Thus the model can be viewed as a special part of the lookup +mechanism. +""" + +try: + from functools import lru_cache +except ImportError: + from backports.functools_lru_cache import lru_cache + +import itertools +import pprint +import os +import types + +import six + +import astroid +from astroid import context as contextmod +from astroid import exceptions +from astroid import node_classes + + +def _dunder_dict(instance, attributes): + obj = node_classes.Dict(parent=instance) + + # Convert the keys to node strings + keys = [node_classes.Const(value=value, parent=obj) + for value in list(attributes.keys())] + + # The original attribute has a list of elements for each key, + # but that is not useful for retrieving the special attribute's value. + # In this case, we're picking the last value from each list. + values = [elem[-1] for elem in attributes.values()] + + obj.postinit(list(zip(keys, values))) + return obj + + +class ObjectModel(object): + + def __init__(self): + self._instance = None + + def __repr__(self): + result = [] + cname = type(self).__name__ + string = '%(cname)s(%(fields)s)' + alignment = len(cname) + 1 + for field in sorted(self.attributes()): + width = 80 - len(field) - alignment + lines = pprint.pformat(field, indent=2, + width=width).splitlines(True) + + inner = [lines[0]] + for line in lines[1:]: + inner.append(' ' * alignment + line) + result.append(field) + + return string % {'cname': cname, + 'fields': (',\n' + ' ' * alignment).join(result)} + + def __call__(self, instance): + self._instance = instance + return self + + def __get__(self, instance, cls=None): + # ObjectModel needs to be a descriptor so that just doing + # `special_attributes = SomeObjectModel` should be enough in the body of a node. + # But at the same time, node.special_attributes should return an object + # which can be used for manipulating the special attributes. That's the reason + # we pass the instance through which it got accessed to ObjectModel.__call__, + # returning itself afterwards, so we can still have access to the + # underlying data model and to the instance for which it got accessed. + return self(instance) + + def __contains__(self, name): + return name in self.attributes() + + @lru_cache(maxsize=None) + def attributes(self): + """Get the attributes which are exported by this object model.""" + return [obj[2:] for obj in dir(self) if obj.startswith('py')] + + def lookup(self, name): + """Look up the given *name* in the current model + + It should return an AST or an interpreter object, + but if the name is not found, then an AttributeInferenceError will be raised. 
+ """ + + if name in self.attributes(): + return getattr(self, "py" + name) + raise exceptions.AttributeInferenceError(target=self._instance, attribute=name) + + +class ModuleModel(ObjectModel): + + def _builtins(self): + builtins = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__] + return builtins.special_attributes.lookup('__dict__') + + if six.PY3: + @property + def pybuiltins(self): + return self._builtins() + + else: + @property + def py__builtin__(self): + return self._builtins() + + # __path__ is a standard attribute on *packages* not + # non-package modules. The only mention of it in the + # official 2.7 documentation I can find is in the + # tutorial. + + @property + def py__path__(self): + if not self._instance.package: + raise exceptions.AttributeInferenceError(target=self._instance, + attribute='__path__') + + path = os.path.dirname(self._instance.file) + path_obj = node_classes.Const(value=path, parent=self._instance) + + container = node_classes.List(parent=self._instance) + container.postinit([path_obj]) + + return container + + @property + def py__name__(self): + return node_classes.Const(value=self._instance.name, + parent=self._instance) + + @property + def py__doc__(self): + return node_classes.Const(value=self._instance.doc, + parent=self._instance) + + @property + def py__file__(self): + return node_classes.Const(value=self._instance.file, + parent=self._instance) + + @property + def py__dict__(self): + return _dunder_dict(self._instance, self._instance.globals) + + # __package__ isn't mentioned anywhere outside a PEP: + # https://www.python.org/dev/peps/pep-0366/ + @property + def py__package__(self): + if not self._instance.package: + value = '' + else: + value = self._instance.name + + return node_classes.Const(value=value, parent=self._instance) + + # These are related to the Python 3 implementation of the + # import system, + # https://docs.python.org/3/reference/import.html#import-related-module-attributes + + @property + def py__spec__(self): + # No handling for now. + return node_classes.Unknown() + + @property + def py__loader__(self): + # No handling for now. + return node_classes.Unknown() + + @property + def py__cached__(self): + # No handling for now. 
+ return node_classes.Unknown() + + +class FunctionModel(ObjectModel): + + @property + def py__name__(self): + return node_classes.Const(value=self._instance.name, + parent=self._instance) + + @property + def py__doc__(self): + return node_classes.Const(value=self._instance.doc, + parent=self._instance) + + @property + def py__qualname__(self): + return node_classes.Const(value=self._instance.qname(), + parent=self._instance) + + @property + def py__defaults__(self): + func = self._instance + if not func.args.defaults: + return node_classes.Const(value=None, parent=func) + + defaults_obj = node_classes.Tuple(parent=func) + defaults_obj.postinit(func.args.defaults) + return defaults_obj + + @property + def py__annotations__(self): + obj = node_classes.Dict(parent=self._instance) + + if not self._instance.returns: + returns = None + else: + returns = self._instance.returns + + args = self._instance.args + pair_annotations = itertools.chain( + six.moves.zip(args.args, args.annotations), + six.moves.zip(args.kwonlyargs, args.kwonlyargs_annotations) + ) + + annotations = { + arg.name: annotation + for (arg, annotation) in pair_annotations + if annotation + } + if args.varargannotation: + annotations[args.vararg] = args.varargannotation + if args.kwargannotation: + annotations[args.kwarg] = args.kwargannotation + if returns: + annotations['return'] = returns + + items = [(node_classes.Const(key, parent=obj), value) + for (key, value) in annotations.items()] + + obj.postinit(items) + return obj + + @property + def py__dict__(self): + return node_classes.Dict(parent=self._instance) + + py__globals__ = py__dict__ + + @property + def py__kwdefaults__(self): + + def _default_args(args, parent): + for arg in args.kwonlyargs: + try: + default = args.default_value(arg.name) + except exceptions.NoDefault: + continue + + name = node_classes.Const(arg.name, parent=parent) + yield name, default + + args = self._instance.args + obj = node_classes.Dict(parent=self._instance) + defaults = dict(_default_args(args, obj)) + + obj.postinit(list(defaults.items())) + return obj + + @property + def py__module__(self): + return node_classes.Const(self._instance.root().qname()) + + @property + def py__get__(self): + from astroid import bases + + func = self._instance + + class DescriptorBoundMethod(bases.BoundMethod): + """Bound method which knows how to understand calling descriptor binding.""" + def infer_call_result(self, caller, context=None): + if len(caller.args) != 2: + raise exceptions.InferenceError( + "Invalid arguments for descriptor binding", + target=self, context=context) + + context = contextmod.copy_context(context) + cls = next(caller.args[0].infer(context=context)) + + # Rebuild the original value, but with the parent set as the + # class where it will be bound. + new_func = func.__class__(name=func.name, doc=func.doc, + lineno=func.lineno, col_offset=func.col_offset, + parent=cls) + # pylint: disable=no-member + new_func.postinit(func.args, func.body, + func.decorators, func.returns) + + # Build a proper bound method that points to our newly built function. + proxy = bases.UnboundMethod(new_func) + yield bases.BoundMethod(proxy=proxy, bound=cls) + + return DescriptorBoundMethod(proxy=self._instance, bound=self._instance) + + # These are here just for completion. 
+ @property + def py__ne__(self): + return node_classes.Unknown() + + py__subclasshook__ = py__ne__ + py__str__ = py__ne__ + py__sizeof__ = py__ne__ + py__setattr__ = py__ne__ + py__repr__ = py__ne__ + py__reduce__ = py__ne__ + py__reduce_ex__ = py__ne__ + py__new__ = py__ne__ + py__lt__ = py__ne__ + py__eq__ = py__ne__ + py__gt__ = py__ne__ + py__format__ = py__ne__ + py__delattr__ = py__ne__ + py__getattribute__ = py__ne__ + py__hash__ = py__ne__ + py__init__ = py__ne__ + py__dir__ = py__ne__ + py__call__ = py__ne__ + py__class__ = py__ne__ + py__closure__ = py__ne__ + py__code__ = py__ne__ + + if six.PY2: + pyfunc_name = py__name__ + pyfunc_doc = py__doc__ + pyfunc_globals = py__globals__ + pyfunc_dict = py__dict__ + pyfunc_defaults = py__defaults__ + pyfunc_code = py__code__ + pyfunc_closure = py__closure__ + + +class ClassModel(ObjectModel): + + @property + def py__module__(self): + return node_classes.Const(self._instance.root().qname()) + + @property + def py__name__(self): + return node_classes.Const(self._instance.name) + + @property + def py__qualname__(self): + return node_classes.Const(self._instance.qname()) + + @property + def py__doc__(self): + return node_classes.Const(self._instance.doc) + + @property + def py__mro__(self): + if not self._instance.newstyle: + raise exceptions.AttributeInferenceError(target=self._instance, + attribute='__mro__') + + mro = self._instance.mro() + obj = node_classes.Tuple(parent=self._instance) + obj.postinit(mro) + return obj + + @property + def pymro(self): + if not self._instance.newstyle: + raise exceptions.AttributeInferenceError(target=self._instance, + attribute='mro') + + from astroid import bases + + other_self = self + + # Cls.mro is a method and we need to return one in order to have a proper inference. + # The method we're returning is capable of inferring the underlying MRO though. + class MroBoundMethod(bases.BoundMethod): + def infer_call_result(self, caller, context=None): + yield other_self.py__mro__ + + implicit_metaclass = self._instance.implicit_metaclass() + mro_method = implicit_metaclass.locals['mro'][0] + return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass) + + @property + def py__bases__(self): + obj = node_classes.Tuple() + context = contextmod.InferenceContext() + elts = list(self._instance._inferred_bases(context)) + obj.postinit(elts=elts) + return obj + + @property + def py__class__(self): + from astroid import helpers + return helpers.object_type(self._instance) + + @property + def py__subclasses__(self): + """Get the subclasses of the underlying class + + This looks only in the current module for retrieving the subclasses, + thus it might miss a couple of them. 
+ """ + from astroid import bases + from astroid import scoped_nodes + + if not self._instance.newstyle: + raise exceptions.AttributeInferenceError(target=self._instance, + attribute='__subclasses__') + + qname = self._instance.qname() + root = self._instance.root() + classes = [cls for cls in root.nodes_of_class(scoped_nodes.ClassDef) + if cls != self._instance and cls.is_subtype_of(qname)] + + obj = node_classes.List(parent=self._instance) + obj.postinit(classes) + + class SubclassesBoundMethod(bases.BoundMethod): + def infer_call_result(self, caller, context=None): + yield obj + + implicit_metaclass = self._instance.implicit_metaclass() + subclasses_method = implicit_metaclass.locals['__subclasses__'][0] + return SubclassesBoundMethod(proxy=subclasses_method, + bound=implicit_metaclass) + + @property + def py__dict__(self): + return node_classes.Dict(parent=self._instance) + + +class SuperModel(ObjectModel): + + @property + def py__thisclass__(self): + return self._instance.mro_pointer + + @property + def py__self_class__(self): + return self._instance._self_class + + @property + def py__self__(self): + return self._instance.type + + @property + def py__class__(self): + return self._instance._proxied + + +class UnboundMethodModel(ObjectModel): + + @property + def py__class__(self): + from astroid import helpers + return helpers.object_type(self._instance) + + @property + def py__func__(self): + return self._instance._proxied + + @property + def py__self__(self): + return node_classes.Const(value=None, parent=self._instance) + + pyim_func = py__func__ + pyim_class = py__class__ + pyim_self = py__self__ + + +class BoundMethodModel(FunctionModel): + + @property + def py__func__(self): + return self._instance._proxied._proxied + + @property + def py__self__(self): + return self._instance.bound + + +class GeneratorModel(FunctionModel): + + def __new__(cls, *args, **kwargs): + # Append the values from the GeneratorType unto this object. + ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs) + generator = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__]['generator'] + for name, values in generator.locals.items(): + print(name, values) + method = values[0] + patched = lambda cls, meth=method: meth + + setattr(type(ret), 'py' + name, property(patched)) + + return ret + + @property + def py__name__(self): + return node_classes.Const(value=self._instance.parent.name, + parent=self._instance) + + @property + def py__doc__(self): + return node_classes.Const(value=self._instance.parent.doc, + parent=self._instance) + + +class InstanceModel(ObjectModel): + + @property + def py__class__(self): + return self._instance._proxied + + @property + def py__module__(self): + return node_classes.Const(self._instance.root().qname()) + + @property + def py__doc__(self): + return node_classes.Const(self._instance.doc) + + @property + def py__dict__(self): + return _dunder_dict(self._instance, self._instance.instance_attrs) + + +class ExceptionInstanceModel(InstanceModel): + + @property + def pyargs(self): + message = node_classes.Const('') + args = node_classes.Tuple(parent=self._instance) + args.postinit((message, )) + return args + + if six.PY3: + # It's available only on Python 3. + + @property + def py__traceback__(self): + builtins = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__] + traceback_type = builtins[types.TracebackType.__name__] + return traceback_type.instantiate_class() + + if six.PY2: + # It's available only on Python 2. 
+ + @property + def pymessage(self): + return node_classes.Const('') + + +class DictModel(ObjectModel): + + @property + def py__class__(self): + return self._instance._proxied + + def _generic_dict_attribute(self, obj, name): + """Generate a bound method that can infer the given *obj*.""" + + class DictMethodBoundMethod(astroid.BoundMethod): + def infer_call_result(self, caller, context=None): + yield obj + + meth = next(self._instance._proxied.igetattr(name)) + return DictMethodBoundMethod(proxy=meth, bound=self._instance) + + @property + def pyitems(self): + elems = [] + obj = node_classes.List(parent=self._instance) + for key, value in self._instance.items: + elem = node_classes.Tuple(parent=obj) + elem.postinit((key, value)) + elems.append(elem) + obj.postinit(elts=elems) + + if six.PY3: + from astroid import objects + obj = objects.DictItems(obj) + + return self._generic_dict_attribute(obj, 'items') + + @property + def pykeys(self): + keys = [key for (key, _) in self._instance.items] + obj = node_classes.List(parent=self._instance) + obj.postinit(elts=keys) + + if six.PY3: + from astroid import objects + obj = objects.DictKeys(obj) + + return self._generic_dict_attribute(obj, 'keys') + + @property + def pyvalues(self): + + values = [value for (_, value) in self._instance.items] + obj = node_classes.List(parent=self._instance) + obj.postinit(values) + + if six.PY3: + from astroid import objects + obj = objects.DictValues(obj) + + return self._generic_dict_attribute(obj, 'values') diff -Nru astroid-1.4.9/astroid/manager.py astroid-1.5.3/astroid/manager.py --- astroid-1.4.9/astroid/manager.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/manager.py 2017-06-01 22:07:25.000000000 +0000 @@ -1,33 +1,27 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """astroid manager: avoid multiple astroid build of a same module when possible by providing a class responsible to get astroid representation from various source and using a cache of built modules) """ -from __future__ import print_function -import imp import os +import sys import zipimport +import six + from astroid import exceptions +from astroid.interpreter._import import spec from astroid import modutils from astroid import transforms +from astroid import util def safe_repr(obj): @@ -79,20 +73,24 @@ modname = '.'.join(modutils.modpath_from_file(filepath)) except ImportError: modname = filepath - if modname in self.astroid_cache and self.astroid_cache[modname].source_file == filepath: + if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath: return self.astroid_cache[modname] if source: from astroid.builder import AstroidBuilder return AstroidBuilder(self).file_build(filepath, modname) elif fallback and modname: return self.ast_from_module_name(modname) - raise exceptions.AstroidBuildingException( - 'unable to get astroid for file %s' % filepath) + raise exceptions.AstroidBuildingError( + 'Unable to build an AST for {path}.', path=filepath) def _build_stub_module(self, modname): from astroid.builder import AstroidBuilder return AstroidBuilder(self).string_build('', modname) + def _build_namespace_module(self, modname, path): + from astroid.builder import build_namespace_package_module + return build_namespace_package_module(modname, path) + def _can_load_extension(self, modname): if self.always_load_extensions: return True @@ -113,32 +111,52 @@ if context_file: os.chdir(os.path.dirname(context_file)) try: - filepath, mp_type = self.file_from_module_name(modname, context_file) - if mp_type == modutils.PY_ZIPMODULE: - module = self.zip_import_data(filepath) + found_spec = self.file_from_module_name(modname, context_file) + # pylint: disable=no-member + if found_spec.type == spec.ModuleType.PY_ZIPMODULE: + # pylint: disable=no-member + module = self.zip_import_data(found_spec.location) if module is not None: return module - elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION): - if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname): + + elif found_spec.type in (spec.ModuleType.C_BUILTIN, + spec.ModuleType.C_EXTENSION): + # pylint: disable=no-member + if (found_spec.type == spec.ModuleType.C_EXTENSION + and not self._can_load_extension(modname)): return self._build_stub_module(modname) try: module = modutils.load_module_from_name(modname) - except Exception as ex: - msg = 'Unable to load module %s (%s)' % (modname, ex) - raise exceptions.AstroidBuildingException(msg) + except Exception as ex: # pylint: disable=broad-except + util.reraise(exceptions.AstroidImportError( + 'Loading {modname} failed with:\n{error}', + modname=modname, path=found_spec.location, error=ex)) return self.ast_from_module(module, modname) - elif mp_type == imp.PY_COMPILED: - msg = "Unable to load compiled module %s" % (modname,) - raise exceptions.AstroidBuildingException(msg) - if filepath is None: - msg = "Unable to load module %s" % (modname,) - raise exceptions.AstroidBuildingException(msg) - return self.ast_from_file(filepath, modname, fallback=False) - except exceptions.AstroidBuildingException as e: + + elif found_spec.type == spec.ModuleType.PY_COMPILED: + raise 
exceptions.AstroidImportError( + "Unable to load compiled module {modname}.", + # pylint: disable=no-member + modname=modname, path=found_spec.location) + + elif found_spec.type == spec.ModuleType.PY_NAMESPACE: + return self._build_namespace_module(modname, + # pylint: disable=no-member + found_spec.submodule_search_locations) + + # pylint: disable=no-member + if found_spec.location is None: + raise exceptions.AstroidImportError( + "Can't find a file for module {modname}.", + modname=modname) + + # pylint: disable=no-member + return self.ast_from_file(found_spec.location, modname, fallback=False) + except exceptions.AstroidBuildingError as e: for hook in self._failed_import_hooks: try: return hook(modname) - except exceptions.AstroidBuildingException: + except exceptions.AstroidBuildingError: pass raise e finally: @@ -167,19 +185,23 @@ return None def file_from_module_name(self, modname, contextfile): - # pylint: disable=redefined-variable-type try: value = self._mod_file_cache[(modname, contextfile)] + traceback = sys.exc_info()[2] except KeyError: try: value = modutils.file_info_from_modpath( modname.split('.'), context_file=contextfile) + traceback = sys.exc_info()[2] except ImportError as ex: - msg = 'Unable to load module %s (%s)' % (modname, ex) - value = exceptions.AstroidBuildingException(msg) + value = exceptions.AstroidImportError( + 'Failed to import module {modname} with error:\n{error}.', + modname=modname, error=ex) + traceback = sys.exc_info()[2] self._mod_file_cache[(modname, contextfile)] = value - if isinstance(value, exceptions.AstroidBuildingException): - raise value + if isinstance(value, exceptions.AstroidBuildingError): + six.reraise(exceptions.AstroidBuildingError, + value, traceback) return value def ast_from_module(self, module, modname=None): @@ -203,8 +225,9 @@ try: modname = klass.__module__ except AttributeError: - msg = 'Unable to get module for class %s' % safe_repr(klass) - raise exceptions.AstroidBuildingException(msg) + util.reraise(exceptions.AstroidBuildingError( + 'Unable to get module for class {class_name}.', + cls=klass, class_repr=safe_repr(klass), modname=modname)) modastroid = self.ast_from_module_name(modname) return modastroid.getattr(klass.__name__)[0] # XXX @@ -217,21 +240,23 @@ try: modname = klass.__module__ except AttributeError: - msg = 'Unable to get module for %s' % safe_repr(klass) - raise exceptions.AstroidBuildingException(msg) - except Exception as ex: - msg = ('Unexpected error while retrieving module for %s: %s' - % (safe_repr(klass), ex)) - raise exceptions.AstroidBuildingException(msg) + util.reraise(exceptions.AstroidBuildingError( + 'Unable to get module for {class_repr}.', + cls=klass, class_repr=safe_repr(klass))) + except Exception as ex: # pylint: disable=broad-except + util.reraise(exceptions.AstroidImportError( + 'Unexpected error while retrieving module for {class_repr}:\n' + '{error}', cls=klass, class_repr=safe_repr(klass), error=ex)) try: name = klass.__name__ except AttributeError: - msg = 'Unable to get name for %s' % safe_repr(klass) - raise exceptions.AstroidBuildingException(msg) - except Exception as ex: - exc = ('Unexpected error while retrieving name for %s: %s' - % (safe_repr(klass), ex)) - raise exceptions.AstroidBuildingException(exc) + util.reraise(exceptions.AstroidBuildingError( + 'Unable to get name for {class_repr}:\n', + cls=klass, class_repr=safe_repr(klass))) + except Exception as ex: # pylint: disable=broad-except + util.reraise(exceptions.AstroidImportError( + 'Unexpected error while retrieving 
name for {class_repr}:\n' + '{error}', cls=klass, class_repr=safe_repr(klass), error=ex)) # take care, on living object __module__ is regularly wrong :( modastroid = self.ast_from_module_name(modname) if klass is obj: @@ -247,7 +272,7 @@ `hook` must be a function that accepts a single argument `modname` which contains the name of the module or package that could not be imported. If `hook` can resolve the import, must return a node of type `astroid.Module`, - otherwise, it must raise `AstroidBuildingException`. + otherwise, it must raise `AstroidBuildingError`. """ self._failed_import_hooks.append(hook) diff -Nru astroid-1.4.9/astroid/mixins.py astroid-1.5.3/astroid/mixins.py --- astroid-1.4.9/astroid/mixins.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/mixins.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,10 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """This module contains some mixins for the different nodes. """ @@ -48,7 +38,7 @@ """Mixin for statement filtering and assignment type""" def _get_filtered_stmts(self, _, node, _stmts, mystmt): - """method used in _filter_stmts to get statemtents and trigger break""" + """method used in _filter_stmts to get statements and trigger break""" if self.statement() is mystmt: # original node's statement is the assignment, only keep # current node (gen exp, list comp) @@ -124,15 +114,9 @@ if mymodule.relative_to_absolute_name(modname, level) == mymodule.name: # FIXME: we used to raise InferenceError here, but why ? 
return mymodule - try: - return mymodule.import_module(modname, level=level, - relative_only=level and level >= 1) - except exceptions.AstroidBuildingException as ex: - if isinstance(ex.args[0], SyntaxError): - raise exceptions.InferenceError(str(ex)) - raise exceptions.InferenceError(modname) - except SyntaxError as ex: - raise exceptions.InferenceError(str(ex)) + + return mymodule.import_module(modname, level=level, + relative_only=level and level >= 1) def real_name(self, asname): """get name from 'as' name""" @@ -144,4 +128,6 @@ _asname = name if asname == _asname: return name - raise exceptions.NotFoundError(asname) + raise exceptions.AttributeInferenceError( + 'Could not find original name for {attribute} in {target!r}', + target=self, attribute=asname) diff -Nru astroid-1.4.9/astroid/modutils.py astroid-1.5.3/astroid/modutils.py --- astroid-1.4.9/astroid/modutils.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/modutils.py 2017-03-11 13:04:27.000000000 +0000 @@ -1,20 +1,13 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# astroid is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# -*- coding: utf-8 -*- +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015 Florian Bruhin +# Copyright (c) 2015 Radosław Ganczarek +# Copyright (c) 2016 Jakub Wilk + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Python modules manipulation utility functions. :type PY_SOURCE_EXTS: tuple(str) @@ -26,22 +19,21 @@ :type BUILTIN_MODULES: dict :var BUILTIN_MODULES: dictionary with builtin module names has key """ -from __future__ import with_statement - import imp import os import platform import sys -from distutils.sysconfig import get_python_lib +from distutils.sysconfig import get_python_lib # pylint: disable=import-error +# pylint: disable=import-error, no-name-in-module from distutils.errors import DistutilsPlatformError -import zipimport +# distutils is replaced by virtualenv with a module that does +# weird path manipulations in order to get to the +# real distutils module. -try: - import pkg_resources -except ImportError: - pkg_resources = None +import six -PY_ZIPMODULE = object() +from .interpreter._import import spec +from .interpreter._import import util if sys.platform.startswith('win'): PY_SOURCE_EXTS = ('py', 'pyw') @@ -70,10 +62,18 @@ if os.name == 'nt': STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls')) try: - # real_prefix is defined when running inside virtualenv. + # real_prefix is defined when running inside virtual environments, + # created with the **virtualenv** library. 
STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls')) except AttributeError: - pass + # sys.base_exec_prefix is always defined, but in a virtual environment + # created with the stdlib **venv** module, it points to the original + # installation, if the virtual env is activated. + try: + STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, 'dlls')) + except AttributeError: + pass + if platform.python_implementation() == 'PyPy': _root = os.path.join(sys.prefix, 'lib_pypy') STD_LIB_DIRS.add(_root) @@ -120,6 +120,10 @@ return os.path.normcase(os.path.abspath(path)) +def _canonicalize_path(path): + return os.path.realpath(os.path.expanduser(path)) + + def _path_from_filename(filename, is_jython=IS_JYTHON): if not is_jython: if sys.version_info > (3, 0): @@ -187,11 +191,11 @@ def load_module_from_modpath(parts, path=None, use_sys=1): - """Load a python module from its splitted name. + """Load a python module from its split name. :type parts: list(str) or tuple(str) :param parts: - python name of a module or package splitted on '.' + python name of a module or package split on '.' :type path: list or None :param path: @@ -234,7 +238,7 @@ setattr(prevmodule, part, module) _file = getattr(module, '__file__', '') prevmodule = module - if not _file and _is_namespace(curname): + if not _file and util.is_namespace(curname): continue if not _file and len(modpath) != len(parts): raise ImportError('no module in %s' % '.'.join(parts[len(modpath):])) @@ -268,20 +272,48 @@ return load_module_from_modpath(modpath, path, use_sys) -def _check_init(path, mod_path): +def check_modpath_has_init(path, mod_path): """check there are some __init__.py all along the way""" modpath = [] for part in mod_path: modpath.append(part) path = os.path.join(path, part) - if not _is_namespace('.'.join(modpath)) and not _has_init(path): - return False + if not _has_init(path): + old_namespace = util.is_namespace('.'.join(modpath)) + if not old_namespace: + return False return True +def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None): + filename = _path_from_filename(filename) + filename = os.path.realpath(os.path.expanduser(filename)) + base = os.path.splitext(filename)[0] + + if extrapath is not None: + for path_ in six.moves.map(_canonicalize_path, extrapath): + path = os.path.abspath(path_) + if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path): + submodpath = [pkg for pkg in base[len(path):].split(os.sep) + if pkg] + if is_package_cb(path, submodpath[:-1]): + return extrapath[path_].split('.') + submodpath + + for path in six.moves.map(_canonicalize_path, sys.path): + path = _cache_normalize_path(path) + if path and os.path.normcase(base).startswith(path): + modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] + if is_package_cb(path, modpath[:-1]): + return modpath + + raise ImportError('Unable to find module for %s in %s' % ( + filename, ', \n'.join(sys.path))) + + + def modpath_from_file(filename, extrapath=None): - """given a file path return the corresponding splitted module's name - (i.e name of a module or package splitted on '.') + """given a file path return the corresponding split module's name + (i.e name of a module or package split on '.') :type filename: str :param filename: file's path for which we want the module's name @@ -289,7 +321,7 @@ :type extrapath: dict :param extrapath: optional extra search path, with path as key and package name for the path - as value. This is usually useful to handle package splitted in multiple + as value. 
This is usually useful to handle package split in multiple directories using __path__ trick. @@ -297,40 +329,22 @@ if the corresponding module's name has not been found :rtype: list(str) - :return: the corresponding splitted module's name + :return: the corresponding split module's name """ - filename = _path_from_filename(filename) - filename = os.path.abspath(filename) - base = os.path.splitext(filename)[0] - if extrapath is not None: - for path_ in extrapath: - path = os.path.abspath(path_) - if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path): - submodpath = [pkg for pkg in base[len(path):].split(os.sep) - if pkg] - if _check_init(path, submodpath[:-1]): - return extrapath[path_].split('.') + submodpath - for path in sys.path: - path = _cache_normalize_path(path) - if path and os.path.normcase(base).startswith(path): - modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg] - if _check_init(path, modpath[:-1]): - return modpath - raise ImportError('Unable to find module for %s in %s' % ( - filename, ', \n'.join(sys.path))) + return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init) def file_from_modpath(modpath, path=None, context_file=None): - return file_info_from_modpath(modpath, path, context_file)[0] + return file_info_from_modpath(modpath, path, context_file).location def file_info_from_modpath(modpath, path=None, context_file=None): - """given a mod path (i.e. splitted module / package name), return the + """given a mod path (i.e. split module / package name), return the corresponding file, giving priority to source file over precompiled file if it exists :type modpath: list or tuple :param modpath: - splitted module's name (i.e name of a module or package splitted + split module's name (i.e name of a module or package split on '.') (this means explicit relative imports that start with dots have empty strings in this list!) @@ -360,13 +374,13 @@ if modpath[0] == 'xml': # handle _xmlplus try: - return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context) + return _spec_from_modpath(['_xmlplus'] + modpath[1:], path, context) except ImportError: - return _file_from_modpath(modpath, path, context) + return _spec_from_modpath(modpath, path, context) elif modpath == ['os', 'path']: # FIXME: currently ignoring search_path... - return os.path.__file__, imp.PY_SOURCE - return _file_from_modpath(modpath, path, context) + return spec.ModuleSpec(name='os.path', location=os.path.__file__, module_type=imp.PY_SOURCE) + return _spec_from_modpath(modpath, path, context) def get_module_part(dotted_name, context_file=None): @@ -422,13 +436,13 @@ file_from_modpath(parts[starti:i+1], path=path, context_file=context_file) except ImportError: - if not i >= max(1, len(parts) - 2): + if i < max(1, len(parts) - 2): raise return '.'.join(parts[:i]) return dotted_name -def get_module_files(src_directory, blacklist): +def get_module_files(src_directory, blacklist, list_all=False): """given a package directory return a list of all available python module's files in the package and its subpackages @@ -440,6 +454,10 @@ :param blacklist: iterable list of files or directories to ignore. 
+ :type list_all: bool + :param list_all: + get files from all paths, including ones without __init__.py + :rtype: list :return: the list of all available python module's files in the package and @@ -449,7 +467,7 @@ for directory, dirnames, filenames in os.walk(src_directory): _handle_blacklist(blacklist, dirnames, filenames) # check for __init__.py - if not '__init__.py' in filenames: + if not list_all and '__init__.py' not in filenames: dirnames[:] = () continue for filename in filenames: @@ -520,7 +538,7 @@ # (sys and __builtin__ for instance) if filename is None: # we assume there are no namespaces in stdlib - return not _is_namespace(modname) + return not util.is_namespace(modname) filename = _normalize_path(filename) if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)): return False @@ -565,159 +583,38 @@ # internal only functions ##################################################### -def _file_from_modpath(modpath, path=None, context=None): - """given a mod path (i.e. splitted module / package name), return the - corresponding file +def _spec_from_modpath(modpath, path=None, context=None): + """given a mod path (i.e. split module / package name), return the + corresponding spec this function is used internally, see `file_from_modpath`'s documentation for more information """ - assert len(modpath) > 0 + assert modpath + location = None if context is not None: try: - mtype, mp_filename = _module_file(modpath, [context]) + found_spec = spec.find_spec(modpath, [context]) + location = found_spec.location except ImportError: - mtype, mp_filename = _module_file(modpath, path) + found_spec = spec.find_spec(modpath, path) + location = found_spec.location else: - mtype, mp_filename = _module_file(modpath, path) - if mtype == imp.PY_COMPILED: + found_spec = spec.find_spec(modpath, path) + if found_spec.type == spec.ModuleType.PY_COMPILED: try: - return get_source_file(mp_filename), imp.PY_SOURCE + location = get_source_file(found_spec.location) + return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE) except NoSourceFile: - return mp_filename, imp.PY_COMPILED - elif mtype == imp.C_BUILTIN: + return found_spec._replace(location=location) + elif found_spec.type == spec.ModuleType.C_BUILTIN: # integrated builtin module - return None, imp.C_BUILTIN - elif mtype == imp.PKG_DIRECTORY: - mp_filename = _has_init(mp_filename) - mtype = imp.PY_SOURCE - return mp_filename, mtype - -def _search_zip(modpath, pic): - for filepath, importer in list(pic.items()): - if importer is not None: - if importer.find_module(modpath[0]): - if not importer.find_module(os.path.sep.join(modpath)): - raise ImportError('No module named %s in %s/%s' % ( - '.'.join(modpath[1:]), filepath, modpath)) - return (PY_ZIPMODULE, - os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), - filepath) - raise ImportError('No module named %s' % '.'.join(modpath)) - -try: - import pkg_resources -except ImportError: - pkg_resources = None - + return found_spec._replace(location=None) + elif found_spec.type == spec.ModuleType.PKG_DIRECTORY: + location = _has_init(found_spec.location) + return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE) + return found_spec -def _is_namespace(modname): - return (pkg_resources is not None - and modname in pkg_resources._namespace_packages) - - -def _module_file(modpath, path=None): - """get a module type / file path - - :type modpath: list or tuple - :param modpath: - splitted module's name (i.e name of a module or package splitted - on '.'), 
with leading empty strings for explicit relative import - - :type path: list or None - :param path: - optional list of path where the module or package should be - searched (use sys.path if nothing or None is given) - - - :rtype: tuple(int, str) - :return: the module type flag and the file path for a module - """ - # egg support compat - try: - pic = sys.path_importer_cache - _path = (path is None and sys.path or path) - for __path in _path: - if not __path in pic: - try: - pic[__path] = zipimport.zipimporter(__path) - except zipimport.ZipImportError: - pic[__path] = None - checkeggs = True - except AttributeError: - checkeggs = False - # pkg_resources support (aka setuptools namespace packages) - if _is_namespace(modpath[0]) and modpath[0] in sys.modules: - # setuptools has added into sys.modules a module object with proper - # __path__, get back information from there - module = sys.modules[modpath.pop(0)] - path = list(module.__path__) - if not modpath: - return imp.C_BUILTIN, None - imported = [] - while modpath: - modname = modpath[0] - # take care to changes in find_module implementation wrt builtin modules - # - # Python 2.6.6 (r266:84292, Sep 11 2012, 08:34:23) - # >>> imp.find_module('posix') - # (None, 'posix', ('', '', 6)) - # - # Python 3.3.1 (default, Apr 26 2013, 12:08:46) - # >>> imp.find_module('posix') - # (None, None, ('', '', 6)) - try: - stream, mp_filename, mp_desc = imp.find_module(modname, path) - except ImportError: - if checkeggs: - return _search_zip(modpath, pic)[:2] - raise - else: - # Don't forget to close the stream to avoid - # spurious ResourceWarnings. - if stream: - stream.close() - - if checkeggs and mp_filename: - fullabspath = [_cache_normalize_path(x) for x in _path] - try: - pathindex = fullabspath.index(os.path.dirname(_normalize_path(mp_filename))) - emtype, emp_filename, zippath = _search_zip(modpath, pic) - if pathindex > _path.index(zippath): - # an egg takes priority - return emtype, emp_filename - except ValueError: - # XXX not in _path - pass - except ImportError: - pass - checkeggs = False - imported.append(modpath.pop(0)) - mtype = mp_desc[2] - if modpath: - if mtype != imp.PKG_DIRECTORY: - raise ImportError('No module %s in %s' % ('.'.join(modpath), - '.'.join(imported))) - # XXX guess if package is using pkgutil.extend_path by looking for - # those keywords in the first four Kbytes - try: - with open(os.path.join(mp_filename, '__init__.py'), 'rb') as stream: - data = stream.read(4096) - except IOError: - path = [mp_filename] - else: - extend_path = b'pkgutil' in data and b'extend_path' in data - declare_namespace = ( - b"pkg_resources" in data - and b"declare_namespace(__name__)" in data) - if extend_path or declare_namespace: - # extend_path is called, search sys.path for module/packages - # of this name see pkgutil.extend_path documentation - path = [os.path.join(p, *imported) for p in sys.path - if os.path.isdir(os.path.join(p, *imported))] - else: - path = [mp_filename] - return mtype, mp_filename def _is_python_file(filename): """return true if the given filename should be considered as a python file @@ -739,3 +636,10 @@ if os.path.exists(mod_or_pack + '.' + ext): return mod_or_pack + '.' 
+ ext return None + + +def is_namespace(specobj): + return specobj.type == spec.ModuleType.PY_NAMESPACE + +def is_directory(specobj): + return specobj.type == spec.ModuleType.PKG_DIRECTORY diff -Nru astroid-1.4.9/astroid/node_classes.py astroid-1.5.3/astroid/node_classes.py --- astroid-1.4.9/astroid/node_classes.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/node_classes.py 2017-04-12 14:03:35.000000000 +0000 @@ -1,68 +1,72 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2014, 2016 Google, Inc. +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2016 Jakub Wilk + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Module for some node classes. More nodes in scoped_nodes.py """ import abc +import pprint import warnings +try: + from functools import singledispatch as _singledispatch +except ImportError: + from singledispatch import singledispatch as _singledispatch -import lazy_object_proxy import six +from astroid import as_string from astroid import bases from astroid import context as contextmod from astroid import decorators from astroid import exceptions +from astroid import manager from astroid import mixins from astroid import util BUILTINS = six.moves.builtins.__name__ +MANAGER = manager.AstroidManager() -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred def unpack_infer(stmt, context=None): """recursively generate nodes inferred by the given statement. If the inferred value is a list or a tuple, recurse on the elements """ if isinstance(stmt, (List, Tuple)): for elt in stmt.elts: - if elt is util.YES: + if elt is util.Uninferable: yield elt continue for inferred_elt in unpack_infer(elt, context): yield inferred_elt - return + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=stmt, context=context)) # if inferred is a final node, return it and stop inferred = next(stmt.infer(context)) if inferred is stmt: yield inferred - return - # else, infer recursivly, except YES object that should be returned as is + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. 
+ raise StopIteration(dict(node=stmt, context=context)) + # else, infer recursively, except Uninferable object that should be returned as is for inferred in stmt.infer(context): - if inferred is util.YES: + if inferred is util.Uninferable: yield inferred else: for inf_inf in unpack_infer(inferred, context): yield inf_inf + raise StopIteration(dict(node=stmt, context=context)) -def are_exclusive(stmt1, stmt2, exceptions=None): +def are_exclusive(stmt1, stmt2, exceptions=None): # pylint: disable=redefined-outer-name """return true if the two given statements are mutually exclusive `exceptions` may be a list of exception names. If specified, discard If @@ -100,12 +104,22 @@ c2attr, c2node = node.locate_child(previous) c1attr, c1node = node.locate_child(children[node]) if c1node is not c2node: - if ((c2attr == 'body' - and c1attr == 'handlers' - and children[node].catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or - (c2attr == 'handlers' and c1attr == 'orelse') or - (c2attr == 'orelse' and c1attr == 'handlers')): + first_in_body_caught_by_handlers = ( + c2attr == 'handlers' + and c1attr == 'body' + and previous.catch(exceptions)) + second_in_body_caught_by_handlers = ( + c2attr == 'body' + and c1attr == 'handlers' + and children[node].catch(exceptions)) + first_in_else_other_in_handlers = ( + c2attr == 'handlers' and c1attr == 'orelse') + second_in_else_other_in_handlers = ( + c2attr == 'orelse' and c1attr == 'handlers') + if any((first_in_body_caught_by_handlers, + second_in_body_caught_by_handlers, + first_in_else_other_in_handlers, + second_in_else_other_in_handlers)): return True elif c2attr == 'handlers' and c1attr == 'handlers': return previous is not children[node] @@ -115,19 +129,563 @@ return False +# getitem() helpers. + +_SLICE_SENTINEL = object() + + +def _slice_value(index, context=None): + """Get the value of the given slice index.""" + + if isinstance(index, Const): + if isinstance(index.value, (int, type(None))): + return index.value + elif index is None: + return None + else: + # Try to infer what the index actually is. + # Since we can't return all the possible values, + # we'll stop at the first possible value. + try: + inferred = next(index.infer(context=context)) + except exceptions.InferenceError: + pass + else: + if isinstance(inferred, Const): + if isinstance(inferred.value, (int, type(None))): + return inferred.value + + # Use a sentinel, because None can be a valid + # value that this function can return, + # as it is the case for unspecified bounds. 
+ return _SLICE_SENTINEL + + +def _infer_slice(node, context=None): + lower = _slice_value(node.lower, context) + upper = _slice_value(node.upper, context) + step = _slice_value(node.step, context) + if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)): + return slice(lower, upper, step) + + raise exceptions.AstroidTypeError( + message='Could not infer slice used in subscript', + node=node, index=node.parent, context=context) + + +def _container_getitem(instance, elts, index, context=None): + """Get a slice or an item, using the given *index*, for the given sequence.""" + try: + if isinstance(index, Slice): + index_slice = _infer_slice(index, context=context) + new_cls = instance.__class__() + new_cls.elts = elts[index_slice] + new_cls.parent = instance.parent + return new_cls + elif isinstance(index, Const): + return elts[index.value] + except IndexError: + util.reraise(exceptions.AstroidIndexError( + message='Index {index!s} out of range', + node=instance, index=index, context=context)) + except TypeError as exc: + util.reraise(exceptions.AstroidTypeError( + message='Type error {error!r}', error=exc, + node=instance, index=index, context=context)) + + raise exceptions.AstroidTypeError( + 'Could not use %s as subscript index' % index + ) + + +class NodeNG(object): + """Base Class for all Astroid node classes. + + It represents a node of the new abstract syntax tree. + """ + is_statement = False + optional_assign = False # True for For (and for Comprehension if py <3.0) + is_function = False # True for FunctionDef nodes + # attributes below are set by the builder module or by raw factories + lineno = None + col_offset = None + # parent node in the tree + parent = None + # attributes containing child node(s) redefined in most concrete classes: + _astroid_fields = () + # attributes containing non-nodes: + _other_fields = () + # attributes containing AST-dependent fields: + _other_other_fields = () + # instance specific inference function infer(node, context) + _explicit_inference = None + + def __init__(self, lineno=None, col_offset=None, parent=None): + self.lineno = lineno + self.col_offset = col_offset + self.parent = parent + + def infer(self, context=None, **kwargs): + """main interface to the interface system, return a generator on inferred + values. + + If the instance has some explicit inference function set, it will be + called instead of the default interface. 
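For illustration only (not part of the patch): a minimal sketch of the inference entry points defined here, assuming astroid 1.5.x is installed and that the public astroid.extract_node helper is available as in this release.

import astroid

# The trailing #@ marks the node that extract_node should return.
call = astroid.extract_node('''
def hours_to_minutes(hours):
    return hours * 60
hours_to_minutes(2)  #@
''')
# infer() yields the values astroid can deduce for the call site.
print(next(call.infer()).value)   # 120
# inferred(), defined further down in NodeNG, returns the same values as a list.
print(call.inferred())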
+ """ + if self._explicit_inference is not None: + # explicit_inference is not bound, give it self explicitly + try: + # pylint: disable=not-callable + return self._explicit_inference(self, context, **kwargs) + except exceptions.UseInferenceDefault: + pass + + if not context: + return self._infer(context, **kwargs) + + key = (self, context.lookupname, + context.callcontext, context.boundnode) + if key in context.inferred: + return iter(context.inferred[key]) + + return context.cache_generator(key, self._infer(context, **kwargs)) + + def _repr_name(self): + """return self.name or self.attrname or '' for nice representation""" + return getattr(self, 'name', getattr(self, 'attrname', '')) + + def __str__(self): + rname = self._repr_name() + cname = type(self).__name__ + if rname: + string = '%(cname)s.%(rname)s(%(fields)s)' + alignment = len(cname) + len(rname) + 2 + else: + string = '%(cname)s(%(fields)s)' + alignment = len(cname) + 1 + result = [] + for field in self._other_fields + self._astroid_fields: + value = getattr(self, field) + width = 80 - len(field) - alignment + lines = pprint.pformat(value, indent=2, + width=width).splitlines(True) + + inner = [lines[0]] + for line in lines[1:]: + inner.append(' ' * alignment + line) + result.append('%s=%s' % (field, ''.join(inner))) + + return string % {'cname': cname, + 'rname': rname, + 'fields': (',\n' + ' ' * alignment).join(result)} + + def __repr__(self): + rname = self._repr_name() + if rname: + string = '<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>' + else: + string = '<%(cname)s l.%(lineno)s at 0x%(id)x>' + return string % {'cname': type(self).__name__, + 'rname': rname, + 'lineno': self.fromlineno, + 'id': id(self)} + + def accept(self, visitor): + func = getattr(visitor, "visit_" + self.__class__.__name__.lower()) + return func(self) + + def get_children(self): + for field in self._astroid_fields: + attr = getattr(self, field) + if attr is None: + continue + if isinstance(attr, (list, tuple)): + for elt in attr: + yield elt + else: + yield attr + + def last_child(self): + """an optimized version of list(get_children())[-1]""" + for field in self._astroid_fields[::-1]: + attr = getattr(self, field) + if not attr: # None or empty listy / tuple + continue + if isinstance(attr, (list, tuple)): + return attr[-1] + + return attr + return None + + def parent_of(self, node): + """return true if i'm a parent of the given node""" + parent = node.parent + while parent is not None: + if self is parent: + return True + parent = parent.parent + return False + + def statement(self): + """return the first parent node marked as statement node""" + if self.is_statement: + return self + return self.parent.statement() + + def frame(self): + """return the first parent frame node (i.e. Module, FunctionDef or + ClassDef) + + """ + return self.parent.frame() + + def scope(self): + """return the first node defining a new scope (i.e. Module, + FunctionDef, ClassDef, Lambda but also GenExpr) + + """ + return self.parent.scope() + + def root(self): + """return the root node of the tree, (i.e. 
a Module)""" + if self.parent: + return self.parent.root() + return self + + def child_sequence(self, child): + """search for the right sequence where the child lies in""" + for field in self._astroid_fields: + node_or_sequence = getattr(self, field) + if node_or_sequence is child: + return [node_or_sequence] + # /!\ compiler.ast Nodes have an __iter__ walking over child nodes + if (isinstance(node_or_sequence, (tuple, list)) + and child in node_or_sequence): + return node_or_sequence + + msg = 'Could not find %s in %s\'s children' + raise exceptions.AstroidError(msg % (repr(child), repr(self))) + + def locate_child(self, child): + """return a 2-uple (child attribute name, sequence or node)""" + for field in self._astroid_fields: + node_or_sequence = getattr(self, field) + # /!\ compiler.ast Nodes have an __iter__ walking over child nodes + if child is node_or_sequence: + return field, child + if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence: + return field, node_or_sequence + msg = 'Could not find %s in %s\'s children' + raise exceptions.AstroidError(msg % (repr(child), repr(self))) + # FIXME : should we merge child_sequence and locate_child ? locate_child + # is only used in are_exclusive, child_sequence one time in pylint. + + def next_sibling(self): + """return the next sibling statement""" + return self.parent.next_sibling() + + def previous_sibling(self): + """return the previous sibling statement""" + return self.parent.previous_sibling() + + def nearest(self, nodes): + """return the node which is the nearest before this one in the + given list of nodes + """ + myroot = self.root() + mylineno = self.fromlineno + nearest = None, 0 + for node in nodes: + assert node.root() is myroot, \ + 'nodes %s and %s are not from the same module' % (self, node) + lineno = node.fromlineno + if node.fromlineno > mylineno: + break + if lineno > nearest[1]: + nearest = node, lineno + # FIXME: raise an exception if nearest is None ? + return nearest[0] + + # these are lazy because they're relatively expensive to compute for every + # single node, and they rarely get looked at + + @decorators.cachedproperty + def fromlineno(self): + if self.lineno is None: + return self._fixed_source_line() + + return self.lineno + + @decorators.cachedproperty + def tolineno(self): + if not self._astroid_fields: + # can't have children + lastchild = None + else: + lastchild = self.last_child() + if lastchild is None: + return self.fromlineno + + return lastchild.tolineno + + def _fixed_source_line(self): + """return the line number where the given node appears + + we need this method since not all nodes have the lineno attribute + correctly set... 
+ """ + line = self.lineno + _node = self + try: + while line is None: + _node = next(_node.get_children()) + line = _node.lineno + except StopIteration: + _node = self.parent + while _node and line is None: + line = _node.lineno + _node = _node.parent + return line + + def block_range(self, lineno): + """handle block line numbers range for non block opening statements + """ + return lineno, self.tolineno + + def set_local(self, name, stmt): + """delegate to a scoped parent handling a locals dictionary""" + self.parent.set_local(name, stmt) + + def nodes_of_class(self, klass, skip_klass=None): + """return an iterator on nodes which are instance of the given class(es) + + klass may be a class object or a tuple of class objects + """ + if isinstance(self, klass): + yield self + for child_node in self.get_children(): + if skip_klass is not None and isinstance(child_node, skip_klass): + continue + for matching in child_node.nodes_of_class(klass, skip_klass): + yield matching + + def _infer_name(self, frame, name): + # overridden for ImportFrom, Import, Global, TryExcept and Arguments + return None + + def _infer(self, context=None): + """we don't know how to resolve a statement by default""" + # this method is overridden by most concrete classes + raise exceptions.InferenceError('No inference function for {node!r}.', + node=self, context=context) + + def inferred(self): + '''return list of inferred values for a more simple inference usage''' + return list(self.infer()) + + def infered(self): + warnings.warn('%s.infered() is deprecated and slated for removal ' + 'in astroid 2.0, use %s.inferred() instead.' + % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) + return self.inferred() + + def instantiate_class(self): + """instantiate a node if it is a ClassDef node, else return self""" + return self + + def has_base(self, node): + return False + + def callable(self): + return False + + def eq(self, value): + return False + + def as_string(self): + return as_string.to_code(self) + + def repr_tree(self, ids=False, include_linenos=False, + ast_state=False, indent=' ', max_depth=0, max_width=80): + """Returns a string representation of the AST from this node. + + :param ids: If true, includes the ids with the node type names. + + :param include_linenos: If true, includes the line numbers and + column offsets. + + :param ast_state: If true, includes information derived from + the whole AST like local and global variables. + + :param indent: A string to use to indent the output string. + + :param max_depth: If set to a positive integer, won't return + nodes deeper than max_depth in the string. + + :param max_width: Only positive integer values are valid, the + default is 80. Attempts to format the output string to stay + within max_width characters, but can exceed it under some + circumstances. + """ + @_singledispatch + def _repr_tree(node, result, done, cur_indent='', depth=1): + """Outputs a representation of a non-tuple/list, non-node that's + contained within an AST, including strings. 
+ """ + lines = pprint.pformat(node, + width=max(max_width - len(cur_indent), + 1)).splitlines(True) + result.append(lines[0]) + result.extend([cur_indent + line for line in lines[1:]]) + return len(lines) != 1 + + # pylint: disable=unused-variable; doesn't understand singledispatch + @_repr_tree.register(tuple) + @_repr_tree.register(list) + def _repr_seq(node, result, done, cur_indent='', depth=1): + """Outputs a representation of a sequence that's contained within an AST.""" + cur_indent += indent + result.append('[') + if not node: + broken = False + elif len(node) == 1: + broken = _repr_tree(node[0], result, done, cur_indent, depth) + elif len(node) == 2: + broken = _repr_tree(node[0], result, done, cur_indent, depth) + if not broken: + result.append(', ') + else: + result.append(',\n') + result.append(cur_indent) + broken = (_repr_tree(node[1], result, done, cur_indent, depth) + or broken) + else: + result.append('\n') + result.append(cur_indent) + for child in node[:-1]: + _repr_tree(child, result, done, cur_indent, depth) + result.append(',\n') + result.append(cur_indent) + _repr_tree(node[-1], result, done, cur_indent, depth) + broken = True + result.append(']') + return broken + + # pylint: disable=unused-variable; doesn't understand singledispatch + @_repr_tree.register(NodeNG) + def _repr_node(node, result, done, cur_indent='', depth=1): + """Outputs a strings representation of an astroid node.""" + if node in done: + result.append(indent + ' max_depth: + result.append('...') + return False + depth += 1 + cur_indent += indent + if ids: + result.append('%s<0x%x>(\n' % (type(node).__name__, id(node))) + else: + result.append('%s(' % type(node).__name__) + fields = [] + if include_linenos: + fields.extend(('lineno', 'col_offset')) + fields.extend(node._other_fields) + fields.extend(node._astroid_fields) + if ast_state: + fields.extend(node._other_other_fields) + if not fields: + broken = False + elif len(fields) == 1: + result.append('%s=' % fields[0]) + broken = _repr_tree(getattr(node, fields[0]), result, done, + cur_indent, depth) + else: + result.append('\n') + result.append(cur_indent) + for field in fields[:-1]: + result.append('%s=' % field) + _repr_tree(getattr(node, field), result, done, cur_indent, + depth) + result.append(',\n') + result.append(cur_indent) + result.append('%s=' % fields[-1]) + _repr_tree(getattr(node, fields[-1]), result, done, cur_indent, + depth) + broken = True + result.append(')') + return broken + + result = [] + _repr_tree(self, result, set()) + return ''.join(result) + + def bool_value(self): + """Determine the bool value of this node + + The boolean value of a node can have three + possible values: + + * False. For instance, empty data structures, + False, empty strings, instances which return + explicitly False from the __nonzero__ / __bool__ + method. + * True. Most of constructs are True by default: + classes, functions, modules etc + * Uninferable: the inference engine is uncertain of the + node's value. 
+ """ + return util.Uninferable + + +class Statement(NodeNG): + """Statement node adding a few attributes""" + is_statement = True + + def next_sibling(self): + """return the next sibling statement""" + stmts = self.parent.child_sequence(self) + index = stmts.index(self) + try: + return stmts[index +1] + except IndexError: + pass + + def previous_sibling(self): + """return the previous sibling statement""" + stmts = self.parent.child_sequence(self) + index = stmts.index(self) + if index >= 1: + return stmts[index -1] + + + @six.add_metaclass(abc.ABCMeta) class _BaseContainer(mixins.ParentAssignTypeMixin, - bases.NodeNG, - bases.Instance): + NodeNG, bases.Instance): """Base class for Set, FrozenSet, Tuple and List.""" _astroid_fields = ('elts',) - def __init__(self, elts=None): + def __init__(self, lineno=None, col_offset=None, parent=None): + self.elts = [] + super(_BaseContainer, self).__init__(lineno, col_offset, parent) + + def postinit(self, elts): + self.elts = elts + + @classmethod + def from_constants(cls, elts=None): + node = cls() if elts is None: - self.elts = [] + node.elts = [] else: - self.elts = [const_factory(e) for e in elts] + node.elts = [const_factory(e) for e in elts] + return node def itered(self): return self.elts @@ -197,7 +755,6 @@ if self.statement() is myframe and myframe.parent: myframe = myframe.parent.frame() - mystmt = self.statement() # line filtering if we are in the same frame # @@ -228,8 +785,8 @@ optional_assign = assign_type.optional_assign if optional_assign and assign_type.parent_of(self): - # we are inside a loop, loop var assigment is hidding previous - # assigment + # we are inside a loop, loop var assignment is hiding previous + # assignment _stmts = [node] _stmt_parents = [stmt.parent] continue @@ -246,7 +803,7 @@ # both statements are not at the same block level continue # if currently visited node is following previously considered - # assignement and both are not exclusive, we can drop the + # assignment and both are not exclusive, we can drop the # previous one. 
For instance in the following code :: # # if a: @@ -266,7 +823,7 @@ # # moreover, on loop assignment types, assignment won't # necessarily be done if the loop has no iteration, so we don't - # want to clear previous assigments if any (hence the test on + # want to clear previous assignments if any (hence the test on # optional_assign) if not (optional_assign or are_exclusive(_stmts[pindex], node)): del _stmt_parents[pindex] @@ -287,19 +844,34 @@ # Name classes -class AssignName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): - """class representing an AssName node""" +class AssignName(LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG): + """class representing an AssignName node""" + _other_fields = ('name',) + def __init__(self, name=None, lineno=None, col_offset=None, parent=None): + self.name = name + super(AssignName, self).__init__(lineno, col_offset, parent) -class DelName(LookupMixIn, mixins.ParentAssignTypeMixin, bases.NodeNG): + +class DelName(LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG): """class representing a DelName node""" + _other_fields = ('name',) + + def __init__(self, name=None, lineno=None, col_offset=None, parent=None): + self.name = name + super(DelName, self).__init__(lineno, col_offset, parent) -class Name(LookupMixIn, bases.NodeNG): +class Name(LookupMixIn, NodeNG): """class representing a Name node""" + _other_fields = ('name',) + def __init__(self, name=None, lineno=None, col_offset=None, parent=None): + self.name = name + super(Name, self).__init__(lineno, col_offset, parent) -class Arguments(mixins.AssignTypeMixin, bases.NodeNG): + +class Arguments(mixins.AssignTypeMixin, NodeNG): """class representing an Arguments node""" if six.PY3: # Python 3.4+ uses a different approach regarding annotations, @@ -314,21 +886,38 @@ # annotation, its value will be None. 
_astroid_fields = ('args', 'defaults', 'kwonlyargs', - 'kw_defaults', 'annotations', - 'varargannotation', 'kwargannotation') - annotations = None + 'kw_defaults', 'annotations', 'varargannotation', + 'kwargannotation', 'kwonlyargs_annotations') varargannotation = None kwargannotation = None else: _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults') - args = None - defaults = None - kwonlyargs = None - kw_defaults = None + _other_fields = ('vararg', 'kwarg') - def __init__(self, vararg=None, kwarg=None): + def __init__(self, vararg=None, kwarg=None, parent=None): + super(Arguments, self).__init__(parent=parent) self.vararg = vararg self.kwarg = kwarg + self.args = [] + self.defaults = [] + self.kwonlyargs = [] + self.kw_defaults = [] + self.annotations = [] + self.kwonlyargs_annotations = [] + + def postinit(self, args, defaults, kwonlyargs, kw_defaults, + annotations, + kwonlyargs_annotations=None, + varargannotation=None, + kwargannotation=None): + self.args = args + self.defaults = defaults + self.kwonlyargs = kwonlyargs + self.kw_defaults = kw_defaults + self.annotations = annotations + self.kwonlyargs_annotations = kwonlyargs_annotations + self.varargannotation = varargannotation + self.kwargannotation = kwargannotation def _infer_name(self, frame, name): if self.parent is frame: @@ -353,7 +942,11 @@ if self.kwonlyargs: if not self.vararg: result.append('*') - result.append(_format_args(self.kwonlyargs, self.kw_defaults)) + result.append(_format_args( + self.kwonlyargs, + self.kw_defaults, + self.kwonlyargs_annotations + )) if self.kwarg: result.append('**%s' % self.kwarg) return ', '.join(result) @@ -371,7 +964,7 @@ i = _find_arg(argname, self.kwonlyargs)[0] if i is not None and self.kw_defaults[i] is not None: return self.kw_defaults[i] - raise exceptions.NoDefault() + raise exceptions.NoDefault(func=self.parent, name=argname) def is_argument(self, name): """return True if the name is defined in arguments""" @@ -379,7 +972,8 @@ return True if name == self.kwarg: return True - return self.find_argname(name, True)[1] is not None + return (self.find_argname(name, True)[1] is not None or + self.kwonlyargs and _find_arg(name, self.kwonlyargs, True)[1] is not None) def find_argname(self, argname, rec=False): """return index and Name node with given name""" @@ -430,56 +1024,164 @@ return ', '.join(values) -class AssignAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): +class AssignAttr(mixins.ParentAssignTypeMixin, NodeNG): """class representing an AssignAttr node""" _astroid_fields = ('expr',) + _other_fields = ('attrname',) expr = None -class Assert(bases.Statement): + def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): + self.attrname = attrname + super(AssignAttr, self).__init__(lineno, col_offset, parent) + + def postinit(self, expr=None): + self.expr = expr + + +class Assert(Statement): """class representing an Assert node""" _astroid_fields = ('test', 'fail',) test = None fail = None -class Assign(bases.Statement, mixins.AssignTypeMixin): + def postinit(self, test=None, fail=None): + self.fail = fail + self.test = test + + +class Assign(mixins.AssignTypeMixin, Statement): """class representing an Assign node""" _astroid_fields = ('targets', 'value',) targets = None value = None -class AugAssign(bases.Statement, mixins.AssignTypeMixin): + def postinit(self, targets=None, value=None): + self.targets = targets + self.value = value + + +class AnnAssign(mixins.AssignTypeMixin, Statement): + """Class representing an AnnAssign node""" + + 
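For illustration only (not part of the patch): AnnAssign is new in this release and models PEP 526 annotated assignments. A hedged sketch of what it captures, assuming astroid 1.5.x running on Python 3.6+ (older interpreters cannot parse the syntax).

import astroid

node = astroid.extract_node('x: int = 1')
print(type(node).__name__)   # AnnAssign
print(node.target.name)      # 'x'
print(node.annotation.name)  # 'int'
print(node.value.value)      # 1
print(node.simple)           # 1 for a bare-name target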
_astroid_fields = ('target', 'annotation', 'value',) + _other_fields = ('simple',) + target = None + annotation = None + value = None + simple = None + + def postinit(self, target, annotation, simple, value=None): + self.target = target + self.annotation = annotation + self.value = value + self.simple = simple + + +class AugAssign(mixins.AssignTypeMixin, Statement): """class representing an AugAssign node""" - _astroid_fields = ('target', 'value',) + _astroid_fields = ('target', 'value') + _other_fields = ('op',) target = None value = None -class Repr(bases.NodeNG): - """class representing a Backquote node""" + def __init__(self, op=None, lineno=None, col_offset=None, parent=None): + self.op = op + super(AugAssign, self).__init__(lineno, col_offset, parent) + + def postinit(self, target=None, value=None): + self.target = target + self.value = value + + # This is set by inference.py + def _infer_augassign(self, context=None): + raise NotImplementedError + + def type_errors(self, context=None): + """Return a list of TypeErrors which can occur during inference. + + Each TypeError is represented by a :class:`BadBinaryOperationMessage`, + which holds the original exception. + """ + try: + results = self._infer_augassign(context=context) + return [result for result in results + if isinstance(result, util.BadBinaryOperationMessage)] + except exceptions.InferenceError: + return [] + + +class Repr(NodeNG): + """class representing a Repr node""" _astroid_fields = ('value',) value = None -class BinOp(bases.NodeNG): + def postinit(self, value=None): + self.value = value + + +class BinOp(NodeNG): """class representing a BinOp node""" - _astroid_fields = ('left', 'right',) + _astroid_fields = ('left', 'right') + _other_fields = ('op',) left = None right = None -class BoolOp(bases.NodeNG): + def __init__(self, op=None, lineno=None, col_offset=None, parent=None): + self.op = op + super(BinOp, self).__init__(lineno, col_offset, parent) + + def postinit(self, left=None, right=None): + self.left = left + self.right = right + + # This is set by inference.py + def _infer_binop(self, context=None): + raise NotImplementedError + + def type_errors(self, context=None): + """Return a list of TypeErrors which can occur during inference. + + Each TypeError is represented by a :class:`BadBinaryOperationMessage`, + which holds the original exception. 
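For illustration only (not part of the patch): a sketch of the new type_errors() helper on AugAssign and BinOp, assuming astroid 1.5.x; the exact message text comes from util.BadBinaryOperationMessage and may differ slightly.

import astroid

node = astroid.extract_node('1 + "a"')
for error in node.type_errors():
    # Each entry wraps an operation that cannot succeed, for example
    # unsupported operand type(s) for +: 'int' and 'str'
    print(error)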
+ """ + try: + results = self._infer_binop(context=context) + return [result for result in results + if isinstance(result, util.BadBinaryOperationMessage)] + except exceptions.InferenceError: + return [] + + +class BoolOp(NodeNG): """class representing a BoolOp node""" _astroid_fields = ('values',) + _other_fields = ('op',) values = None -class Break(bases.Statement): + def __init__(self, op=None, lineno=None, col_offset=None, parent=None): + self.op = op + super(BoolOp, self).__init__(lineno, col_offset, parent) + + def postinit(self, values=None): + self.values = values + + +class Break(Statement): """class representing a Break node""" -class Call(bases.NodeNG): +class Call(NodeNG): """class representing a Call node""" _astroid_fields = ('func', 'args', 'keywords') func = None args = None keywords = None + def postinit(self, func=None, args=None, keywords=None): + self.func = func + self.args = args + self.keywords = keywords + @property def starargs(self): args = self.args or [] @@ -490,12 +1192,17 @@ keywords = self.keywords or [] return [keyword for keyword in keywords if keyword.arg is None] -class Compare(bases.NodeNG): + +class Compare(NodeNG): """class representing a Compare node""" _astroid_fields = ('left', 'ops',) left = None ops = None + def postinit(self, left=None, ops=None): + self.left = left + self.ops = ops + def get_children(self): """override get_children for tuple fields""" yield self.left @@ -509,12 +1216,25 @@ #return self.left -class Comprehension(bases.NodeNG): +class Comprehension(NodeNG): """class representing a Comprehension node""" _astroid_fields = ('target', 'iter', 'ifs') + _other_fields = ('is_async',) target = None iter = None ifs = None + is_async = None + + def __init__(self, parent=None): + super(Comprehension, self).__init__() + self.parent = parent + + # pylint: disable=redefined-builtin; same name as builtin ast module. + def postinit(self, target=None, iter=None, ifs=None, is_async=None): + self.target = target + self.iter = iter + self.ifs = ifs + self.is_async = is_async optional_assign = True def assign_type(self): @@ -542,21 +1262,45 @@ return stmts, False -class Const(bases.NodeNG, bases.Instance): +class Const(NodeNG, bases.Instance): """represent a constant node like num, str, bool, None, bytes""" + _other_fields = ('value',) - def __init__(self, value=None): + def __init__(self, value, lineno=None, col_offset=None, parent=None): self.value = value + super(Const, self).__init__(lineno, col_offset, parent) def getitem(self, index, context=None): - if isinstance(self.value, six.string_types): - return Const(self.value[index]) - if isinstance(self.value, bytes) and six.PY3: - # Bytes aren't instances of six.string_types - # on Python 3. Also, indexing them should return - # integers. - return Const(self.value[index]) - raise TypeError('%r (value=%s)' % (self, self.value)) + if isinstance(index, Const): + index_value = index.value + elif isinstance(index, Slice): + index_value = _infer_slice(index, context=context) + + else: + raise exceptions.AstroidTypeError( + 'Could not use type {} as subscript index'.format(type(index)) + ) + + try: + if isinstance(self.value, six.string_types): + return Const(self.value[index_value]) + if isinstance(self.value, bytes) and six.PY3: + # Bytes aren't instances of six.string_types + # on Python 3. Also, indexing them should return + # integers. 
+ return Const(self.value[index_value]) + except IndexError as exc: + util.reraise(exceptions.AstroidIndexError( + message='Index {index!r} out of range', error=exc, + node=self, index=index, context=context)) + except TypeError as exc: + util.reraise(exceptions.AstroidTypeError( + message='Type error {error!r}', error=exc, + node=self, index=index, context=context)) + + raise exceptions.AstroidTypeError( + '%r (value=%s)' % (self, self.value) + ) def has_dynamic_getattr(self): return False @@ -569,17 +1313,20 @@ def pytype(self): return self._proxied.qname() + def bool_value(self): + return bool(self.value) + -class Continue(bases.Statement): +class Continue(Statement): """class representing a Continue node""" -class Decorators(bases.NodeNG): +class Decorators(NodeNG): """class representing a Decorators node""" _astroid_fields = ('nodes',) nodes = None - def __init__(self, nodes=None): + def postinit(self, nodes): self.nodes = nodes def scope(self): @@ -587,29 +1334,49 @@ return self.parent.parent.scope() -class DelAttr(mixins.ParentAssignTypeMixin, bases.NodeNG): +class DelAttr(mixins.ParentAssignTypeMixin, NodeNG): """class representing a DelAttr node""" _astroid_fields = ('expr',) + _other_fields = ('attrname',) expr = None + def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): + self.attrname = attrname + super(DelAttr, self).__init__(lineno, col_offset, parent) + + def postinit(self, expr=None): + self.expr = expr -class Delete(mixins.AssignTypeMixin, bases.Statement): +class Delete(mixins.AssignTypeMixin, Statement): """class representing a Delete node""" _astroid_fields = ('targets',) targets = None + def postinit(self, targets=None): + self.targets = targets -class Dict(bases.NodeNG, bases.Instance): + +class Dict(NodeNG, bases.Instance): """class representing a Dict node""" _astroid_fields = ('items',) - def __init__(self, items=None): + def __init__(self, lineno=None, col_offset=None, parent=None): + self.items = [] + super(Dict, self).__init__(lineno, col_offset, parent) + + def postinit(self, items): + self.items = items + + @classmethod + def from_constants(cls, items=None): + node = cls() if items is None: - self.items = [] + node.items = [] else: - self.items = [(const_factory(k), const_factory(v)) - for k, v in list(items.items())] + node.items = [(const_factory(k), const_factory(v)) + for k, v in items.items()] + return node def pytype(self): return '%s.dict' % BUILTINS @@ -630,56 +1397,72 @@ def itered(self): return self.items[::2] - def getitem(self, lookup_key, context=None): + def getitem(self, index, context=None): for key, value in self.items: # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}. if isinstance(key, DictUnpack): try: - return value.getitem(lookup_key, context) - except IndexError: + return value.getitem(index, context) + except (exceptions.AstroidTypeError, exceptions.AstroidIndexError): continue for inferredkey in key.infer(context): - if inferredkey is util.YES: + if inferredkey is util.Uninferable: continue - if isinstance(inferredkey, Const) \ - and inferredkey.value == lookup_key: - return value - # This should raise KeyError, but all call sites only catch - # IndexError. Let's leave it like that for now. 
- raise IndexError(lookup_key) + if isinstance(inferredkey, Const) and isinstance(index, Const): + if inferredkey.value == index.value: + return value + + raise exceptions.AstroidIndexError(index) + + def bool_value(self): + return bool(self.items) -class Expr(bases.Statement): +class Expr(Statement): """class representing a Expr node""" _astroid_fields = ('value',) value = None + def postinit(self, value=None): + self.value = value + -class Ellipsis(bases.NodeNG): # pylint: disable=redefined-builtin +class Ellipsis(NodeNG): # pylint: disable=redefined-builtin """class representing an Ellipsis node""" + def bool_value(self): + return True + -class EmptyNode(bases.NodeNG): +class EmptyNode(NodeNG): """class representing an EmptyNode node""" + object = None + -class ExceptHandler(mixins.AssignTypeMixin, bases.Statement): +class ExceptHandler(mixins.AssignTypeMixin, Statement): """class representing an ExceptHandler node""" _astroid_fields = ('type', 'name', 'body',) type = None name = None body = None + # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. + def postinit(self, type=None, name=None, body=None): + self.type = type + self.name = name + self.body = body + @decorators.cachedproperty def blockstart_tolineno(self): if self.name: return self.name.tolineno elif self.type: return self.type.tolineno - else: - return self.lineno - def catch(self, exceptions): + return self.lineno + + def catch(self, exceptions): # pylint: disable=redefined-outer-name if self.type is None or exceptions is None: return True for node in self.type.nodes_of_class(Name): @@ -687,20 +1470,30 @@ return True -class Exec(bases.Statement): +class Exec(Statement): """class representing an Exec node""" _astroid_fields = ('expr', 'globals', 'locals',) expr = None globals = None locals = None + # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. + def postinit(self, expr=None, globals=None, locals=None): + self.expr = expr + self.globals = globals + self.locals = locals + -class ExtSlice(bases.NodeNG): +class ExtSlice(NodeNG): """class representing an ExtSlice node""" _astroid_fields = ('dims',) dims = None -class For(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): + def postinit(self, dims=None): + self.dims = dims + + +class For(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement): """class representing a For node""" _astroid_fields = ('target', 'iter', 'body', 'orelse',) target = None @@ -708,6 +1501,13 @@ body = None orelse = None + # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. 
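For illustration only (not part of the patch): a sketch of the getitem change above, assuming astroid 1.5.x. The index is now passed as a node, and failures surface as AstroidIndexError / AstroidTypeError rather than a bare IndexError.

import astroid
from astroid import exceptions

mapping = astroid.extract_node('{"a": 1}')
missing_key = astroid.extract_node('"b"')
try:
    mapping.getitem(missing_key)
except exceptions.AstroidIndexError:
    print('no such key')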
+ def postinit(self, target=None, iter=None, body=None, orelse=None): + self.target = target + self.iter = iter + self.body = body + self.orelse = orelse + optional_assign = True @decorators.cachedproperty def blockstart_tolineno(self): @@ -718,7 +1518,7 @@ """Asynchronous For built with `async` keyword.""" -class Await(bases.NodeNG): +class Await(NodeNG): """Await node for the `await` keyword.""" _astroid_fields = ('value', ) @@ -728,37 +1528,56 @@ self.value = value -class ImportFrom(mixins.ImportFromMixin, bases.Statement): - """class representing a From node""" +class ImportFrom(mixins.ImportFromMixin, Statement): + """class representing a ImportFrom node""" + _other_fields = ('modname', 'names', 'level') - def __init__(self, fromname, names, level=0): + def __init__(self, fromname, names, level=0, lineno=None, + col_offset=None, parent=None): self.modname = fromname self.names = names self.level = level + super(ImportFrom, self).__init__(lineno, col_offset, parent) + -class Attribute(bases.NodeNG): +class Attribute(NodeNG): """class representing a Attribute node""" _astroid_fields = ('expr',) + _other_fields = ('attrname',) expr = None + def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None): + self.attrname = attrname + super(Attribute, self).__init__(lineno, col_offset, parent) -class Global(bases.Statement): + def postinit(self, expr=None): + self.expr = expr + + +class Global(Statement): """class representing a Global node""" + _other_fields = ('names',) - def __init__(self, names): + def __init__(self, names, lineno=None, col_offset=None, parent=None): self.names = names + super(Global, self).__init__(lineno, col_offset, parent) def _infer_name(self, frame, name): return name -class If(mixins.BlockRangeMixIn, bases.Statement): +class If(mixins.BlockRangeMixIn, Statement): """class representing an If node""" _astroid_fields = ('test', 'body', 'orelse') test = None body = None orelse = None + def postinit(self, test=None, body=None, orelse=None): + self.test = test + self.body = body + self.orelse = orelse + @decorators.cachedproperty def blockstart_tolineno(self): return self.test.tolineno @@ -773,73 +1592,119 @@ self.body[0].fromlineno - 1) -class IfExp(bases.NodeNG): +class IfExp(NodeNG): """class representing an IfExp node""" _astroid_fields = ('test', 'body', 'orelse') test = None body = None orelse = None + def postinit(self, test=None, body=None, orelse=None): + self.test = test + self.body = body + self.orelse = orelse + -class Import(mixins.ImportFromMixin, bases.Statement): +class Import(mixins.ImportFromMixin, Statement): """class representing an Import node""" + _other_fields = ('names',) + + def __init__(self, names=None, lineno=None, col_offset=None, parent=None): + self.names = names + super(Import, self).__init__(lineno, col_offset, parent) -class Index(bases.NodeNG): +class Index(NodeNG): """class representing an Index node""" _astroid_fields = ('value',) value = None + def postinit(self, value=None): + self.value = value + -class Keyword(bases.NodeNG): +class Keyword(NodeNG): """class representing a Keyword node""" _astroid_fields = ('value',) + _other_fields = ('arg',) value = None + def __init__(self, arg=None, lineno=None, col_offset=None, parent=None): + self.arg = arg + super(Keyword, self).__init__(lineno, col_offset, parent) + + def postinit(self, value=None): + self.value = value + class List(_BaseContainer): """class representing a List node""" + _other_fields = ('ctx',) + + def __init__(self, ctx=None, lineno=None, + 
col_offset=None, parent=None): + self.ctx = ctx + super(List, self).__init__(lineno, col_offset, parent) def pytype(self): return '%s.list' % BUILTINS def getitem(self, index, context=None): - return self.elts[index] + return _container_getitem(self, self.elts, index, context=context) -class Nonlocal(bases.Statement): +class Nonlocal(Statement): """class representing a Nonlocal node""" + _other_fields = ('names',) - def __init__(self, names): + def __init__(self, names, lineno=None, col_offset=None, parent=None): self.names = names + super(Nonlocal, self).__init__(lineno, col_offset, parent) def _infer_name(self, frame, name): return name -class Pass(bases.Statement): +class Pass(Statement): """class representing a Pass node""" -class Print(bases.Statement): +class Print(Statement): """class representing a Print node""" _astroid_fields = ('dest', 'values',) dest = None values = None + def __init__(self, nl=None, lineno=None, col_offset=None, parent=None): + self.nl = nl + super(Print, self).__init__(lineno, col_offset, parent) + + def postinit(self, dest=None, values=None): + self.dest = dest + self.values = values -class Raise(bases.Statement): + +class Raise(Statement): """class representing a Raise node""" exc = None if six.PY2: _astroid_fields = ('exc', 'inst', 'tback') inst = None tback = None + + def postinit(self, exc=None, inst=None, tback=None): + self.exc = exc + self.inst = inst + self.tback = tback else: _astroid_fields = ('exc', 'cause') exc = None cause = None + def postinit(self, exc=None, cause=None): + self.exc = exc + self.cause = cause + def raises_not_implemented(self): if not self.exc: return @@ -848,11 +1713,14 @@ return True -class Return(bases.Statement): +class Return(Statement): """class representing a Return node""" _astroid_fields = ('value',) value = None + def postinit(self, value=None): + self.value = value + class Set(_BaseContainer): """class representing a Set node""" @@ -861,33 +1729,94 @@ return '%s.set' % BUILTINS -class Slice(bases.NodeNG): +class Slice(NodeNG): """class representing a Slice node""" _astroid_fields = ('lower', 'upper', 'step') lower = None upper = None step = None -class Starred(mixins.ParentAssignTypeMixin, bases.NodeNG): + def postinit(self, lower=None, upper=None, step=None): + self.lower = lower + self.upper = upper + self.step = step + + def _wrap_attribute(self, attr): + """Wrap the empty attributes of the Slice in a Const node.""" + if not attr: + const = const_factory(attr) + const.parent = self + return const + return attr + + @decorators.cachedproperty + def _proxied(self): + builtins = MANAGER.astroid_cache[BUILTINS] + return builtins.getattr('slice')[0] + + def pytype(self): + return '%s.slice' % BUILTINS + + def igetattr(self, attrname, context=None): + if attrname == 'start': + yield self._wrap_attribute(self.lower) + elif attrname == 'stop': + yield self._wrap_attribute(self.upper) + elif attrname == 'step': + yield self._wrap_attribute(self.step) + else: + for value in self.getattr(attrname, context=context): + yield value + + def getattr(self, attrname, context=None): + return self._proxied.getattr(attrname, context) + + +class Starred(mixins.ParentAssignTypeMixin, NodeNG): """class representing a Starred node""" _astroid_fields = ('value',) + _other_fields = ('ctx', ) value = None + def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): + self.ctx = ctx + super(Starred, self).__init__(lineno=lineno, + col_offset=col_offset, parent=parent) -class Subscript(bases.NodeNG): + def postinit(self, 
value=None): + self.value = value + + +class Subscript(NodeNG): """class representing a Subscript node""" _astroid_fields = ('value', 'slice') + _other_fields = ('ctx', ) value = None slice = None + def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None): + self.ctx = ctx + super(Subscript, self).__init__(lineno=lineno, + col_offset=col_offset, parent=parent) -class TryExcept(mixins.BlockRangeMixIn, bases.Statement): + # pylint: disable=redefined-builtin; had to use the same name as builtin ast module. + def postinit(self, value=None, slice=None): + self.value = value + self.slice = slice + + +class TryExcept(mixins.BlockRangeMixIn, Statement): """class representing a TryExcept node""" _astroid_fields = ('body', 'handlers', 'orelse',) body = None handlers = None orelse = None + def postinit(self, body=None, handlers=None, orelse=None): + self.body = body + self.handlers = handlers + self.orelse = orelse + def _infer_name(self, frame, name): return name @@ -904,12 +1833,16 @@ return self._elsed_block_range(lineno, self.orelse, last) -class TryFinally(mixins.BlockRangeMixIn, bases.Statement): +class TryFinally(mixins.BlockRangeMixIn, Statement): """class representing a TryFinally node""" _astroid_fields = ('body', 'finalbody',) body = None finalbody = None + def postinit(self, body=None, finalbody=None): + self.body = body + self.finalbody = finalbody + def block_range(self, lineno): """handle block line numbers range for try/finally statements""" child = self.body[0] @@ -923,41 +1856,82 @@ class Tuple(_BaseContainer): """class representing a Tuple node""" + _other_fields = ('ctx',) + + def __init__(self, ctx=None, lineno=None, + col_offset=None, parent=None): + self.ctx = ctx + super(Tuple, self).__init__(lineno, col_offset, parent) + def pytype(self): return '%s.tuple' % BUILTINS def getitem(self, index, context=None): - return self.elts[index] + return _container_getitem(self, self.elts, index, context=context) -class UnaryOp(bases.NodeNG): +class UnaryOp(NodeNG): """class representing an UnaryOp node""" _astroid_fields = ('operand',) + _other_fields = ('op',) operand = None + def __init__(self, op=None, lineno=None, col_offset=None, parent=None): + self.op = op + super(UnaryOp, self).__init__(lineno, col_offset, parent) + + def postinit(self, operand=None): + self.operand = operand + + # This is set by inference.py + def _infer_unaryop(self, context=None): + raise NotImplementedError + + def type_errors(self, context=None): + """Return a list of TypeErrors which can occur during inference. -class While(mixins.BlockRangeMixIn, bases.Statement): + Each TypeError is represented by a :class:`BadUnaryOperationMessage`, + which holds the original exception. + """ + try: + results = self._infer_unaryop(context=context) + return [result for result in results + if isinstance(result, util.BadUnaryOperationMessage)] + except exceptions.InferenceError: + return [] + + +class While(mixins.BlockRangeMixIn, Statement): """class representing a While node""" _astroid_fields = ('test', 'body', 'orelse',) test = None body = None orelse = None + def postinit(self, test=None, body=None, orelse=None): + self.test = test + self.body = body + self.orelse = orelse + @decorators.cachedproperty def blockstart_tolineno(self): return self.test.tolineno def block_range(self, lineno): - """handle block line numbers range for for and while statements""" + """handle block line numbers range for and while statements""" return self. 
_elsed_block_range(lineno, self.orelse) -class With(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, bases.Statement): +class With(mixins.BlockRangeMixIn, mixins.AssignTypeMixin, Statement): """class representing a With node""" _astroid_fields = ('items', 'body') items = None body = None + def postinit(self, items=None, body=None): + self.items = items + self.body = body + @decorators.cachedproperty def blockstart_tolineno(self): return self.items[-1][0].tolineno @@ -975,19 +1949,56 @@ """Asynchronous `with` built with the `async` keyword.""" -class Yield(bases.NodeNG): +class Yield(NodeNG): """class representing a Yield node""" _astroid_fields = ('value',) value = None + def postinit(self, value=None): + self.value = value + + class YieldFrom(Yield): """ Class representing a YieldFrom node. """ -class DictUnpack(bases.NodeNG): +class DictUnpack(NodeNG): """Represents the unpacking of dicts into dicts using PEP 448.""" +class FormattedValue(NodeNG): + """Represents a PEP 498 format string.""" + _astroid_fields = ('value', 'format_spec') + value = None + conversion = None + format_spec = None + + def postinit(self, value, conversion=None, format_spec=None): + self.value = value + self.conversion = conversion + self.format_spec = format_spec + + +class JoinedStr(NodeNG): + """Represents a list of string expressions to be joined.""" + _astroid_fields = ('values',) + values = None + + def postinit(self, values=None): + self.values = values + + +class Unknown(NodeNG): + '''This node represents a node in a constructed AST where + introspection is not possible. At the moment, it's only used in + the args attribute of FunctionDef nodes where function signature + introspection failed. + ''' + def infer(self, context=None, **kwargs): + '''Inference on an Unknown node immediately terminates.''' + yield util.Uninferable + + # constants ############################################################## CONST_CLS = { @@ -1003,6 +2014,7 @@ """update constant classes, so the keys of CONST_CLS can be reused""" klasses = (bool, int, float, complex, str) if six.PY2: + # pylint: disable=undefined-variable klasses += (unicode, long) klasses += (bytes,) for kls in klasses: @@ -1010,6 +2022,27 @@ _update_const_classes() +def _two_step_initialization(cls, value): + instance = cls() + instance.postinit(value) + return instance + + +def _dict_initialization(cls, value): + if isinstance(value, dict): + value = tuple(value.items()) + return _two_step_initialization(cls, value) + + +_CONST_CLS_CONSTRUCTORS = { + List: _two_step_initialization, + Tuple: _two_step_initialization, + Dict: _dict_initialization, + Set: _two_step_initialization, + Const: lambda cls, value: cls(value) +} + + def const_factory(value): """return an astroid node for a python value""" # XXX we should probably be stricter here and only consider stuff in @@ -1017,9 +2050,21 @@ # we should rather recall the builder on this value than returning an empty # node (another option being that const_factory shouldn't be called with something # not in CONST_CLS) - assert not isinstance(value, bases.NodeNG) + assert not isinstance(value, NodeNG) + + # Hack for ignoring elements of a sequence + # or a mapping, in order to avoid transforming + # each element to an AST. This is fixed in 2.0 + # and this approach is a temporary hack. 
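For illustration only (not part of the patch, assumes astroid 1.5.x): the behaviour described by the hack comment above. const_factory now drops the elements of sequence and mapping values, while the new from_constants classmethods still build fully populated container nodes.

from astroid import nodes

print(nodes.const_factory(42).value)                 # 42
print(nodes.const_factory([1, 2]).elts)              # [], elements ignored
print(len(nodes.List.from_constants([1, 2]).elts))   # 2 Const children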
+ if isinstance(value, (list, set, tuple, dict)): + elts = [] + else: + elts = value + try: - return CONST_CLS[value.__class__](value) + initializer_cls = CONST_CLS[value.__class__] + initializer = _CONST_CLS_CONSTRUCTORS[initializer_cls] + return initializer(initializer_cls, elts) except (KeyError, AttributeError): node = EmptyNode() node.object = value @@ -1027,27 +2072,11 @@ # Backward-compatibility aliases -def instancecheck(cls, other): - wrapped = cls.__wrapped__ - other_cls = other.__class__ - is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped) - warnings.warn("%r is deprecated and slated for removal in astroid " - "2.0, use %r instead" % (cls.__class__.__name__, - wrapped.__name__), - PendingDeprecationWarning, stacklevel=2) - return is_instance_of - - -def proxy_alias(alias_name, node_type): - proxy = type(alias_name, (lazy_object_proxy.Proxy,), - {'__class__': object.__dict__['__class__'], - '__instancecheck__': instancecheck}) - return proxy(lambda: node_type) - -Backquote = proxy_alias('Backquote', Repr) -Discard = proxy_alias('Discard', Expr) -AssName = proxy_alias('AssName', AssignName) -AssAttr = proxy_alias('AssAttr', AssignAttr) -Getattr = proxy_alias('Getattr', Attribute) -CallFunc = proxy_alias('CallFunc', Call) -From = proxy_alias('From', ImportFrom) + +Backquote = util.proxy_alias('Backquote', Repr) +Discard = util.proxy_alias('Discard', Expr) +AssName = util.proxy_alias('AssName', AssignName) +AssAttr = util.proxy_alias('AssAttr', AssignAttr) +Getattr = util.proxy_alias('Getattr', Attribute) +CallFunc = util.proxy_alias('CallFunc', Call) +From = util.proxy_alias('From', ImportFrom) diff -Nru astroid-1.4.9/astroid/nodes.py astroid-1.5.3/astroid/nodes.py --- astroid-1.4.9/astroid/nodes.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/nodes.py 2017-03-12 12:27:04.000000000 +0000 @@ -1,20 +1,11 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """ on all nodes : .is_statement, returning true if the node should be considered as a @@ -37,7 +28,7 @@ # pylint: disable=unused-import,redefined-builtin from astroid.node_classes import ( - Arguments, AssignAttr, Assert, Assign, + Arguments, AssignAttr, Assert, Assign, AnnAssign, AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, Compare, Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, Dict, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, @@ -46,10 +37,12 @@ TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, const_factory, AsyncFor, Await, AsyncWith, + FormattedValue, JoinedStr, # Backwards-compatibility aliases Backquote, Discard, AssName, AssAttr, Getattr, CallFunc, From, # Node not present in the builtin ast module. DictUnpack, + Unknown, ) from astroid.scoped_nodes import ( Module, GeneratorExp, Lambda, DictComp, @@ -64,7 +57,7 @@ ALL_NODE_CLASSES = ( AsyncFunctionDef, AsyncFor, AsyncWith, Await, - Arguments, AssignAttr, Assert, Assign, AssignName, AugAssign, + Arguments, AssignAttr, Assert, Assign, AnnAssign, AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, ClassDef, Compare, Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, @@ -84,4 +77,5 @@ UnaryOp, While, With, Yield, YieldFrom, + FormattedValue, JoinedStr, ) diff -Nru astroid-1.4.9/astroid/objects.py astroid-1.5.3/astroid/objects.py --- astroid-1.4.9/astroid/objects.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/objects.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,73 +1,50 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """ Inference objects are a way to represent composite AST nodes, which are used only as inference results, so they can't be found in the -code tree. For instance, inferring the following frozenset use, leads to an -inferred FrozenSet: - - CallFunc(func=Name('frozenset'), args=Tuple(...)) +original AST tree. 
For instance, inferring the following frozenset use, +leads to an inferred FrozenSet: + Call(func=Name('frozenset'), args=Tuple(...)) """ import six +from astroid import bases +from astroid import decorators +from astroid import exceptions from astroid import MANAGER -from astroid.bases import ( - BUILTINS, NodeNG, Instance, _infer_stmts, - BoundMethod, _is_property -) -from astroid.decorators import cachedproperty -from astroid.exceptions import ( - SuperError, SuperArgumentTypeError, - NotFoundError, MroError -) -from astroid.node_classes import const_factory -from astroid.scoped_nodes import ClassDef, FunctionDef -from astroid.mixins import ParentAssignTypeMixin +from astroid import node_classes +from astroid import scoped_nodes +from astroid import util -class FrozenSet(NodeNG, Instance, ParentAssignTypeMixin): - """class representing a FrozenSet composite node""" +BUILTINS = six.moves.builtins.__name__ +objectmodel = util.lazy_import('interpreter.objectmodel') - def __init__(self, elts=None): - if elts is None: - self.elts = [] - else: - self.elts = [const_factory(e) for e in elts] + +class FrozenSet(node_classes._BaseContainer): + """class representing a FrozenSet composite node""" def pytype(self): return '%s.frozenset' % BUILTINS - def itered(self): - return self.elts - def _infer(self, context=None): yield self - @cachedproperty - def _proxied(self): + @decorators.cachedproperty + def _proxied(self): # pylint: disable=method-hidden builtins = MANAGER.astroid_cache[BUILTINS] return builtins.getattr('frozenset')[0] -class Super(NodeNG): +class Super(node_classes.NodeNG): """Proxy class over a super call. This class offers almost the same behaviour as Python's super, @@ -79,50 +56,53 @@ *self_class* is the class where the super call is, while *scope* is the function where the super call is. """ + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel()) + # pylint: disable=super-init-not-called def __init__(self, mro_pointer, mro_type, self_class, scope): self.type = mro_type self.mro_pointer = mro_pointer self._class_based = False self._self_class = self_class self._scope = scope - self._model = { - '__thisclass__': self.mro_pointer, - '__self_class__': self._self_class, - '__self__': self.type, - '__class__': self._proxied, - } def _infer(self, context=None): yield self def super_mro(self): """Get the MRO which will be used to lookup attributes in this super.""" - if not isinstance(self.mro_pointer, ClassDef): - raise SuperArgumentTypeError("The first super argument must be type.") + if not isinstance(self.mro_pointer, scoped_nodes.ClassDef): + raise exceptions.SuperError( + "The first argument to super must be a subtype of " + "type, not {mro_pointer}.", super_=self) - if isinstance(self.type, ClassDef): + if isinstance(self.type, scoped_nodes.ClassDef): # `super(type, type)`, most likely in a class method. 
self._class_based = True mro_type = self.type else: mro_type = getattr(self.type, '_proxied', None) - if not isinstance(mro_type, (Instance, ClassDef)): - raise SuperArgumentTypeError("super(type, obj): obj must be an " - "instance or subtype of type") + if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)): + raise exceptions.SuperError( + "The second argument to super must be an " + "instance or subtype of type, not {type}.", + super_=self) if not mro_type.newstyle: - raise SuperError("Unable to call super on old-style classes.") + raise exceptions.SuperError("Unable to call super on old-style classes.", super_=self) mro = mro_type.mro() if self.mro_pointer not in mro: - raise SuperArgumentTypeError("super(type, obj): obj must be an " - "instance or subtype of type") + raise exceptions.SuperError( + "The second argument to super must be an " + "instance or subtype of type, not {type}.", + super_=self) index = mro.index(self.mro_pointer) return mro[index + 1:] - @cachedproperty + @decorators.cachedproperty def _proxied(self): builtins = MANAGER.astroid_cache[BUILTINS] return builtins.getattr('super')[0] @@ -141,46 +121,100 @@ def igetattr(self, name, context=None): """Retrieve the inferred values of the given attribute name.""" - local_name = self._model.get(name) - if local_name: - yield local_name + if name in self.special_attributes: + yield self.special_attributes.lookup(name) return try: mro = self.super_mro() - except (MroError, SuperError) as exc: - # Don't let invalid MROs or invalid super calls - # to leak out as is from this function. - six.raise_from(NotFoundError, exc) - + # Don't let invalid MROs or invalid super calls + # leak out as is from this function. + except exceptions.SuperError as exc: + util.reraise(exceptions.AttributeInferenceError( + ('Lookup for {name} on {target!r} because super call {super!r} ' + 'is invalid.'), + target=self, attribute=name, context=context, super_=exc.super_)) + except exceptions.MroError as exc: + util.reraise(exceptions.AttributeInferenceError( + ('Lookup for {name} on {target!r} failed because {cls!r} has an ' + 'invalid MRO.'), + target=self, attribute=name, context=context, mros=exc.mros, + cls=exc.cls)) found = False for cls in mro: - if name not in cls._locals: + if name not in cls.locals: continue found = True - for infered in _infer_stmts([cls[name]], context, frame=self): - if not isinstance(infered, FunctionDef): - yield infered + for inferred in bases._infer_stmts([cls[name]], context, frame=self): + if not isinstance(inferred, scoped_nodes.FunctionDef): + yield inferred continue # We can obtain different descriptors from a super depending # on what we are accessing and where the super call is. - if infered.type == 'classmethod': - yield BoundMethod(infered, cls) - elif self._scope.type == 'classmethod' and infered.type == 'method': - yield infered - elif self._class_based or infered.type == 'staticmethod': - yield infered - elif _is_property(infered): + if inferred.type == 'classmethod': + yield bases.BoundMethod(inferred, cls) + elif self._scope.type == 'classmethod' and inferred.type == 'method': + yield inferred + elif self._class_based or inferred.type == 'staticmethod': + yield inferred + elif bases._is_property(inferred): # TODO: support other descriptors as well. 
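
The branches of igetattr above follow what a runtime attribute lookup through super produces: classmethods come back bound to the class, staticmethods come back as plain functions, and properties are evaluated to their return value. A quick runtime illustration, with invented names, of the cases being modelled:

class Base(object):
    @classmethod
    def cmethod(cls):
        return cls

    @staticmethod
    def smethod():
        return "static"

    @property
    def prop(self):
        return 42

class Child(Base):
    pass

obj = Child()
assert super(Child, obj).cmethod() is Child     # classmethod: bound to the class
assert super(Child, obj).smethod() == "static"  # staticmethod: the plain function
assert super(Child, obj).prop == 42             # property: the descriptor is evaluated
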
- for value in infered.infer_call_result(self, context): + for value in inferred.infer_call_result(self, context): yield value else: - yield BoundMethod(infered, cls) + yield bases.BoundMethod(inferred, cls) if not found: - raise NotFoundError(name) + raise exceptions.AttributeInferenceError(target=self, + attribute=name, + context=context) def getattr(self, name, context=None): return list(self.igetattr(name, context=context)) + + +class ExceptionInstance(bases.Instance): + """Class for instances of exceptions + + It has special treatment for some of the exceptions's attributes, + which are transformed at runtime into certain concrete objects, such as + the case of .args. + """ + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.ExceptionInstanceModel()) + + +class DictInstance(bases.Instance): + """Special kind of instances for dictionaries + + This instance knows the underlying object model of the dictionaries, which means + that methods such as .values or .items can be properly inferred. + """ + + # pylint: disable=unnecessary-lambda + special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel()) + + +# Custom objects tailored for dictionaries, which are used to +# disambiguate between the types of Python 2 dict's method returns +# and Python 3 (where they return set like objects). +class DictItems(bases.Proxy): + __str__ = node_classes.NodeNG.__str__ + __repr__ = node_classes.NodeNG.__repr__ + + +class DictKeys(bases.Proxy): + __str__ = node_classes.NodeNG.__str__ + __repr__ = node_classes.NodeNG.__repr__ + + +class DictValues(bases.Proxy): + __str__ = node_classes.NodeNG.__str__ + __repr__ = node_classes.NodeNG.__repr__ + +# TODO: Hack to solve the circular import problem between node_classes and objects +# This is not needed in 2.0, which has a cleaner design overall +node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance) diff -Nru astroid-1.4.9/astroid/__pkginfo__.py astroid-1.5.3/astroid/__pkginfo__.py --- astroid-1.4.9/astroid/__pkginfo__.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/__pkginfo__.py 2017-06-03 13:48:03.000000000 +0000 @@ -1,30 +1,49 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+ +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """astroid packaging information""" + +from sys import version_info as py_version + +from pkg_resources import parse_version +from setuptools import __version__ as setuptools_version + distname = 'astroid' modname = 'astroid' -numversion = (1, 4, 9) -version = '.'.join([str(num) for num in numversion]) +version = '1.5.3' +numversion = tuple(map(int, version.split('.'))) + +extras_require = {} +install_requires = ['lazy_object_proxy', 'six', 'wrapt'] + + +def has_environment_marker_range_operators_support(): + """Code extracted from 'pytest/setup.py' + https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31 + + The first known release to support environment marker with range operators + it is 17.1, see: https://setuptools.readthedocs.io/en/latest/history.html#id113 + """ + return parse_version(setuptools_version) >= parse_version('17.1') + + +if has_environment_marker_range_operators_support(): + extras_require[':python_version<"3.4"'] = ['enum34>=1.1.3', 'singledispatch'] + extras_require[':python_version<"3.3"'] = ['backports.functools_lru_cache'] +else: + if py_version < (3, 4): + install_requires.extend(['enum34', 'singledispatch']) + if py_version < (3, 3): + install_requires.append('backports.functools_lru_cache') -install_requires = ['six', 'lazy_object_proxy', 'wrapt'] +# pylint: disable=redefined-builtin; why license is a builtin anyway? license = 'LGPL' author = 'Python Code Quality Authority' diff -Nru astroid-1.4.9/astroid/protocols.py astroid-1.5.3/astroid/protocols.py --- astroid-1.4.9/astroid/protocols.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/protocols.py 2017-06-01 22:07:25.000000000 +0000 @@ -1,39 +1,45 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """this module contains a set of functions to handle python protocols for nodes where it makes sense. 
""" import collections -import operator +import operator as operator_mod import sys +import six + from astroid import arguments from astroid import bases from astroid import context as contextmod from astroid import exceptions +from astroid import decorators from astroid import node_classes +from astroid import helpers from astroid import nodes from astroid import util +raw_building = util.lazy_import('raw_building') +objects = util.lazy_import('objects') + +def _reflected_name(name): + return "__r" + name[2:] + +def _augmented_name(name): + return "__i" + name[2:] + + +_CONTEXTLIB_MGR = 'contextlib.contextmanager' BIN_OP_METHOD = {'+': '__add__', '-': '__sub__', - '/': '__div__', + '/': '__div__' if six.PY2 else '__truediv__', '//': '__floordiv__', '*': '__mul__', '**': '__pow__', @@ -46,41 +52,40 @@ '@': '__matmul__' } +REFLECTED_BIN_OP_METHOD = { + key: _reflected_name(value) + for (key, value) in BIN_OP_METHOD.items() +} +AUGMENTED_OP_METHOD = { + key + "=": _augmented_name(value) + for (key, value) in BIN_OP_METHOD.items() +} + UNARY_OP_METHOD = {'+': '__pos__', '-': '__neg__', '~': '__invert__', 'not': None, # XXX not '__nonzero__' } +_UNARY_OPERATORS = { + '+': operator_mod.pos, + '-': operator_mod.neg, + '~': operator_mod.invert, + 'not': operator_mod.not_, +} + + +def _infer_unary_op(obj, op): + func = _UNARY_OPERATORS[op] + value = func(obj) + return nodes.const_factory(value) + +nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op) +nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op) +nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op) +nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op) +nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op) -# unary operations ############################################################ - -def tl_infer_unary_op(self, operator): - if operator == 'not': - return node_classes.const_factory(not bool(self.elts)) - raise TypeError() # XXX log unsupported operation -nodes.Tuple.infer_unary_op = tl_infer_unary_op -nodes.List.infer_unary_op = tl_infer_unary_op - - -def dict_infer_unary_op(self, operator): - if operator == 'not': - return node_classes.const_factory(not bool(self.items)) - raise TypeError() # XXX log unsupported operation -nodes.Dict.infer_unary_op = dict_infer_unary_op - - -def const_infer_unary_op(self, operator): - if operator == 'not': - return node_classes.const_factory(not self.value) - # XXX log potentially raised TypeError - elif operator == '+': - return node_classes.const_factory(+self.value) - else: # operator == '-': - return node_classes.const_factory(-self.value) -nodes.Const.infer_unary_op = const_infer_unary_op - - -# binary operations ########################################################### +# Binary operations BIN_OP_IMPL = {'+': lambda a, b: a + b, '-': lambda a, b: a - b, @@ -95,50 +100,45 @@ '<<': lambda a, b: a << b, '>>': lambda a, b: a >> b, } - if sys.version_info >= (3, 5): # MatMult is available since Python 3.5+. 
- BIN_OP_IMPL['@'] = operator.matmul + BIN_OP_IMPL['@'] = operator_mod.matmul -for key, impl in list(BIN_OP_IMPL.items()): - BIN_OP_IMPL[key+'='] = impl +for _KEY, _IMPL in list(BIN_OP_IMPL.items()): + BIN_OP_IMPL[_KEY + '='] = _IMPL -def const_infer_binary_op(self, binop, other, context): - operator = binop.op - for other in other.infer(context): - if isinstance(other, nodes.Const): - try: - impl = BIN_OP_IMPL[operator] - try: - yield node_classes.const_factory(impl(self.value, other.value)) - except Exception: - # ArithmeticError is not enough: float >> float is a TypeError - # TODO : let pylint know about the problem - pass - except TypeError: - # XXX log TypeError - continue - elif other is util.YES: - yield other - else: +@decorators.yes_if_nothing_inferred +def const_infer_binary_op(self, opnode, operator, other, context, _): + not_implemented = nodes.Const(NotImplemented) + if isinstance(other, nodes.Const): + try: + impl = BIN_OP_IMPL[operator] try: - for val in other.infer_binary_op(binop, self, context): - yield val - except AttributeError: - yield util.YES -nodes.Const.infer_binary_op = bases.yes_if_nothing_inferred(const_infer_binary_op) + yield nodes.const_factory(impl(self.value, other.value)) + except TypeError: + # ArithmeticError is not enough: float >> float is a TypeError + yield not_implemented + except Exception: # pylint: disable=broad-except + yield util.Uninferable + except TypeError: + yield not_implemented + elif isinstance(self.value, six.string_types) and operator == '%': + # TODO(cpopa): implement string interpolation later on. + yield util.Uninferable + else: + yield not_implemented +nodes.Const.infer_binary_op = const_infer_binary_op -def _multiply_seq_by_int(self, binop, other, context): - node = self.__class__() - node.parent = binop +def _multiply_seq_by_int(self, opnode, other, context): + node = self.__class__(parent=opnode) elts = [] for elt in self.elts: - infered = util.safe_infer(elt, context) + infered = helpers.safe_infer(elt, context) if infered is None: - infered = util.YES + infered = util.Uninferable elts.append(infered) node.elts = elts * other.value return node @@ -146,61 +146,50 @@ def _filter_uninferable_nodes(elts, context): for elt in elts: - if elt is util.YES: - yield elt + if elt is util.Uninferable: + yield nodes.Unknown() else: for inferred in elt.infer(context): - yield inferred + if inferred is not util.Uninferable: + yield inferred + else: + yield nodes.Unknown() + + +@decorators.yes_if_nothing_inferred +def tl_infer_binary_op(self, opnode, operator, other, context, method): + not_implemented = nodes.Const(NotImplemented) + if isinstance(other, self.__class__) and operator == '+': + node = self.__class__(parent=opnode) + elts = list(_filter_uninferable_nodes(self.elts, context)) + elts += list(_filter_uninferable_nodes(other.elts, context)) + node.elts = elts + yield node + elif isinstance(other, nodes.Const) and operator == '*': + if not isinstance(other.value, int): + yield not_implemented + return + yield _multiply_seq_by_int(self, opnode, other, context) + elif isinstance(other, bases.Instance) and operator == '*': + # Verify if the instance supports __index__. 
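
The nodes.Const(NotImplemented) results yielded by const_infer_binary_op above model the interpreter's own fallback protocol: when the left operand's method returns NotImplemented, the reflected method of the right operand gets a chance. A plain-Python illustration of that fallback, with invented class names:

class Meters(object):
    def __init__(self, value):
        self.value = value

    def __add__(self, other):
        if not isinstance(other, Meters):
            return NotImplemented          # let the right operand try
        return Meters(self.value + other.value)

class Feet(object):
    def __init__(self, value):
        self.value = value

    def __radd__(self, other):
        # Reached because Meters.__add__ returned NotImplemented.
        return Meters(other.value + self.value * 0.3048)

total = Meters(2) + Feet(10)
assert isinstance(total, Meters)
assert abs(total.value - 5.048) < 1e-9
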
+ as_index = helpers.class_instance_as_index(other) + if not as_index: + yield util.Uninferable + else: + yield _multiply_seq_by_int(self, opnode, as_index, context) + else: + yield not_implemented +nodes.Tuple.infer_binary_op = tl_infer_binary_op +nodes.List.infer_binary_op = tl_infer_binary_op -def tl_infer_binary_op(self, binop, other, context): - operator = binop.op - for other in other.infer(context): - if isinstance(other, self.__class__) and operator == '+': - node = self.__class__() - node.parent = binop - elts = list(_filter_uninferable_nodes(self.elts, context)) - elts += list(_filter_uninferable_nodes(other.elts, context)) - node.elts = elts - yield node - elif isinstance(other, nodes.Const) and operator == '*': - if not isinstance(other.value, int): - yield util.YES - continue - yield _multiply_seq_by_int(self, binop, other, context) - elif isinstance(other, bases.Instance) and not isinstance(other, nodes.Const): - yield util.YES - # XXX else log TypeError -nodes.Tuple.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) -nodes.List.infer_binary_op = bases.yes_if_nothing_inferred(tl_infer_binary_op) - - -def dict_infer_binary_op(self, binop, other, context): - for other in other.infer(context): - if isinstance(other, bases.Instance) and isinstance(other._proxied, nodes.ClassDef): - yield util.YES - # XXX else log TypeError -nodes.Dict.infer_binary_op = bases.yes_if_nothing_inferred(dict_infer_binary_op) -def instance_infer_binary_op(self, binop, other, context): - operator = binop.op - try: - methods = self.getattr(BIN_OP_METHOD[operator]) - except (exceptions.NotFoundError, KeyError): - # Unknown operator - yield util.YES - else: - for method in methods: - if not isinstance(method, nodes.FunctionDef): - continue - for result in method.infer_call_result(self, context): - if result is not util.YES: - yield result - # We are interested only in the first infered method, - # don't go looking in the rest of the methods of the ancestors. 
- break +@decorators.yes_if_nothing_inferred +def instance_class_infer_binary_op(self, opnode, operator, other, context, method): + return method.infer_call_result(self, context) -bases.Instance.infer_binary_op = bases.yes_if_nothing_inferred(instance_infer_binary_op) +bases.Instance.infer_binary_op = instance_class_infer_binary_op +nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op # assignment ################################################################## @@ -221,7 +210,7 @@ asspath = asspath[:] index = asspath.pop(0) for part in parts: - if part is util.YES: + if part is util.Uninferable: continue # XXX handle __iter__ and log potentially detected errors if not hasattr(part, 'itered'): @@ -231,31 +220,35 @@ except TypeError: continue # XXX log error for stmt in itered: + index_node = nodes.Const(index) try: - assigned = stmt.getitem(index, context) - except (AttributeError, IndexError): - continue - except TypeError: # stmt is unsubscriptable Const + assigned = stmt.getitem(index_node, context) + except (AttributeError, + exceptions.AstroidTypeError, + exceptions.AstroidIndexError): continue if not asspath: # we achieved to resolved the assignment path, # don't infer the last part yield assigned - elif assigned is util.YES: + elif assigned is util.Uninferable: break else: # we are not yet on the last part of the path # search on each possibly inferred value try: for inferred in _resolve_looppart(assigned.infer(context), - asspath, context): + asspath, context): yield inferred except exceptions.InferenceError: break - -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred def for_assigned_stmts(self, node=None, context=None, asspath=None): + if isinstance(self, nodes.AsyncFor) or getattr(self, 'is_async', False): + # Skip inferring of async code for now + raise StopIteration(dict(node=self, unknown=node, + assign_path=asspath, context=context)) if asspath is None: for lst in self.iter.infer(context): if isinstance(lst, (nodes.Tuple, nodes.List)): @@ -263,8 +256,12 @@ yield item else: for inferred in _resolve_looppart(self.iter.infer(context), - asspath, context): + asspath, context): yield inferred + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, unknown=node, + assign_path=asspath, context=context)) nodes.For.assigned_stmts = for_assigned_stmts nodes.Comprehension.assigned_stmts = for_assigned_stmts @@ -276,9 +273,9 @@ try: index = self.elts.index(node) except ValueError: - util.reraise(exceptions.InferenceError( - 'Tried to retrieve a node {node!r} which does not exist', - node=self, assign_path=asspath, context=context)) + util.reraise(exceptions.InferenceError( + 'Tried to retrieve a node {node!r} which does not exist', + node=self, assign_path=asspath, context=context)) asspath.insert(0, index) return self.parent.assigned_stmts(node=self, context=context, asspath=asspath) @@ -297,17 +294,21 @@ # arguments information may be missing, in which case we can't do anything # more if not (self.args or self.vararg or self.kwarg): - yield util.YES + yield util.Uninferable return # first argument of instance/class method if self.args and getattr(self.args[0], 'name', None) == name: functype = self.parent.type + cls = self.parent.parent.scope() + is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == 'metaclass' + # If this is a metaclass, then the first argument will always + # be the class, not an instance. 
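
The metaclass branch introduced above encodes a runtime fact: for a method defined on a metaclass and called through a class, and for any classmethod, the first argument is the class object itself rather than an instance. A brief runtime demonstration with invented names; the class is created by calling the metaclass so that the sketch stays Python 2/3 agnostic:

class Meta(type):
    def describe(cls):
        # 'cls' is the class created with this metaclass, never an instance.
        return cls.__name__

Widget = Meta('Widget', (object,), {'build': classmethod(lambda cls: cls)})

assert Widget.describe() == 'Widget'   # metaclass method: first argument is the class
assert Widget.build() is Widget        # classmethod: first argument is the class
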
+ if is_metaclass or functype == 'classmethod': + yield cls + return if functype == 'method': yield bases.Instance(self.parent.parent.frame()) return - if functype == 'classmethod': - yield self.parent.parent.frame() - return if context and context.callcontext: call_site = arguments.CallSite(context.callcontext) @@ -317,24 +318,24 @@ # TODO: just provide the type here, no need to have an empty Dict. if name == self.vararg: - vararg = node_classes.const_factory(()) + vararg = nodes.const_factory(()) vararg.parent = self yield vararg return if name == self.kwarg: - kwarg = node_classes.const_factory({}) + kwarg = nodes.const_factory({}) kwarg.parent = self yield kwarg return - # if there is a default value, yield it. And then yield YES to reflect + # if there is a default value, yield it. And then yield Uninferable to reflect # we can't guess given argument value try: context = contextmod.copy_context(context) for inferred in self.default_value(name).infer(context): yield inferred - yield util.YES + yield util.Uninferable except exceptions.NoDefault: - yield util.YES + yield util.Uninferable def arguments_assigned_stmts(self, node=None, context=None, asspath=None): @@ -350,15 +351,28 @@ nodes.Arguments.assigned_stmts = arguments_assigned_stmts -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred def assign_assigned_stmts(self, node=None, context=None, asspath=None): if not asspath: yield self.value return for inferred in _resolve_asspart(self.value.infer(context), asspath, context): yield inferred + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, unknown=node, + assign_path=asspath, context=context)) + + +def assign_annassigned_stmts(self, node=None, context=None, asspath=None): + for inferred in assign_assigned_stmts(self, node, context, asspath): + if inferred is None: + yield util.Uninferable + else: + yield inferred nodes.Assign.assigned_stmts = assign_assigned_stmts +nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts nodes.AugAssign.assigned_stmts = assign_assigned_stmts @@ -368,96 +382,212 @@ index = asspath.pop(0) for part in parts: if hasattr(part, 'getitem'): + index_node = nodes.Const(index) try: - assigned = part.getitem(index, context) + assigned = part.getitem(index_node, context) # XXX raise a specific exception to avoid potential hiding of # unexpected exception ? - except (TypeError, IndexError): + except (exceptions.AstroidTypeError, exceptions.AstroidIndexError): return if not asspath: # we achieved to resolved the assignment path, don't infer the # last part yield assigned - elif assigned is util.YES: + elif assigned is util.Uninferable: return else: # we are not yet on the last part of the path search on each # possibly inferred value try: for inferred in _resolve_asspart(assigned.infer(context), - asspath, context): + asspath, context): yield inferred except exceptions.InferenceError: return -@bases.raise_if_nothing_inferred +@decorators.raise_if_nothing_inferred def excepthandler_assigned_stmts(self, node=None, context=None, asspath=None): for assigned in node_classes.unpack_infer(self.type): if isinstance(assigned, nodes.ClassDef): - assigned = bases.Instance(assigned) + assigned = objects.ExceptionInstance(assigned) + yield assigned -nodes.ExceptHandler.assigned_stmts = bases.raise_if_nothing_inferred(excepthandler_assigned_stmts) + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. 
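
The switch to objects.ExceptionInstance above exists because the name bound by an except handler refers to an exception instance, whose attributes such as .args are concrete tuples at runtime. A short, purely illustrative reminder of what the handler name is bound to:

try:
    raise ValueError("bad input", 42)
except ValueError as exc:
    # The handler binds an instance of the raised class; .args holds
    # the constructor arguments as a tuple.
    assert isinstance(exc, ValueError)
    assert exc.args == ("bad input", 42)
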
+ raise StopIteration(dict(node=self, unknown=node, + assign_path=asspath, context=context)) + +nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts -@bases.raise_if_nothing_inferred + +def _infer_context_manager(self, mgr, context): + try: + inferred = next(mgr.infer(context=context)) + except exceptions.InferenceError: + return + if isinstance(inferred, bases.Generator): + # Check if it is decorated with contextlib.contextmanager. + func = inferred.parent + if not func.decorators: + return + for decorator_node in func.decorators.nodes: + decorator = next(decorator_node.infer(context)) + if isinstance(decorator, nodes.FunctionDef): + if decorator.qname() == _CONTEXTLIB_MGR: + break + else: + # It doesn't interest us. + return + + # Get the first yield point. If it has multiple yields, + # then a RuntimeError will be raised. + # TODO(cpopa): Handle flows. + yield_point = next(func.nodes_of_class(nodes.Yield), None) + if yield_point: + if not yield_point.value: + # TODO(cpopa): an empty yield. Should be wrapped to Const. + const = nodes.Const(None) + const.parent = yield_point + const.lineno = yield_point.lineno + yield const + else: + for inferred in yield_point.value.infer(context=context): + yield inferred + elif isinstance(inferred, bases.Instance): + try: + enter = next(inferred.igetattr('__enter__', context=context)) + except (exceptions.InferenceError, exceptions.AttributeInferenceError): + return + if not isinstance(enter, bases.BoundMethod): + return + if not context.callcontext: + context.callcontext = contextmod.CallContext(args=[inferred]) + for result in enter.infer_call_result(self, context): + yield result + + +@decorators.raise_if_nothing_inferred def with_assigned_stmts(self, node=None, context=None, asspath=None): + """Infer names and other nodes from a *with* statement. + + This enables only inference for name binding in a *with* statement. + For instance, in the following code, inferring `func` will return + the `ContextManager` class, not whatever ``__enter__`` returns. + We are doing this intentionally, because we consider that the context + manager result is whatever __enter__ returns and what it is binded + using the ``as`` keyword. + + class ContextManager(object): + def __enter__(self): + return 42 + with ContextManager() as f: + pass + # ContextManager().infer() will return ContextManager + # f.infer() will return 42. + + Arguments: + self: nodes.With + node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`. + context: TODO + asspath: TODO + """ + mgr = next(mgr for (mgr, vars) in self.items if vars == node) if asspath is None: - for _, vars in self.items: - if vars is None: - continue - for lst in vars.infer(context): - if isinstance(lst, (nodes.Tuple, nodes.List)): - for item in lst.nodes: - yield item + for result in _infer_context_manager(self, mgr, context): + yield result + else: + for result in _infer_context_manager(self, mgr, context): + # Walk the asspath and get the item at the final index. 
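
Both branches of _infer_context_manager above answer the same question: what does the name after ``as`` refer to at runtime? For a class it is whatever __enter__ returns; for a contextlib.contextmanager generator it is the yielded value. A small runtime check of both forms, with invented names:

import contextlib

class Managed(object):
    def __enter__(self):
        return 42
    def __exit__(self, exc_type, exc_value, traceback):
        return False

@contextlib.contextmanager
def managed():
    yield "yielded value"

with Managed() as bound:
    assert bound == 42                  # __enter__ result, not the Managed instance

with managed() as bound:
    assert bound == "yielded value"     # the generator's yield point
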
+ obj = result + for index in asspath: + if not hasattr(obj, 'elts'): + raise exceptions.InferenceError( + 'Wrong type ({targets!r}) for {node!r} assignment', + node=self, targets=node, assign_path=asspath, + context=context) + try: + obj = obj.elts[index] + except IndexError: + util.reraise(exceptions.InferenceError( + 'Tried to infer a nonexistent target with index {index} ' + 'in {node!r}.', node=self, targets=node, + assign_path=asspath, context=context)) + except TypeError: + util.reraise(exceptions.InferenceError( + 'Tried to unpack an non-iterable value ' + 'in {node!r}.', node=self, targets=node, + assign_path=asspath, context=context)) + yield obj + # Explicit StopIteration to return error information, see comment + # in raise_if_nothing_inferred. + raise StopIteration(dict(node=self, unknown=node, + assign_path=asspath, context=context)) + nodes.With.assigned_stmts = with_assigned_stmts -@bases.yes_if_nothing_inferred +@decorators.yes_if_nothing_inferred def starred_assigned_stmts(self, node=None, context=None, asspath=None): + """ + Arguments: + self: nodes.Starred + node: TODO + context: TODO + asspath: TODO + """ stmt = self.statement() if not isinstance(stmt, (nodes.Assign, nodes.For)): - raise exceptions.InferenceError() + raise exceptions.InferenceError('Statement {stmt!r} enclosing {node!r} ' + 'must be an Assign or For node.', + node=self, stmt=stmt, unknown=node, + context=context) if isinstance(stmt, nodes.Assign): value = stmt.value lhs = stmt.targets[0] if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1: - # Too many starred arguments in the expression. - raise exceptions.InferenceError() + raise exceptions.InferenceError('Too many starred arguments in the ' + ' assignment targets {lhs!r}.', + node=self, targets=lhs, + unknown=node, context=context) if context is None: context = contextmod.InferenceContext() try: rhs = next(value.infer(context)) except exceptions.InferenceError: - yield util.YES + yield util.Uninferable return - if rhs is util.YES or not hasattr(rhs, 'elts'): + if rhs is util.Uninferable or not hasattr(rhs, 'elts'): # Not interested in inferred values without elts. - yield util.YES + yield util.Uninferable return elts = collections.deque(rhs.elts[:]) if len(lhs.elts) > len(rhs.elts): - # a, *b, c = (1, 2) - raise exceptions.InferenceError() + raise exceptions.InferenceError('More targets, {targets!r}, than ' + 'values to unpack, {values!r}.', + node=self, targets=lhs, + values=rhs, unknown=node, + context=context) # Unpack iteratively the values from the rhs of the assignment, # until the find the starred node. What will remain will # be the list of values which the Starred node will represent # This is done in two steps, from left to right to remove # anything before the starred node and from right to left - # to remvoe anything after the starred node. + # to remove anything after the starred node. 
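
The two-pass trimming described in the comments above reproduces what extended unpacking does at runtime: drop one value from the left for each target before the starred name, one from the right for each target after it, and whatever remains belongs to the starred name. A standalone sketch of that bookkeeping (the helper name is invented):

import collections

def starred_values(targets, values, starred_index):
    # Values captured by the starred target sitting at starred_index.
    remaining = collections.deque(values)
    for _ in targets[:starred_index]:       # targets before *name
        remaining.popleft()
    for _ in targets[starred_index + 1:]:   # targets after *name
        remaining.pop()
    return list(remaining)

# a, *b, c = (1, 2, 3, 4, 5)  ->  b == [2, 3, 4]
assert starred_values(['a', 'b', 'c'], (1, 2, 3, 4, 5), 1) == [2, 3, 4]
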
- for index, node in enumerate(lhs.elts): - if not isinstance(node, nodes.Starred): + for index, left_node in enumerate(lhs.elts): + if not isinstance(left_node, nodes.Starred): elts.popleft() continue lhs_elts = collections.deque(reversed(lhs.elts[index:])) - for node in lhs_elts: - if not isinstance(node, nodes.Starred): + for right_node in lhs_elts: + if not isinstance(right_node, nodes.Starred): elts.pop() continue # We're done diff -Nru astroid-1.4.9/astroid/raw_building.py astroid-1.5.3/astroid/raw_building.py --- astroid-1.4.9/astroid/raw_building.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/raw_building.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,47 +1,42 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """this module contains a set of functions to create astroid trees from scratch (build_* functions) or from living object (object_build_* functions) """ -import sys +import inspect +import logging import os -from os.path import abspath -from inspect import (getargspec, isdatadescriptor, isfunction, ismethod, - ismethoddescriptor, isclass, isbuiltin, ismodule) +import sys +import types + import six -from astroid.node_classes import CONST_CLS -from astroid.nodes import (Module, Class, Const, const_factory, From, - Function, EmptyNode, Name, Arguments) -from astroid.bases import BUILTINS, Generator -from astroid.manager import AstroidManager -MANAGER = AstroidManager() +from astroid import bases +from astroid import manager +from astroid import node_classes +from astroid import nodes -_CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types + +MANAGER = manager.AstroidManager() +# the keys of CONST_CLS eg python builtin types +_CONSTANTS = tuple(node_classes.CONST_CLS) _JYTHON = os.name == 'java' _BUILTINS = vars(six.moves.builtins) +_LOG = logging.getLogger(__name__) + def _io_discrepancy(member): # _io module names itself `io`: http://bugs.python.org/issue18602 member_self = getattr(member, '__self__', None) return (member_self and - ismodule(member_self) and + inspect.ismodule(member_self) and member_self.__name__ == '_io' and member.__module__ == 'io') @@ -56,72 +51,75 @@ cls_name = getattr(python_cls, '__name__', None) if not cls_name: return - bases = [ancestor.__name__ for ancestor in python_cls.__bases__] - ast_klass = build_class(cls_name, bases, python_cls.__doc__) - func._instance_attrs['__class__'] = [ast_klass] + cls_bases = [ancestor.__name__ for ancestor in 
python_cls.__bases__] + ast_klass = build_class(cls_name, cls_bases, python_cls.__doc__) + func.instance_attrs['__class__'] = [ast_klass] _marker = object() -def attach_dummy_node(node, name, object=_marker): + +def attach_dummy_node(node, name, runtime_object=_marker): """create a dummy node and register it in the locals of the given node with the specified name """ - enode = EmptyNode() - enode.object = object + enode = nodes.EmptyNode() + enode.object = runtime_object _attach_local_node(node, enode, name) def _has_underlying_object(self): - return hasattr(self, 'object') and self.object is not _marker + return self.object is not None and self.object is not _marker -EmptyNode.has_underlying_object = _has_underlying_object +nodes.EmptyNode.has_underlying_object = _has_underlying_object def attach_const_node(node, name, value): """create a Const node and register it in the locals of the given node with the specified name """ - if not name in node.special_attributes: - _attach_local_node(node, const_factory(value), name) + if name not in node.special_attributes: + _attach_local_node(node, nodes.const_factory(value), name) def attach_import_node(node, modname, membername): - """create a From node and register it in the locals of the given + """create a ImportFrom node and register it in the locals of the given node with the specified name """ - from_node = From(modname, [(membername, None)]) + from_node = nodes.ImportFrom(modname, [(membername, None)]) _attach_local_node(node, from_node, membername) def build_module(name, doc=None): """create and initialize a astroid Module node""" - node = Module(name, doc, pure_python=False) + node = nodes.Module(name, doc, pure_python=False) node.package = False node.parent = None return node + def build_class(name, basenames=(), doc=None): - """create and initialize a astroid Class node""" - node = Class(name, doc) + """create and initialize a astroid ClassDef node""" + node = nodes.ClassDef(name, doc) for base in basenames: - basenode = Name() + basenode = nodes.Name() basenode.name = base node.bases.append(basenode) basenode.parent = node return node -def build_function(name, args=None, defaults=None, flag=0, doc=None): - """create and initialize a astroid Function node""" + +def build_function(name, args=None, defaults=None, doc=None): + """create and initialize a astroid FunctionDef node""" args, defaults = args or [], defaults or [] # first argument is now a list of decorators - func = Function(name, doc) - func.args = argsnode = Arguments() + func = nodes.FunctionDef(name, doc) + func.args = argsnode = nodes.Arguments() argsnode.args = [] for arg in args: - argsnode.args.append(Name()) + argsnode.args.append(nodes.Name()) argsnode.args[-1].name = arg argsnode.args[-1].parent = argsnode argsnode.defaults = [] for default in defaults: - argsnode.defaults.append(const_factory(default)) + argsnode.defaults.append(nodes.const_factory(default)) argsnode.defaults[-1].parent = argsnode argsnode.kwarg = None argsnode.vararg = None @@ -132,8 +130,8 @@ def build_from_import(fromname, names): - """create and initialize an astroid From import statement""" - return From(fromname, [(name, None) for name in names]) + """create and initialize an astroid ImportFrom import statement""" + return nodes.ImportFrom(fromname, [(name, None) for name in names]) def register_arguments(func, args=None): """add given arguments to local @@ -148,32 +146,37 @@ if func.args.kwarg: func.set_local(func.args.kwarg, func.args) for arg in args: - if isinstance(arg, Name): + if 
isinstance(arg, nodes.Name): func.set_local(arg.name, arg) else: register_arguments(func, arg.elts) + def object_build_class(node, member, localname): """create astroid for a living class object""" basenames = [base.__name__ for base in member.__bases__] return _base_class_object_build(node, member, basenames, localname=localname) + def object_build_function(node, member, localname): """create astroid for a living function object""" - args, varargs, varkw, defaults = getargspec(member) + # pylint: disable=deprecated-method; completely removed in 2.0 + args, varargs, varkw, defaults = inspect.getargspec(member) if varargs is not None: args.append(varargs) if varkw is not None: args.append(varkw) func = build_function(getattr(member, '__name__', None) or localname, args, - defaults, six.get_function_code(member).co_flags, member.__doc__) + defaults, member.__doc__) node.add_local_node(func, localname) + def object_build_datadescriptor(node, member, name): """create astroid for a living data descriptor object""" return _base_class_object_build(node, member, [], name) + def object_build_methoddescriptor(node, member, localname): """create astroid for a living method descriptor object""" # FIXME get arguments ? @@ -185,6 +188,7 @@ node.add_local_node(func, localname) _add_dunder_class(func, member) + def _base_class_object_build(node, member, basenames, name=None, localname=None): """create astroid for a living class object, with a given set of base names (e.g. ancestors) @@ -202,25 +206,41 @@ instdict = member().__dict__ else: raise TypeError - except: + except: # pylint: disable=bare-except pass else: - for name, obj in instdict.items(): - valnode = EmptyNode() + for item_name, obj in instdict.items(): + valnode = nodes.EmptyNode() valnode.object = obj valnode.parent = klass valnode.lineno = 1 - klass._instance_attrs[name] = [valnode] + klass.instance_attrs[item_name] = [valnode] return klass +def _build_from_function(node, name, member, module): + # verify this is not an imported function + try: + code = six.get_function_code(member) + except AttributeError: + # Some implementations don't provide the code object, + # such as Jython. + code = None + filename = getattr(code, 'co_filename', None) + if filename is None: + assert isinstance(member, object) + object_build_methoddescriptor(node, member, name) + elif filename != getattr(module, '__file__', None): + attach_dummy_node(node, name, member) + else: + object_build_function(node, member, name) class InspectBuilder(object): """class for building nodes from living object this is actually a really minimal representation, including only Module, - Function and Class nodes and some others as guessed. + FunctionDef and ClassDef nodes and some others as guessed. """ # astroid from living objects ############################################### @@ -242,7 +262,7 @@ except AttributeError: # in jython, java modules have no __doc__ (see #109562) node = build_module(modname) - node.source_file = path and abspath(path) or path + node.file = node.path = os.path.abspath(path) if path else path node.name = modname MANAGER.cache_module(node) node.package = hasattr(module, '__path__') @@ -264,30 +284,21 @@ # damned ExtensionClass.Base, I know you're there ! 
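
The co_filename comparison in _build_from_function above is ordinary introspection: a function imported from another module carries that module's file in its code object, while a C-level builtin has no code object at all. A rough standalone sketch of the same classification (the helper is invented and assumes it is run from a .py file, not an interactive session):

import os
import sys

def classify_member(member, module):
    code = getattr(member, '__code__', None)
    if code is None:
        return 'no code object (builtin or descriptor)'
    if code.co_filename != getattr(module, '__file__', None):
        return 'imported from elsewhere'
    return 'defined in this module'

this_module = sys.modules[__name__]
assert classify_member(len, this_module) == 'no code object (builtin or descriptor)'
assert classify_member(os.path.join, this_module) == 'imported from elsewhere'
assert classify_member(classify_member, this_module) == 'defined in this module'
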
attach_dummy_node(node, name) continue - if ismethod(member): + if inspect.ismethod(member): member = six.get_method_function(member) - if isfunction(member): - # verify this is not an imported function - filename = getattr(six.get_function_code(member), - 'co_filename', None) - if filename is None: - assert isinstance(member, object) - object_build_methoddescriptor(node, member, name) - elif filename != getattr(self._module, '__file__', None): - attach_dummy_node(node, name, member) - else: - object_build_function(node, member, name) - elif isbuiltin(member): + if inspect.isfunction(member): + _build_from_function(node, name, member, self._module) + elif inspect.isbuiltin(member): if (not _io_discrepancy(member) and self.imported_member(node, member, name)): continue object_build_methoddescriptor(node, member, name) - elif isclass(member): + elif inspect.isclass(member): if self.imported_member(node, member, name): continue if member in self._done: class_node = self._done[member] - if not class_node in node._locals.get(name, ()): + if class_node not in node.locals.get(name, ()): node.add_local_node(class_node, name) else: class_node = object_build_class(node, member, name) @@ -295,14 +306,18 @@ self.object_build(class_node, member) if name == '__class__' and class_node.parent is None: class_node.parent = self._done[self._module] - elif ismethoddescriptor(member): + elif inspect.ismethoddescriptor(member): assert isinstance(member, object) object_build_methoddescriptor(node, member, name) - elif isdatadescriptor(member): + elif inspect.isdatadescriptor(member): assert isinstance(member, object) object_build_datadescriptor(node, member, name) - elif type(member) in _CONSTANTS: + elif isinstance(member, _CONSTANTS): attach_const_node(node, name, member) + elif inspect.isroutine(member): + # This should be called for Jython, where some builtin + # methods aren't caught by isbuiltin branch. 
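
The inspect.ismethod / six.get_method_function pair above smooths over a Python 2/3 difference: on Python 2 a function reached through its class is wrapped as an unbound method, on Python 3 it is the plain function. A short check of that unwrapping, with an invented class:

import inspect
import six

class Sample(object):
    def method(self):
        return 'called'

member = getattr(Sample, 'method')
if inspect.ismethod(member):                 # true on Python 2 (unbound method)
    member = six.get_method_function(member)
assert inspect.isfunction(member)            # holds on both Python 2 and 3
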
+ _build_from_function(node, name, member, self._module) else: # create an empty node so that the name is actually defined attach_dummy_node(node, name, member) @@ -314,11 +329,9 @@ # (see http://www.logilab.org/ticket/57299 for instance) try: modname = getattr(member, '__module__', None) - except: - # XXX use logging - print('unexpected error while building astroid from living object') - import traceback - traceback.print_exc() + except: # pylint: disable=bare-except + _LOG.exception('unexpected error while building ' + 'astroid from living object') modname = None if modname is None: if (name in ('__new__', '__subclasshook__') @@ -326,7 +339,7 @@ # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14) # >>> print object.__new__.__module__ # None - modname = BUILTINS + modname = six.moves.builtins.__name__ else: attach_dummy_node(node, name, member) return True @@ -361,7 +374,8 @@ from six.moves import builtins astroid_builtin = Astroid_BUILDER.inspect_build(builtins) - for cls, node_cls in CONST_CLS.items(): + # pylint: disable=redefined-outer-name + for cls, node_cls in node_classes.CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') proxy.parent = astroid_builtin @@ -382,9 +396,21 @@ # infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870) def _set_proxied(const): return _CONST_PROXY[const.value.__class__] -Const._proxied = property(_set_proxied) - -from types import GeneratorType -Generator._proxied = Class(GeneratorType.__name__, GeneratorType.__doc__) -Astroid_BUILDER.object_build(Generator._proxied, GeneratorType) +nodes.Const._proxied = property(_set_proxied) +_GeneratorType = nodes.ClassDef(types.GeneratorType.__name__, types.GeneratorType.__doc__) +_GeneratorType.parent = MANAGER.astroid_cache[six.moves.builtins.__name__] +bases.Generator._proxied = _GeneratorType +Astroid_BUILDER.object_build(bases.Generator._proxied, types.GeneratorType) + +_builtins = MANAGER.astroid_cache[six.moves.builtins.__name__] +BUILTIN_TYPES = (types.GetSetDescriptorType, types.GeneratorType, + types.MemberDescriptorType, type(None), type(NotImplemented), + types.FunctionType, types.MethodType, + types.BuiltinFunctionType, types.ModuleType, types.TracebackType) +for _type in BUILTIN_TYPES: + if _type.__name__ not in _builtins: + cls = nodes.ClassDef(_type.__name__, _type.__doc__) + cls.parent = MANAGER.astroid_cache[six.moves.builtins.__name__] + Astroid_BUILDER.object_build(cls, _type) + _builtins[_type.__name__] = cls diff -Nru astroid-1.4.9/astroid/rebuilder.py astroid-1.5.3/astroid/rebuilder.py --- astroid-1.4.9/astroid/rebuilder.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/rebuilder.py 2017-04-12 13:52:07.000000000 +0000 @@ -1,79 +1,60 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . 
+# Copyright (c) 2009-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2013-2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """this module contains utilities for rebuilding a _ast tree in order to get a single Astroid representation """ import sys import _ast -from _ast import ( - # binary operators - Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor, - LShift, RShift, - # logical operators - And, Or, - # unary operators - UAdd, USub, Not, Invert, - # comparison operators - Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn, - ) -from astroid import nodes as new +import astroid from astroid import astpeephole +from astroid import nodes + -_BIN_OP_CLASSES = {Add: '+', - BitAnd: '&', - BitOr: '|', - BitXor: '^', - Div: '/', - FloorDiv: '//', - Mod: '%', - Mult: '*', - Pow: '**', - Sub: '-', - LShift: '<<', - RShift: '>>', +_BIN_OP_CLASSES = {_ast.Add: '+', + _ast.BitAnd: '&', + _ast.BitOr: '|', + _ast.BitXor: '^', + _ast.Div: '/', + _ast.FloorDiv: '//', + _ast.Mod: '%', + _ast.Mult: '*', + _ast.Pow: '**', + _ast.Sub: '-', + _ast.LShift: '<<', + _ast.RShift: '>>', } if sys.version_info >= (3, 5): - from _ast import MatMult - _BIN_OP_CLASSES[MatMult] = '@' + _BIN_OP_CLASSES[_ast.MatMult] = '@' -_BOOL_OP_CLASSES = {And: 'and', - Or: 'or', +_BOOL_OP_CLASSES = {_ast.And: 'and', + _ast.Or: 'or', } -_UNARY_OP_CLASSES = {UAdd: '+', - USub: '-', - Not: 'not', - Invert: '~', +_UNARY_OP_CLASSES = {_ast.UAdd: '+', + _ast.USub: '-', + _ast.Not: 'not', + _ast.Invert: '~', } -_CMP_OP_CLASSES = {Eq: '==', - Gt: '>', - GtE: '>=', - In: 'in', - Is: 'is', - IsNot: 'is not', - Lt: '<', - LtE: '<=', - NotEq: '!=', - NotIn: 'not in', +_CMP_OP_CLASSES = {_ast.Eq: '==', + _ast.Gt: '>', + _ast.GtE: '>=', + _ast.In: 'in', + _ast.Is: 'is', + _ast.IsNot: 'is not', + _ast.Lt: '<', + _ast.LtE: '<=', + _ast.NotEq: '!=', + _ast.NotIn: 'not in', } CONST_NAME_TRANSFORMS = {'None': None, @@ -88,39 +69,25 @@ 'excepthandler': 'ExceptHandler', 'keyword': 'Keyword', } -PY3K = sys.version_info >= (3, 0) +PY3 = sys.version_info >= (3, 0) PY34 = sys.version_info >= (3, 4) +CONTEXTS = {_ast.Load: astroid.Load, + _ast.Store: astroid.Store, + _ast.Del: astroid.Del, + _ast.Param: astroid.Store} -def _init_set_doc(node, newnode): - newnode.doc = None + +def _get_doc(node): try: if isinstance(node.body[0], _ast.Expr) and isinstance(node.body[0].value, _ast.Str): - newnode.doc = node.body[0].value.s + doc = node.body[0].value.s node.body = node.body[1:] - + return node, doc except IndexError: pass # ast built from scratch + return node, None -def _lineno_parent(oldnode, newnode, parent): - newnode.parent = parent - newnode.lineno = oldnode.lineno - newnode.col_offset = oldnode.col_offset - -def _set_infos(oldnode, newnode, parent): - newnode.parent = parent - if hasattr(oldnode, 'lineno'): - newnode.lineno = oldnode.lineno - if hasattr(oldnode, 'col_offset'): - newnode.col_offset = oldnode.col_offset - -def _create_yield_node(node, parent, rebuilder, factory): - newnode = factory() - _lineno_parent(node, newnode, parent) - if node.value is not None: - newnode.value = rebuilder.visit(node.value, newnode, None) - return newnode - -def _visit_or_none(node, attr, visitor, parent, assign_ctx, visit='visit', +def _visit_or_none(node, attr, visitor, parent, visit='visit', **kws): """If the given node has an attribute, 
visits the attribute, and otherwise returns None. @@ -128,9 +95,13 @@ """ value = getattr(node, attr, None) if value: - return getattr(visitor, visit)(value, parent, assign_ctx, **kws) - else: - return None + return getattr(visitor, visit)(value, parent, **kws) + + return None + + +def _get_context(node): + return CONTEXTS.get(type(node.ctx), astroid.Load) class TreeRebuilder(object): @@ -138,7 +109,6 @@ def __init__(self, manager): self._manager = manager - self.asscontext = None self._global_names = [] self._import_from_nodes = [] self._delayed_assattr = [] @@ -147,15 +117,13 @@ def visit_module(self, node, modname, modpath, package): """visit a Module node by returning a fresh instance of it""" - newnode = new.Module(modname, None) - newnode.package = package - newnode.parent = None - _init_set_doc(node, newnode) - newnode.body = [self.visit(child, newnode) for child in node.body] - newnode.source_file = modpath + node, doc = _get_doc(node) + newnode = nodes.Module(name=modname, doc=doc, file=modpath, path=modpath, + package=package, parent=None) + newnode.postinit([self.visit(child, newnode) for child in node.body]) return newnode - def visit(self, node, parent, assign_ctx=None): + def visit(self, node, parent): cls = node.__class__ if cls in self._visit_meths: visit_method = self._visit_meths[cls] @@ -164,7 +132,7 @@ visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower() visit_method = getattr(self, visit_name) self._visit_meths[cls] = visit_method - return visit_method(node, parent, assign_ctx) + return visit_method(node, parent) def _save_assignment(self, node, name=None): """save assignement situation since node.parent is not available yet""" @@ -173,40 +141,68 @@ else: node.parent.set_local(node.name, node) - def visit_arguments(self, node, parent, assign_ctx=None): + def visit_arguments(self, node, parent): """visit a Arguments node by returning a fresh instance of it""" - newnode = new.Arguments() - newnode.parent = parent - newnode.args = [self.visit(child, newnode, "Assign") - for child in node.args] - newnode.defaults = [self.visit(child, newnode, assign_ctx) - for child in node.defaults] - newnode.kwonlyargs = [] - newnode.kw_defaults = [] vararg, kwarg = node.vararg, node.kwarg + if PY34: + newnode = nodes.Arguments(vararg.arg if vararg else None, + kwarg.arg if kwarg else None, + parent) + else: + newnode = nodes.Arguments(vararg, kwarg, parent) + args = [self.visit(child, newnode) for child in node.args] + defaults = [self.visit(child, newnode) + for child in node.defaults] + varargannotation = None + kwargannotation = None # change added in 82732 (7c5c678e4164), vararg and kwarg # are instances of `_ast.arg`, not strings if vararg: if PY34: - if vararg.annotation: - newnode.varargannotation = self.visit(vararg.annotation, - newnode, assign_ctx) + if node.vararg.annotation: + varargannotation = self.visit(node.vararg.annotation, + newnode) vararg = vararg.arg - elif PY3K and node.varargannotation: - newnode.varargannotation = self.visit(node.varargannotation, - newnode, assign_ctx) + elif PY3 and node.varargannotation: + varargannotation = self.visit(node.varargannotation, + newnode) if kwarg: if PY34: - if kwarg.annotation: - newnode.kwargannotation = self.visit(kwarg.annotation, - newnode, assign_ctx) + if node.kwarg.annotation: + kwargannotation = self.visit(node.kwarg.annotation, + newnode) kwarg = kwarg.arg - elif PY3K: + elif PY3: if node.kwargannotation: - newnode.kwargannotation = self.visit(node.kwargannotation, - newnode, assign_ctx) - newnode.vararg = 
vararg - newnode.kwarg = kwarg + kwargannotation = self.visit(node.kwargannotation, + newnode) + if PY3: + kwonlyargs = [self.visit(child, newnode) for child + in node.kwonlyargs] + kw_defaults = [self.visit(child, newnode) if child else + None for child in node.kw_defaults] + annotations = [self.visit(arg.annotation, newnode) if + arg.annotation else None for arg in node.args] + kwonlyargs_annotations = [ + self.visit(arg.annotation, newnode) if arg.annotation else None + for arg in node.kwonlyargs + ] + else: + kwonlyargs = [] + kw_defaults = [] + annotations = [] + kwonlyargs_annotations = [] + + newnode.postinit( + args=args, + defaults=defaults, + kwonlyargs=kwonlyargs, + kw_defaults=kw_defaults, + annotations=annotations, + kwonlyargs_annotations=kwonlyargs_annotations, + varargannotation=varargannotation, + kwargannotation=kwargannotation + ) # save argument names in locals: if vararg: newnode.parent.set_local(vararg, newnode) @@ -214,57 +210,46 @@ newnode.parent.set_local(kwarg, newnode) return newnode - def visit_assignattr(self, node, parent, assign_ctx=None): - """visit a AssAttr node by returning a fresh instance of it""" - newnode = new.AssignAttr() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.expr, newnode, assign_ctx) - self._delayed_assattr.append(newnode) - return newnode - - def visit_assert(self, node, parent, assign_ctx=None): + def visit_assert(self, node, parent): """visit a Assert node by returning a fresh instance of it""" - newnode = new.Assert() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode, assign_ctx) - if node.msg is not None: - newnode.fail = self.visit(node.msg, newnode, assign_ctx) + newnode = nodes.Assert(node.lineno, node.col_offset, parent) + if node.msg: + msg = self.visit(node.msg, newnode) + else: + msg = None + newnode.postinit(self.visit(node.test, newnode), msg) return newnode - def visit_assign(self, node, parent, assign_ctx=None): + def visit_assign(self, node, parent): """visit a Assign node by returning a fresh instance of it""" - newnode = new.Assign() - _lineno_parent(node, newnode, parent) - newnode.targets = [self.visit(child, newnode, "Assign") - for child in node.targets] - newnode.value = self.visit(node.value, newnode, None) + newnode = nodes.Assign(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.targets], + self.visit(node.value, newnode)) return newnode - def visit_assignname(self, node, parent, assign_ctx=None, node_name=None): - '''visit a node and return a AssName node''' - newnode = new.AssignName() - _set_infos(node, newnode, parent) - newnode.name = node_name + def visit_assignname(self, node, parent, node_name=None): + '''visit a node and return a AssignName node''' + newnode = nodes.AssignName(node_name, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) self._save_assignment(newnode) return newnode - def visit_augassign(self, node, parent, assign_ctx=None): + def visit_augassign(self, node, parent): """visit a AugAssign node by returning a fresh instance of it""" - newnode = new.AugAssign() - _lineno_parent(node, newnode, parent) - newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "=" - newnode.target = self.visit(node.target, newnode, "Assign") - newnode.value = self.visit(node.value, newnode, None) + newnode = nodes.AugAssign(_BIN_OP_CLASSES[type(node.op)] + "=", + node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.target, newnode), + 
self.visit(node.value, newnode)) return newnode - def visit_repr(self, node, parent, assign_ctx=None): + def visit_repr(self, node, parent): """visit a Backquote node by returning a fresh instance of it""" - newnode = new.Repr() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode = nodes.Repr(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_binop(self, node, parent, assign_ctx=None): + def visit_binop(self, node, parent): """visit a BinOp node by returning a fresh instance of it""" if isinstance(node.left, _ast.BinOp) and self._manager.optimize_ast: # Optimize BinOp operations in order to remove @@ -279,711 +264,651 @@ # problem for the correctness of the program). # # ("a" + "b" + # one thousand more + "c") - optimized = self._peepholer.optimize_binop(node) + optimized = self._peepholer.optimize_binop(node, parent) if optimized: - _lineno_parent(node, optimized, parent) return optimized - newnode = new.BinOp() - _lineno_parent(node, newnode, parent) - newnode.left = self.visit(node.left, newnode, assign_ctx) - newnode.right = self.visit(node.right, newnode, assign_ctx) - newnode.op = _BIN_OP_CLASSES[node.op.__class__] + newnode = nodes.BinOp(_BIN_OP_CLASSES[type(node.op)], + node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.left, newnode), + self.visit(node.right, newnode)) return newnode - def visit_boolop(self, node, parent, assign_ctx=None): + def visit_boolop(self, node, parent): """visit a BoolOp node by returning a fresh instance of it""" - newnode = new.BoolOp() - _lineno_parent(node, newnode, parent) - newnode.values = [self.visit(child, newnode, assign_ctx) - for child in node.values] - newnode.op = _BOOL_OP_CLASSES[node.op.__class__] + newnode = nodes.BoolOp(_BOOL_OP_CLASSES[type(node.op)], + node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.values]) return newnode - def visit_break(self, node, parent, assign_ctx=None): + def visit_break(self, node, parent): """visit a Break node by returning a fresh instance of it""" - newnode = new.Break() - _set_infos(node, newnode, parent) - return newnode + return nodes.Break(getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), + parent) - def visit_call(self, node, parent, assign_ctx=None): + def visit_call(self, node, parent): """visit a CallFunc node by returning a fresh instance of it""" - newnode = new.Call() - _lineno_parent(node, newnode, parent) - newnode.func = self.visit(node.func, newnode, assign_ctx) - args = [self.visit(child, newnode, assign_ctx) + newnode = nodes.Call(node.lineno, node.col_offset, parent) + starargs = _visit_or_none(node, 'starargs', self, newnode) + kwargs = _visit_or_none(node, 'kwargs', self, newnode) + args = [self.visit(child, newnode) for child in node.args] - starargs = _visit_or_none(node, 'starargs', self, newnode, - assign_ctx) - kwargs = _visit_or_none(node, 'kwargs', self, newnode, - assign_ctx) - keywords = None if node.keywords: - keywords = [self.visit(child, newnode, assign_ctx) + keywords = [self.visit(child, newnode) for child in node.keywords] - + else: + keywords = None if starargs: - new_starargs = new.Starred() - new_starargs.col_offset = starargs.col_offset - new_starargs.lineno = starargs.lineno - new_starargs.parent = starargs.parent - new_starargs.value = starargs + new_starargs = nodes.Starred(col_offset=starargs.col_offset, + lineno=starargs.lineno, + 
parent=starargs.parent) + new_starargs.postinit(value=starargs) args.append(new_starargs) if kwargs: - new_kwargs = new.Keyword() - new_kwargs.arg = None - new_kwargs.col_offset = kwargs.col_offset - new_kwargs.lineno = kwargs.lineno - new_kwargs.parent = kwargs.parent - new_kwargs.value = kwargs + new_kwargs = nodes.Keyword(arg=None, col_offset=kwargs.col_offset, + lineno=kwargs.lineno, + parent=kwargs.parent) + new_kwargs.postinit(value=kwargs) if keywords: keywords.append(new_kwargs) else: keywords = [new_kwargs] - newnode.args = args - newnode.keywords = keywords + newnode.postinit(self.visit(node.func, newnode), + args, keywords) return newnode - def visit_classdef(self, node, parent, assign_ctx=None): - """visit a Class node to become astroid""" - newnode = new.ClassDef(node.name, None) - _lineno_parent(node, newnode, parent) - _init_set_doc(node, newnode) - newnode.bases = [self.visit(child, newnode, assign_ctx) - for child in node.bases] - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] + def visit_classdef(self, node, parent, newstyle=None): + """visit a ClassDef node to become astroid""" + node, doc = _get_doc(node) + newnode = nodes.ClassDef(node.name, doc, node.lineno, + node.col_offset, parent) + metaclass = None + if PY3: + for keyword in node.keywords: + if keyword.arg == 'metaclass': + metaclass = self.visit(keyword, newnode).value + break if node.decorator_list: - newnode.decorators = self.visit_decorators(node, newnode, assign_ctx) - newnode.parent.frame().set_local(newnode.name, newnode) + decorators = self.visit_decorators(node, newnode) + else: + decorators = None + newnode.postinit([self.visit(child, newnode) + for child in node.bases], + [self.visit(child, newnode) + for child in node.body], + decorators, newstyle, metaclass, + [self.visit(kwd, newnode) for kwd in node.keywords + if kwd.arg != 'metaclass'] if PY3 else []) return newnode - def visit_const(self, node, parent, assign_ctx=None): + def visit_const(self, node, parent): """visit a Const node by returning a fresh instance of it""" - newnode = new.Const(node.value) - _set_infos(node, newnode, parent) - return newnode + return nodes.Const(node.value, + getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) - def visit_continue(self, node, parent, assign_ctx=None): + def visit_continue(self, node, parent): """visit a Continue node by returning a fresh instance of it""" - newnode = new.Continue() - _set_infos(node, newnode, parent) - return newnode + return nodes.Continue(getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), + parent) - def visit_compare(self, node, parent, assign_ctx=None): + def visit_compare(self, node, parent): """visit a Compare node by returning a fresh instance of it""" - newnode = new.Compare() - _lineno_parent(node, newnode, parent) - newnode.left = self.visit(node.left, newnode, assign_ctx) - newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode, assign_ctx)) - for (op, expr) in zip(node.ops, node.comparators)] + newnode = nodes.Compare(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.left, newnode), + [(_CMP_OP_CLASSES[op.__class__], + self.visit(expr, newnode)) + for (op, expr) in zip(node.ops, node.comparators)]) return newnode - def visit_comprehension(self, node, parent, assign_ctx=None): + def visit_comprehension(self, node, parent): """visit a Comprehension node by returning a fresh instance of it""" - newnode = new.Comprehension() - newnode.parent = parent - 
newnode.target = self.visit(node.target, newnode, 'Assign') - newnode.iter = self.visit(node.iter, newnode, None) - newnode.ifs = [self.visit(child, newnode, None) - for child in node.ifs] + newnode = nodes.Comprehension(parent) + newnode.postinit(self.visit(node.target, newnode), + self.visit(node.iter, newnode), + [self.visit(child, newnode) + for child in node.ifs], + getattr(node, 'is_async', None)) return newnode - def visit_decorators(self, node, parent, assign_ctx=None): + def visit_decorators(self, node, parent): """visit a Decorators node by returning a fresh instance of it""" - # /!\ node is actually a _ast.Function node while - # parent is a astroid.nodes.Function node - newnode = new.Decorators() - _lineno_parent(node, newnode, parent) - decorators = node.decorator_list - newnode.nodes = [self.visit(child, newnode, assign_ctx) - for child in decorators] + # /!\ node is actually a _ast.FunctionDef node while + # parent is a astroid.nodes.FunctionDef node + newnode = nodes.Decorators(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.decorator_list]) return newnode - def visit_delete(self, node, parent, assign_ctx=None): + def visit_delete(self, node, parent): """visit a Delete node by returning a fresh instance of it""" - newnode = new.Delete() - _lineno_parent(node, newnode, parent) - newnode.targets = [self.visit(child, newnode, 'Del') - for child in node.targets] + newnode = nodes.Delete(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.targets]) return newnode - def _visit_dict_items(self, node, parent, newnode, assign_ctx): + def _visit_dict_items(self, node, parent, newnode): for key, value in zip(node.keys, node.values): - rebuilt_value = self.visit(value, newnode, assign_ctx) + rebuilt_value = self.visit(value, newnode) if not key: # Python 3.5 and extended unpacking - rebuilt_key = new.DictUnpack() - rebuilt_key.lineno = rebuilt_value.lineno - rebuilt_key.col_offset = rebuilt_value.col_offset - rebuilt_key.parent = rebuilt_value.parent + rebuilt_key = nodes.DictUnpack(rebuilt_value.lineno, + rebuilt_value.col_offset, + parent) else: - rebuilt_key = self.visit(key, newnode, assign_ctx) + rebuilt_key = self.visit(key, newnode) yield rebuilt_key, rebuilt_value - def visit_dict(self, node, parent, assign_ctx=None): + def visit_dict(self, node, parent): """visit a Dict node by returning a fresh instance of it""" - newnode = new.Dict() - _lineno_parent(node, newnode, parent) - newnode.items = list(self._visit_dict_items(node, parent, newnode, assign_ctx)) + newnode = nodes.Dict(node.lineno, node.col_offset, parent) + items = list(self._visit_dict_items(node, parent, newnode)) + newnode.postinit(items) return newnode - def visit_dictcomp(self, node, parent, assign_ctx=None): + def visit_dictcomp(self, node, parent): """visit a DictComp node by returning a fresh instance of it""" - newnode = new.DictComp() - _lineno_parent(node, newnode, parent) - newnode.key = self.visit(node.key, newnode, assign_ctx) - newnode.value = self.visit(node.value, newnode, assign_ctx) - newnode.generators = [self.visit(child, newnode, assign_ctx) - for child in node.generators] - return newnode - - def visit_expr(self, node, parent, assign_ctx=None): - """visit a Discard node by returning a fresh instance of it""" - newnode = new.Expr() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode = nodes.DictComp(node.lineno, node.col_offset, 
parent) + newnode.postinit(self.visit(node.key, newnode), + self.visit(node.value, newnode), + [self.visit(child, newnode) + for child in node.generators]) return newnode - def visit_ellipsis(self, node, parent, assign_ctx=None): - """visit an Ellipsis node by returning a fresh instance of it""" - newnode = new.Ellipsis() - _set_infos(node, newnode, parent) + def visit_expr(self, node, parent): + """visit a Expr node by returning a fresh instance of it""" + newnode = nodes.Expr(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_emptynode(self, node, parent, assign_ctx=None): + def visit_ellipsis(self, node, parent): + """visit an Ellipsis node by returning a fresh instance of it""" + return nodes.Ellipsis(getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) + + + def visit_emptynode(self, node, parent): """visit an EmptyNode node by returning a fresh instance of it""" - newnode = new.EmptyNode() - _set_infos(node, newnode, parent) - return newnode + return nodes.EmptyNode(getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) - def visit_excepthandler(self, node, parent, assign_ctx=None): + + def visit_excepthandler(self, node, parent): """visit an ExceptHandler node by returning a fresh instance of it""" - newnode = new.ExceptHandler() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.type = self.visit(node.type, newnode, assign_ctx) - if node.name is not None: - # /!\ node.name can be a tuple - newnode.name = self.visit(node.name, newnode, 'Assign') - newnode.body = [self.visit(child, newnode, None) - for child in node.body] + newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent) + # /!\ node.name can be a tuple + newnode.postinit(_visit_or_none(node, 'type', self, newnode), + _visit_or_none(node, 'name', self, newnode), + [self.visit(child, newnode) + for child in node.body]) return newnode - def visit_exec(self, node, parent, assign_ctx=None): + def visit_exec(self, node, parent): """visit an Exec node by returning a fresh instance of it""" - newnode = new.Exec() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.body, newnode) - if node.globals is not None: - newnode.globals = self.visit(node.globals, newnode, - assign_ctx) - if node.locals is not None: - newnode.locals = self.visit(node.locals, newnode, - assign_ctx) + newnode = nodes.Exec(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.body, newnode), + _visit_or_none(node, 'globals', self, newnode), + _visit_or_none(node, 'locals', self, newnode)) return newnode - def visit_extslice(self, node, parent, assign_ctx=None): + def visit_extslice(self, node, parent): """visit an ExtSlice node by returning a fresh instance of it""" - newnode = new.ExtSlice() - newnode.parent = parent - newnode.dims = [self.visit(dim, newnode, assign_ctx) - for dim in node.dims] + newnode = nodes.ExtSlice(parent=parent) + newnode.postinit([self.visit(dim, newnode) + for dim in node.dims]) return newnode - def _visit_for(self, cls, node, parent, assign_ctx=None): + def _visit_for(self, cls, node, parent): """visit a For node by returning a fresh instance of it""" - newnode = cls() - _lineno_parent(node, newnode, parent) - newnode.target = self.visit(node.target, newnode, "Assign") - newnode.iter = self.visit(node.iter, newnode, None) - newnode.body = [self.visit(child, newnode, None) - for child in node.body] - newnode.orelse = [self.visit(child, newnode, None) 
- for child in node.orelse] + newnode = cls(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.target, newnode), + self.visit(node.iter, newnode), + [self.visit(child, newnode) + for child in node.body], + [self.visit(child, newnode) + for child in node.orelse]) return newnode - def visit_for(self, node, parent, assign_ctx=None): - return self._visit_for(new.For, node, parent, - assign_ctx=assign_ctx) - def visit_importfrom(self, node, parent, assign_ctx=None): - """visit a From node by returning a fresh instance of it""" + def visit_for(self, node, parent): + return self._visit_for(nodes.For, node, parent) + + def visit_importfrom(self, node, parent): + """visit an ImportFrom node by returning a fresh instance of it""" names = [(alias.name, alias.asname) for alias in node.names] - newnode = new.ImportFrom(node.module or '', names, node.level or None) - _set_infos(node, newnode, parent) + newnode = nodes.ImportFrom(node.module or '', names, node.level or None, + getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) # store From names to add them to locals after building self._import_from_nodes.append(newnode) return newnode - def _visit_functiondef(self, cls, node, parent, assign_ctx=None): + def _visit_functiondef(self, cls, node, parent): """visit an FunctionDef node to become astroid""" self._global_names.append({}) - newnode = cls(node.name, None) - _lineno_parent(node, newnode, parent) - _init_set_doc(node, newnode) - newnode.args = self.visit(node.args, newnode, assign_ctx) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - decorators = node.decorator_list - if decorators: - newnode.decorators = self.visit_decorators( - node, newnode, assign_ctx) - if PY3K and node.returns: - newnode.returns = self.visit(node.returns, newnode, - assign_ctx) + node, doc = _get_doc(node) + newnode = cls(node.name, doc, node.lineno, + node.col_offset, parent) + if node.decorator_list: + decorators = self.visit_decorators(node, newnode) + else: + decorators = None + if PY3 and node.returns: + returns = self.visit(node.returns, newnode) + else: + returns = None + newnode.postinit(self.visit(node.args, newnode), + [self.visit(child, newnode) + for child in node.body], + decorators, returns) self._global_names.pop() - frame = newnode.parent.frame() - frame.set_local(newnode.name, newnode) return newnode - def visit_functiondef(self, node, parent, assign_ctx=None): - return self._visit_functiondef(new.FunctionDef, node, parent, - assign_ctx=assign_ctx) - - def visit_generatorexp(self, node, parent, assign_ctx=None): - """visit a GenExpr node by returning a fresh instance of it""" - newnode = new.GeneratorExp() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode, assign_ctx) - newnode.generators = [self.visit(child, newnode, assign_ctx) - for child in node.generators] - return newnode - - def visit_attribute(self, node, parent, assign_ctx=None): - """visit a Getattr node by returning a fresh instance of it""" - # pylint: disable=redefined-variable-type - if assign_ctx == "Del": + def visit_functiondef(self, node, parent): + return self._visit_functiondef(nodes.FunctionDef, node, parent) + + def visit_generatorexp(self, node, parent): + """visit a GeneratorExp node by returning a fresh instance of it""" + newnode = nodes.GeneratorExp(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.elt, newnode), + [self.visit(child, newnode) + for child in node.generators]) + return newnode + 
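As an illustration of the construction protocol the hunks above all converge on (this sketch is not taken from the diff itself): positional metadata goes to the node constructor, and child nodes are attached afterwards through postinit(), instead of being assigned attribute by attribute on a bare new.* instance. A minimal sketch building the tree for the expression 1 + 2 by hand, using only the nodes.BinOp/nodes.Const signatures visible in this diff; the line and column values are made up, and parent=None stands in for whatever enclosing node a real rebuild would supply:

    from astroid import nodes

    # First phase: create the node with its positional information only.
    binop = nodes.BinOp('+', lineno=1, col_offset=0, parent=None)

    # Second phase: build the children with the new node as their parent,
    # then attach them through postinit(), as the rebuilder methods above do.
    left = nodes.Const(1, lineno=1, col_offset=0, parent=binop)
    right = nodes.Const(2, lineno=1, col_offset=4, parent=binop)
    binop.postinit(left, right)

    print(binop.as_string())  # renders the reconstructed source: 1 + 2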
+ def visit_attribute(self, node, parent): + """visit an Attribute node by returning a fresh instance of it""" + context = _get_context(node) + if context == astroid.Del: # FIXME : maybe we should reintroduce and visit_delattr ? - # for instance, deactivating asscontext - newnode = new.DelAttr() - elif assign_ctx == "Assign": - # FIXME : maybe we should call visit_assattr ? + # for instance, deactivating assign_ctx + newnode = nodes.DelAttr(node.attr, node.lineno, node.col_offset, + parent) + elif context == astroid.Store: + newnode = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, + parent) # Prohibit a local save if we are in an ExceptHandler. - newnode = new.AssignAttr() - if not isinstance(parent, new.ExceptHandler): + if not isinstance(parent, astroid.ExceptHandler): self._delayed_assattr.append(newnode) else: - newnode = new.Attribute() - _lineno_parent(node, newnode, parent) - newnode.expr = self.visit(node.value, newnode, None) - newnode.attrname = node.attr + newnode = nodes.Attribute(node.attr, node.lineno, node.col_offset, + parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_global(self, node, parent, assign_ctx=None): - """visit an Global node to become astroid""" - newnode = new.Global(node.names) - _set_infos(node, newnode, parent) + def visit_global(self, node, parent): + """visit a Global node to become astroid""" + newnode = nodes.Global(node.names, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) if self._global_names: # global at the module level, no effect for name in node.names: self._global_names[-1].setdefault(name, []).append(newnode) return newnode - def visit_if(self, node, parent, assign_ctx=None): - """visit a If node by returning a fresh instance of it""" - newnode = new.If() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode, assign_ctx) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - newnode.orelse = [self.visit(child, newnode, assign_ctx) - for child in node.orelse] + def visit_if(self, node, parent): + """visit an If node by returning a fresh instance of it""" + newnode = nodes.If(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.test, newnode), + [self.visit(child, newnode) + for child in node.body], + [self.visit(child, newnode) + for child in node.orelse]) return newnode - def visit_ifexp(self, node, parent, assign_ctx=None): + def visit_ifexp(self, node, parent): """visit a IfExp node by returning a fresh instance of it""" - newnode = new.IfExp() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode, assign_ctx) - newnode.body = self.visit(node.body, newnode, assign_ctx) - newnode.orelse = self.visit(node.orelse, newnode, assign_ctx) + newnode = nodes.IfExp(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.test, newnode), + self.visit(node.body, newnode), + self.visit(node.orelse, newnode)) return newnode - def visit_import(self, node, parent, assign_ctx=None): + def visit_import(self, node, parent): """visit a Import node by returning a fresh instance of it""" - newnode = new.Import() - _set_infos(node, newnode, parent) - newnode.names = [(alias.name, alias.asname) for alias in node.names] + names = [(alias.name, alias.asname) for alias in node.names] + newnode = nodes.Import(names, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) # save import names in parent's locals: for (name, asname) in 
newnode.names: name = asname or name - newnode.parent.set_local(name.split('.')[0], newnode) + parent.set_local(name.split('.')[0], newnode) return newnode - def visit_index(self, node, parent, assign_ctx=None): + def visit_index(self, node, parent): """visit a Index node by returning a fresh instance of it""" - newnode = new.Index() - newnode.parent = parent - newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode = nodes.Index(parent=parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_keyword(self, node, parent, assign_ctx=None): + def visit_keyword(self, node, parent): """visit a Keyword node by returning a fresh instance of it""" - newnode = new.Keyword() - newnode.parent = parent - newnode.arg = node.arg - newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode = nodes.Keyword(node.arg, parent=parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_lambda(self, node, parent, assign_ctx=None): + def visit_lambda(self, node, parent): """visit a Lambda node by returning a fresh instance of it""" - newnode = new.Lambda() - _lineno_parent(node, newnode, parent) - newnode.args = self.visit(node.args, newnode, assign_ctx) - newnode.body = self.visit(node.body, newnode, assign_ctx) + newnode = nodes.Lambda(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.args, newnode), + self.visit(node.body, newnode)) return newnode - def visit_list(self, node, parent, assign_ctx=None): + def visit_list(self, node, parent): """visit a List node by returning a fresh instance of it""" - newnode = new.List() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode, assign_ctx) - for child in node.elts] + context = _get_context(node) + newnode = nodes.List(ctx=context, + lineno=node.lineno, + col_offset=node.col_offset, + parent=parent) + newnode.postinit([self.visit(child, newnode) + for child in node.elts]) return newnode - def visit_listcomp(self, node, parent, assign_ctx=None): + def visit_listcomp(self, node, parent): """visit a ListComp node by returning a fresh instance of it""" - newnode = new.ListComp() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode, assign_ctx) - newnode.generators = [self.visit(child, newnode, assign_ctx) - for child in node.generators] + newnode = nodes.ListComp(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.elt, newnode), + [self.visit(child, newnode) + for child in node.generators]) return newnode - def visit_name(self, node, parent, assign_ctx=None): + def visit_name(self, node, parent): """visit a Name node by returning a fresh instance of it""" + context = _get_context(node) # True and False can be assigned to something in py2x, so we have to - # check first the asscontext - # pylint: disable=redefined-variable-type - if assign_ctx == "Del": - newnode = new.DelName() - elif assign_ctx is not None: # Ass - newnode = new.AssName() + # check first the context. 
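A short aside on the context handling used here and in visit_attribute (not part of the diff): the old string-based assign_ctx threading is replaced by reading the ctx attribute that the standard ast module already places on Name, Attribute, List, Tuple, Starred and Subscript nodes. The CONTEXTS table below is an assumed reconstruction, since the real one is defined earlier in rebuilder.py outside this hunk, but it mirrors what _get_context() relies on:

    import ast       # the stdlib parser; rebuilder.py imports it as _ast
    import astroid

    # Assumed mapping from _ast context types to astroid's context markers.
    CONTEXTS = {ast.Load: astroid.Load,
                ast.Store: astroid.Store,
                ast.Del: astroid.Del}

    def get_context(node):
        # Fall back to Load for unknown ctx types, as _get_context() does.
        return CONTEXTS.get(type(node.ctx), astroid.Load)

    assign = ast.parse("target = source").body[0]
    print(get_context(assign.targets[0]) is astroid.Store)  # True
    print(get_context(assign.value) is astroid.Load)        # True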
+ if context == astroid.Del: + newnode = nodes.DelName(node.id, node.lineno, node.col_offset, + parent) + elif context == astroid.Store: + newnode = nodes.AssignName(node.id, node.lineno, node.col_offset, + parent) elif node.id in CONST_NAME_TRANSFORMS: - newnode = new.Const(CONST_NAME_TRANSFORMS[node.id]) - _set_infos(node, newnode, parent) + newnode = nodes.Const(CONST_NAME_TRANSFORMS[node.id], + getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) return newnode else: - newnode = new.Name() - _lineno_parent(node, newnode, parent) - newnode.name = node.id + newnode = nodes.Name(node.id, node.lineno, node.col_offset, parent) # XXX REMOVE me : - if assign_ctx in ('Del', 'Assign'): # 'Aug' ?? + if context in (astroid.Del, astroid.Store): # 'Aug' ?? self._save_assignment(newnode) return newnode - def visit_bytes(self, node, parent, assign_ctx=None): - """visit a Bytes node by returning a fresh instance of Const""" - newnode = new.Const(node.s) - _set_infos(node, newnode, parent) - return newnode + def visit_str(self, node, parent): + """visit a String/Bytes node by returning a fresh instance of Const""" + return nodes.Const(node.s, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) + visit_bytes = visit_str - def visit_num(self, node, parent, assign_ctx=None): + def visit_num(self, node, parent): """visit a Num node by returning a fresh instance of Const""" - newnode = new.Const(node.n) - _set_infos(node, newnode, parent) - return newnode + return nodes.Const(node.n, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) - def visit_pass(self, node, parent, assign_ctx=None): + def visit_pass(self, node, parent): """visit a Pass node by returning a fresh instance of it""" - newnode = new.Pass() - _set_infos(node, newnode, parent) - return newnode + return nodes.Pass(node.lineno, node.col_offset, parent) - def visit_str(self, node, parent, assign_ctx=None): - """visit a Str node by returning a fresh instance of Const""" - newnode = new.Const(node.s) - _set_infos(node, newnode, parent) - return newnode - - def visit_print(self, node, parent, assign_ctx=None): + def visit_print(self, node, parent): """visit a Print node by returning a fresh instance of it""" - newnode = new.Print() - _lineno_parent(node, newnode, parent) - newnode.nl = node.nl - if node.dest is not None: - newnode.dest = self.visit(node.dest, newnode, assign_ctx) - newnode.values = [self.visit(child, newnode, assign_ctx) - for child in node.values] + newnode = nodes.Print(node.nl, node.lineno, node.col_offset, parent) + newnode.postinit(_visit_or_none(node, 'dest', self, newnode), + [self.visit(child, newnode) + for child in node.values]) return newnode - def visit_raise(self, node, parent, assign_ctx=None): + def visit_raise(self, node, parent): """visit a Raise node by returning a fresh instance of it""" - newnode = new.Raise() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.exc = self.visit(node.type, newnode, assign_ctx) - if node.inst is not None: - newnode.inst = self.visit(node.inst, newnode, assign_ctx) - if node.tback is not None: - newnode.tback = self.visit(node.tback, newnode, assign_ctx) + newnode = nodes.Raise(node.lineno, node.col_offset, parent) + newnode.postinit(_visit_or_none(node, 'type', self, newnode), + _visit_or_none(node, 'inst', self, newnode), + _visit_or_none(node, 'tback', self, newnode)) return newnode - def visit_return(self, node, parent, assign_ctx=None): + def visit_return(self, node, parent): 
"""visit a Return node by returning a fresh instance of it""" - newnode = new.Return() - _lineno_parent(node, newnode, parent) + newnode = nodes.Return(node.lineno, node.col_offset, parent) if node.value is not None: - newnode.value = self.visit(node.value, newnode, assign_ctx) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_set(self, node, parent, assign_ctx=None): + def visit_set(self, node, parent): """visit a Set node by returning a fresh instance of it""" - newnode = new.Set() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode, assign_ctx) - for child in node.elts] + newnode = nodes.Set(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.elts]) return newnode - def visit_setcomp(self, node, parent, assign_ctx=None): + def visit_setcomp(self, node, parent): """visit a SetComp node by returning a fresh instance of it""" - newnode = new.SetComp() - _lineno_parent(node, newnode, parent) - newnode.elt = self.visit(node.elt, newnode, assign_ctx) - newnode.generators = [self.visit(child, newnode, assign_ctx) - for child in node.generators] + newnode = nodes.SetComp(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.elt, newnode), + [self.visit(child, newnode) + for child in node.generators]) return newnode - def visit_slice(self, node, parent, assign_ctx=None): + def visit_slice(self, node, parent): """visit a Slice node by returning a fresh instance of it""" - newnode = new.Slice() - newnode.parent = parent - if node.lower is not None: - newnode.lower = self.visit(node.lower, newnode, assign_ctx) - if node.upper is not None: - newnode.upper = self.visit(node.upper, newnode, assign_ctx) - if node.step is not None: - newnode.step = self.visit(node.step, newnode, assign_ctx) + newnode = nodes.Slice(parent=parent) + newnode.postinit(_visit_or_none(node, 'lower', self, newnode), + _visit_or_none(node, 'upper', self, newnode), + _visit_or_none(node, 'step', self, newnode)) return newnode - def visit_subscript(self, node, parent, assign_ctx=None): + def visit_subscript(self, node, parent): """visit a Subscript node by returning a fresh instance of it""" - newnode = new.Subscript() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode, None) - newnode.slice = self.visit(node.slice, newnode, None) + context = _get_context(node) + newnode = nodes.Subscript(ctx=context, + lineno=node.lineno, + col_offset=node.col_offset, + parent=parent) + newnode.postinit(self.visit(node.value, newnode), + self.visit(node.slice, newnode)) return newnode - def visit_tryexcept(self, node, parent, assign_ctx=None): + def visit_tryexcept(self, node, parent): """visit a TryExcept node by returning a fresh instance of it""" - newnode = new.TryExcept() - _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - newnode.handlers = [self.visit(child, newnode, assign_ctx) - for child in node.handlers] - newnode.orelse = [self.visit(child, newnode, assign_ctx) - for child in node.orelse] + newnode = nodes.TryExcept(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.body], + [self.visit(child, newnode) + for child in node.handlers], + [self.visit(child, newnode) + for child in node.orelse]) return newnode - def visit_tryfinally(self, node, parent, assign_ctx=None): + def visit_tryfinally(self, node, parent): """visit a TryFinally node by 
returning a fresh instance of it""" - newnode = new.TryFinally() - _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - newnode.finalbody = [self.visit(n, newnode, assign_ctx) - for n in node.finalbody] + newnode = nodes.TryFinally(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.body], + [self.visit(n, newnode) + for n in node.finalbody]) return newnode - def visit_tuple(self, node, parent, assign_ctx=None): + def visit_tuple(self, node, parent): """visit a Tuple node by returning a fresh instance of it""" - newnode = new.Tuple() - _lineno_parent(node, newnode, parent) - newnode.elts = [self.visit(child, newnode, assign_ctx) - for child in node.elts] + context = _get_context(node) + newnode = nodes.Tuple(ctx=context, + lineno=node.lineno, + col_offset=node.col_offset, + parent=parent) + newnode.postinit([self.visit(child, newnode) + for child in node.elts]) return newnode - def visit_unaryop(self, node, parent, assign_ctx=None): + def visit_unaryop(self, node, parent): """visit a UnaryOp node by returning a fresh instance of it""" - newnode = new.UnaryOp() - _lineno_parent(node, newnode, parent) - newnode.operand = self.visit(node.operand, newnode, assign_ctx) - newnode.op = _UNARY_OP_CLASSES[node.op.__class__] + newnode = nodes.UnaryOp(_UNARY_OP_CLASSES[node.op.__class__], + node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.operand, newnode)) return newnode - def visit_while(self, node, parent, assign_ctx=None): + def visit_while(self, node, parent): """visit a While node by returning a fresh instance of it""" - newnode = new.While() - _lineno_parent(node, newnode, parent) - newnode.test = self.visit(node.test, newnode, assign_ctx) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - newnode.orelse = [self.visit(child, newnode, assign_ctx) - for child in node.orelse] + newnode = nodes.While(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.test, newnode), + [self.visit(child, newnode) + for child in node.body], + [self.visit(child, newnode) + for child in node.orelse]) return newnode - def visit_with(self, node, parent, assign_ctx=None): - newnode = new.With() - _lineno_parent(node, newnode, parent) - expr = self.visit(node.context_expr, newnode, assign_ctx) + def visit_with(self, node, parent): + newnode = nodes.With(node.lineno, node.col_offset, parent) + expr = self.visit(node.context_expr, newnode) if node.optional_vars is not None: - vars = self.visit(node.optional_vars, newnode, 'Assign') + optional_vars = self.visit(node.optional_vars, newnode) else: - vars = None - self.asscontext = None - newnode.items = [(expr, vars)] - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] + optional_vars = None + newnode.postinit([(expr, optional_vars)], + [self.visit(child, newnode) + for child in node.body]) return newnode - def visit_yield(self, node, parent, assign_ctx=None): + def visit_yield(self, node, parent): """visit a Yield node by returning a fresh instance of it""" - return _create_yield_node(node, parent, self, new.Yield) + newnode = nodes.Yield(node.lineno, node.col_offset, parent) + if node.value is not None: + newnode.postinit(self.visit(node.value, newnode)) + return newnode + -class TreeRebuilder3k(TreeRebuilder): +class TreeRebuilder3(TreeRebuilder): """extend and overwrite TreeRebuilder for python3k""" - def visit_arg(self, node, parent, 
assign_ctx=None): + def visit_arg(self, node, parent): """visit a arg node by returning a fresh AssName instance""" # TODO(cpopa): introduce an Arg node instead of using AssignName. - return self.visit_assignname(node, parent, assign_ctx, node.arg) + return self.visit_assignname(node, parent, node.arg) - def visit_nameconstant(self, node, parent, assign_ctx=None): + def visit_nameconstant(self, node, parent): # in Python 3.4 we have NameConstant for True / False / None - newnode = new.Const(node.value) - _set_infos(node, newnode, parent) - return newnode - - def visit_arguments(self, node, parent, assign_ctx=None): - newnode = super(TreeRebuilder3k, self).visit_arguments(node, parent, assign_ctx) - newnode.kwonlyargs = [self.visit(child, newnode, 'Assign') - for child in node.kwonlyargs] - newnode.kw_defaults = [self.visit(child, newnode, None) - if child else None for child in node.kw_defaults] - newnode.annotations = [ - self.visit(arg.annotation, newnode, None) if arg.annotation else None - for arg in node.args] - return newnode + return nodes.Const(node.value, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) - def visit_excepthandler(self, node, parent, assign_ctx=None): + def visit_excepthandler(self, node, parent): """visit an ExceptHandler node by returning a fresh instance of it""" - newnode = new.ExceptHandler() - _lineno_parent(node, newnode, parent) - if node.type is not None: - newnode.type = self.visit(node.type, newnode, assign_ctx) - if node.name is not None: - newnode.name = self.visit_assignname(node, newnode, 'Assign', node.name) - newnode.body = [self.visit(child, newnode, None) - for child in node.body] + newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent) + if node.name: + name = self.visit_assignname(node, newnode, node.name) + else: + name = None + newnode.postinit(_visit_or_none(node, 'type', self, newnode), + name, + [self.visit(child, newnode) + for child in node.body]) return newnode - def visit_nonlocal(self, node, parent, assign_ctx=None): + def visit_nonlocal(self, node, parent): """visit a Nonlocal node and return a new instance of it""" - newnode = new.Nonlocal(node.names) - _set_infos(node, newnode, parent) - return newnode + return nodes.Nonlocal(node.names, getattr(node, 'lineno', None), + getattr(node, 'col_offset', None), parent) + - def visit_raise(self, node, parent, assign_ctx=None): + def visit_raise(self, node, parent): """visit a Raise node by returning a fresh instance of it""" - newnode = new.Raise() - _lineno_parent(node, newnode, parent) + newnode = nodes.Raise(node.lineno, node.col_offset, parent) # no traceback; anyway it is not used in Pylint - if node.exc is not None: - newnode.exc = self.visit(node.exc, newnode, assign_ctx) - if node.cause is not None: - newnode.cause = self.visit(node.cause, newnode, assign_ctx) + newnode.postinit(_visit_or_none(node, 'exc', self, newnode), + _visit_or_none(node, 'cause', self, newnode)) return newnode - def visit_starred(self, node, parent, assign_ctx=None): + def visit_starred(self, node, parent): """visit a Starred node and return a new instance of it""" - newnode = new.Starred() - _lineno_parent(node, newnode, parent) - newnode.value = self.visit(node.value, newnode, assign_ctx) + context = _get_context(node) + newnode = nodes.Starred(ctx=context, lineno=node.lineno, + col_offset=node.col_offset, + parent=parent) + newnode.postinit(self.visit(node.value, newnode)) return newnode - def visit_try(self, node, parent, assign_ctx=None): - # python 3.3 introduce a 
new Try node replacing TryFinally/TryExcept nodes - # pylint: disable=redefined-variable-type + def visit_try(self, node, parent): + # python 3.3 introduce a new Try node replacing + # TryFinally/TryExcept nodes if node.finalbody: - newnode = new.TryFinally() - _lineno_parent(node, newnode, parent) - newnode.finalbody = [self.visit(n, newnode, assign_ctx) - for n in node.finalbody] + newnode = nodes.TryFinally(node.lineno, node.col_offset, parent) if node.handlers: - excnode = new.TryExcept() - _lineno_parent(node, excnode, newnode) - excnode.body = [self.visit(child, excnode, assign_ctx) - for child in node.body] - excnode.handlers = [self.visit(child, excnode, assign_ctx) - for child in node.handlers] - excnode.orelse = [self.visit(child, excnode, assign_ctx) - for child in node.orelse] - newnode.body = [excnode] + body = [self.visit_tryexcept(node, newnode)] else: - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] + body = [self.visit(child, newnode) + for child in node.body] + newnode.postinit(body, + [self.visit(n, newnode) + for n in node.finalbody]) + return newnode elif node.handlers: - newnode = new.TryExcept() - _lineno_parent(node, newnode, parent) - newnode.body = [self.visit(child, newnode, assign_ctx) - for child in node.body] - newnode.handlers = [self.visit(child, newnode, assign_ctx) - for child in node.handlers] - newnode.orelse = [self.visit(child, newnode, assign_ctx) - for child in node.orelse] + return self.visit_tryexcept(node, parent) + + def visit_annassign(self, node, parent): + """visit an AnnAssign node by returning a fresh instance of it""" + newnode = nodes.AnnAssign(node.lineno, node.col_offset, parent) + annotation = _visit_or_none(node, 'annotation', self, newnode) + newnode.postinit(target=self.visit(node.target, newnode), + annotation=annotation, + simple=node.simple, + value=_visit_or_none(node, 'value', self, newnode)) return newnode - def _visit_with(self, cls, node, parent, assign_ctx=None): + def _visit_with(self, cls, node, parent): if 'items' not in node._fields: # python < 3.3 - return super(TreeRebuilder3k, self).visit_with(node, parent, - assign_ctx) + return super(TreeRebuilder3, self).visit_with(node, parent) - newnode = cls() - _lineno_parent(node, newnode, parent) + newnode = cls(node.lineno, node.col_offset, parent) def visit_child(child): expr = self.visit(child.context_expr, newnode) - if child.optional_vars: - var = self.visit(child.optional_vars, newnode, - 'Assign') - else: - var = None + var = _visit_or_none(child, 'optional_vars', self, newnode) return expr, var - newnode.items = [visit_child(child) - for child in node.items] - newnode.body = [self.visit(child, newnode, None) - for child in node.body] + newnode.postinit([visit_child(child) for child in node.items], + [self.visit(child, newnode) + for child in node.body]) return newnode - def visit_with(self, node, parent, assign_ctx=None): - return self._visit_with(new.With, node, parent, assign_ctx=assign_ctx) - - def visit_yieldfrom(self, node, parent, assign_ctx=None): - return _create_yield_node(node, parent, self, new.YieldFrom) + def visit_with(self, node, parent): + return self._visit_with(nodes.With, node, parent) - def visit_classdef(self, node, parent, assign_ctx=None): - newnode = super(TreeRebuilder3k, self).visit_classdef(node, parent, assign_ctx) - newnode._newstyle = True - for keyword in node.keywords: - if keyword.arg == 'metaclass': - newnode._metaclass = self.visit(keyword, newnode, assign_ctx).value - break + def 
visit_yieldfrom(self, node, parent): + newnode = nodes.YieldFrom(node.lineno, node.col_offset, parent) + if node.value is not None: + newnode.postinit(self.visit(node.value, newnode)) return newnode + def visit_classdef(self, node, parent, newstyle=True): + return super(TreeRebuilder3, self).visit_classdef(node, parent, + newstyle=newstyle) + # Async structs added in Python 3.5 - def visit_asyncfunctiondef(self, node, parent, assign_ctx=None): - return self._visit_functiondef(new.AsyncFunctionDef, node, parent, - assign_ctx=assign_ctx) - - - def visit_asyncfor(self, node, parent, assign_ctx=None): - return self._visit_for(new.AsyncFor, node, parent, - assign_ctx=assign_ctx) - - def visit_await(self, node, parent, assign_ctx=None): - newnode = new.Await() - newnode.lineno = node.lineno - newnode.col_offset = node.col_offset - newnode.parent = parent - newnode.value = self.visit(node.value, newnode, None) - return newnode - - def visit_asyncwith(self, node, parent, assign_ctx=None): - return self._visit_with(new.AsyncWith, node, parent, - assign_ctx=assign_ctx) + def visit_asyncfunctiondef(self, node, parent): + return self._visit_functiondef(nodes.AsyncFunctionDef, node, parent) + + def visit_asyncfor(self, node, parent): + return self._visit_for(nodes.AsyncFor, node, parent) + + def visit_await(self, node, parent): + newnode = nodes.Await(node.lineno, node.col_offset, parent) + newnode.postinit(value=self.visit(node.value, newnode)) + return newnode + def visit_asyncwith(self, node, parent): + return self._visit_with(nodes.AsyncWith, node, parent) + + def visit_joinedstr(self, node, parent): + newnode = nodes.JoinedStr(node.lineno, node.col_offset, parent) + newnode.postinit([self.visit(child, newnode) + for child in node.values]) + return newnode + + def visit_formattedvalue(self, node, parent): + newnode = nodes.FormattedValue(node.lineno, node.col_offset, parent) + newnode.postinit(self.visit(node.value, newnode), + node.conversion, + _visit_or_none(node, 'format_spec', self, newnode)) + return newnode if sys.version_info >= (3, 0): - TreeRebuilder = TreeRebuilder3k + TreeRebuilder = TreeRebuilder3 diff -Nru astroid-1.4.9/astroid/scoped_nodes.py astroid-1.5.3/astroid/scoped_nodes.py --- astroid-1.4.9/astroid/scoped_nodes.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/scoped_nodes.py 2017-04-17 11:04:07.000000000 +0000 @@ -1,20 +1,12 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2011, 2013-2015 Google, Inc. 
+# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015 Rene Zhang + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """ This module contains the classes for "scoped" node, i.e. which are opening a @@ -22,20 +14,22 @@ Lambda, GeneratorExp, DictComp and SetComp to some extent). """ +import sys import io import itertools import warnings import six -import wrapt from astroid import bases from astroid import context as contextmod from astroid import exceptions +from astroid import decorators as decorators_mod +from astroid.interpreter import objectmodel +from astroid.interpreter import dunder_lookup from astroid import manager from astroid import mixins from astroid import node_classes -from astroid import decorators as decorators_mod from astroid import util @@ -43,7 +37,7 @@ ITER_METHODS = ('__iter__', '__getitem__') -def _c3_merge(sequences): +def _c3_merge(sequences, cls, context): """Merges MROs in *sequences* to a single MRO using the C3 algorithm. Adapted from http://www.python.org/download/releases/2.3/mro/. @@ -65,12 +59,10 @@ if not candidate: # Show all the remaining bases, which were considered as # candidates for the next mro sequence. - bases = ["({})".format(", ".join(base.name - for base in subsequence)) - for subsequence in sequences] raise exceptions.InconsistentMroError( - "Cannot create a consistent method resolution " - "order for bases %s" % ", ".join(bases)) + message="Cannot create a consistent method resolution order " + "for MROs {mros} of class {cls!r}.", + mros=sequences, cls=cls, context=context) result.append(candidate) # remove the chosen candidate @@ -79,21 +71,13 @@ del seq[0] -def _verify_duplicates_mro(sequences): +def _verify_duplicates_mro(sequences, cls, context): for sequence in sequences: names = [node.qname() for node in sequence] if len(names) != len(set(names)): - raise exceptions.DuplicateBasesError('Duplicates found in the mro.') - - -def remove_nodes(cls): - @wrapt.decorator - def decorator(func, instance, args, kwargs): - nodes = [n for n in func(*args, **kwargs) if not isinstance(n, cls)] - if not nodes: - raise exceptions.NotFoundError() - return nodes - return decorator + raise exceptions.DuplicateBasesError( + message='Duplicates found in MROs {mros} for {cls!r}.', + mros=sequences, cls=cls, context=context) def function_to_method(n, klass): @@ -105,20 +89,6 @@ return n -def std_special_attributes(self, name, add_locals=True): - if add_locals: - locals = self._locals - else: - locals = {} - if name == '__name__': - return [node_classes.const_factory(self.name)] + locals.get(name, []) - if name == '__doc__': - return [node_classes.const_factory(self.doc)] + locals.get(name, []) - if name == '__dict__': - return [node_classes.Dict()] + locals.get(name, []) - raise exceptions.NotFoundError(name) - - MANAGER = manager.AstroidManager() def builtin_lookup(name): """lookup a name into the builtin module @@ -129,14 +99,15 @@ if name == '__dict__': return builtin_astroid, () try: - stmts = builtin_astroid._locals[name] + stmts = builtin_astroid.locals[name] except KeyError: stmts = () return builtin_astroid, stmts # TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup -class LocalsDictNodeNG(node_classes.LookupMixIn, bases.NodeNG): +class LocalsDictNodeNG(node_classes.LookupMixIn, + node_classes.NodeNG): """ this class provides locals handling common to Module, 
FunctionDef and ClassDef nodes, including a dict like interface for direct access to locals information @@ -146,23 +117,14 @@ # dictionary of locals with name as key and node defining the local as # value - @property - def locals(self): - util.attribute_to_function_warning('locals', 2.0, 'get_locals') - return self._locals - @locals.setter - def locals(self, _locals): - util.attribute_to_function_warning('locals', 2.0, 'get_locals') - self._locals = _locals - @locals.deleter - def locals(self): - util.attribute_to_function_warning('locals', 2.0, 'get_locals') - del self._locals + + locals = {} def qname(self): """return the 'qualified' name of the node, eg module.name, module.class.name ... """ + # pylint: disable=no-member; github.com/pycqa/astroid/issues/278 if self.parent is None: return self.name return '%s.%s' % (self.parent.frame().qname(), self.name) @@ -181,7 +143,7 @@ def _scope_lookup(self, node, name, offset=0): """XXX method for interfacing the scope lookup""" try: - stmts = node._filter_stmts(self._locals[name], self, offset) + stmts = node._filter_stmts(self.locals[name], self, offset) except KeyError: stmts = () if stmts: @@ -202,13 +164,17 @@ if the name is already defined, ignore it """ - #assert not stmt in self._locals.get(name, ()), (self, stmt) - self._locals.setdefault(name, []).append(stmt) + #assert not stmt in self.locals.get(name, ()), (self, stmt) + self.locals.setdefault(name, []).append(stmt) __setitem__ = set_local def _append_node(self, child): """append a child, linking it in the tree""" + # pylint: disable=no-member; depending by the class + # which uses the current class as a mixin or base class. + # It's rewritten in 2.0, so it makes no sense for now + # to spend development time on it. self.body.append(child) child.parent = self @@ -227,7 +193,7 @@ :param item: the name of the locally defined object :raises KeyError: if the name is not defined """ - return self._locals[item][0] + return self.locals[item][0] def __iter__(self): """method from the `dict` interface returning an iterator on @@ -239,7 +205,7 @@ """method from the `dict` interface returning a tuple containing locally defined names """ - return list(self._locals.keys()) + return list(self.locals.keys()) def values(self): """method from the `dict` interface returning a tuple containing @@ -255,7 +221,7 @@ return list(zip(self.keys(), self.values())) def __contains__(self, name): - return name in self._locals + return name in self.locals class Module(LocalsDictNodeNG): @@ -268,9 +234,9 @@ # the file from which as been extracted the astroid representation. It may # be None if the representation has been built from a built-in module - source_file = None + file = None # Alternatively, if built from a string/bytes, this can be set - source_code = None + file_bytes = None # encoding of python source file, so we can get unicode out of it (python2 # only) file_encoding = None @@ -282,96 +248,43 @@ package = None # dictionary of globals with name as key and node defining the global # as value - _globals = None + globals = None # Future imports - _future_imports = None + future_imports = None + special_attributes = objectmodel.ModuleModel() # names of python special attributes (handled by getattr impl.) 
- special_attributes = set(('__name__', '__doc__', '__file__', '__path__', - '__dict__')) + # names of module attributes available through the global scope scope_attrs = set(('__name__', '__doc__', '__file__', '__path__')) - def __init__(self, name, doc, pure_python=True): + _other_fields = ('name', 'doc', 'file', 'path', 'package', + 'pure_python', 'future_imports') + _other_other_fields = ('locals', 'globals') + + def __init__(self, name, doc, file=None, path=None, package=None, + parent=None, pure_python=True): self.name = name self.doc = doc + self.file = file + self.path = path + self.package = package + self.parent = parent self.pure_python = pure_python - self._locals = self._globals = {} + self.locals = self.globals = {} self.body = [] - self._future_imports = set() - - # Future deprecation warnings - @property - def file(self): - util.rename_warning('file', 2.0, 'source_file') - return self.source_file - @file.setter - def file(self, source_file): - util.rename_warning('file', 2.0, 'source_file') - self.source_file = source_file - @file.deleter - def file(self): - util.rename_warning('file', 2.0, 'source_file') - del self.source_file + self.future_imports = set() + # pylint: enable=redefined-builtin - @property - def path(self): - util.rename_warning('path', 2.0, 'source_file') - return self.source_file - @path.setter - def path(self, source_file): - util.rename_warning('path', 2.0, 'source_file') - self.source_file = source_file - @path.deleter - def path(self): - util.rename_warning('path', 2.0, 'source_file') - del self.source_file - - @property - def file_bytes(self): - util.rename_warning('file_bytes', 2.0, 'source_code') - return self.source_code - @file_bytes.setter - def file_bytes(self, source_code): - util.rename_warning('file_bytes', 2.0, 'source_code') - self.source_code = source_code - @file_bytes.deleter - def file_bytes(self): - util.rename_warning('file_bytes', 2.0, 'source_code') - del self.source_code - - @property - def globals(self): - util.attribute_to_function_warning('globals', 2.0, 'get_locals') - return self._locals - @globals.setter - def globals(self, _globals): - util.attribute_to_function_warning('globals', 2.0, 'get_locals') - self._locals = _globals - @globals.deleter - def globals(self): - util.attribute_to_function_warning('globals', 2.0, 'get_locals') - del self._locals - - @property - def future_imports(self): - util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') - return self._future_imports - @future_imports.setter - def future_imports(self, _future_imports): - util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') - self._future_imports = _future_imports - @future_imports.deleter - def future_imports(self): - util.attribute_to_function_warning('future_imports', 2.0, 'future_imports') - del self._future_imports + def postinit(self, body=None): + self.body = body def _get_stream(self): - if self.source_code is not None: - return io.BytesIO(self.source_code) - if self.source_file is not None: - stream = open(self.source_file, 'rb') + if self.file_bytes is not None: + return io.BytesIO(self.file_bytes) + if self.file is not None: + stream = open(self.file, 'rb') return stream return None @@ -406,10 +319,10 @@ return self.fromlineno, self.tolineno def scope_lookup(self, node, name, offset=0): - if name in self.scope_attrs and name not in self._locals: + if name in self.scope_attrs and name not in self.locals: try: return self, self.getattr(name) - except exceptions.NotFoundError: + except 
exceptions.AttributeInferenceError: return self, () return self._scope_lookup(node, name, offset) @@ -419,24 +332,26 @@ def display_type(self): return 'Module' - @remove_nodes(node_classes.DelName) def getattr(self, name, context=None, ignore_locals=False): - if name in self.special_attributes: - if name == '__file__': - return [node_classes.const_factory(self.source_file)] + self._locals.get(name, []) - if name == '__path__' and self.package: - return [node_classes.List()] + self._locals.get(name, []) - return std_special_attributes(self, name) - if not ignore_locals and name in self._locals: - return self._locals[name] - if self.package: + result = [] + name_in_locals = name in self.locals + + if name in self.special_attributes and not ignore_locals and not name_in_locals: + result = [self.special_attributes.lookup(name)] + elif not ignore_locals and name_in_locals: + result = self.locals[name] + elif self.package: try: - return [self.import_module(name, relative_only=True)] - except exceptions.AstroidBuildingException: - raise exceptions.NotFoundError(name) - except SyntaxError: - raise exceptions.NotFoundError(name) - raise exceptions.NotFoundError(name) + result = [self.import_module(name, relative_only=True)] + except (exceptions.AstroidBuildingError, SyntaxError): + util.reraise(exceptions.AttributeInferenceError(target=self, + attribute=name, + context=context)) + result = [n for n in result if not isinstance(n, node_classes.DelName)] + if result: + return result + raise exceptions.AttributeInferenceError(target=self, attribute=name, + context=context) def igetattr(self, name, context=None): """inferred getattr""" @@ -447,14 +362,15 @@ try: return bases._infer_stmts(self.getattr(name, context), context, frame=self) - except exceptions.NotFoundError: - raise exceptions.InferenceError(name) + except exceptions.AttributeInferenceError as error: + util.reraise(exceptions.InferenceError( + error.message, target=self, attribute=name, context=context)) def fully_defined(self): """return True if this module has been built from a .py file and so contains a complete representation including the code """ - return self.source_file is not None and self.source_file.endswith('.py') + return self.file is not None and self.file.endswith('.py') def statement(self): """return the first parent node marked as statement node @@ -473,7 +389,7 @@ if six.PY2: @decorators_mod.cachedproperty def _absolute_import_activated(self): - for stmt in self._locals.get('absolute_import', ()): + for stmt in self.locals.get('absolute_import', ()): if isinstance(stmt, node_classes.ImportFrom) and stmt.modname == '__future__': return True return False @@ -488,9 +404,10 @@ if relative_only and level is None: level = 0 absmodname = self.relative_to_absolute_name(modname, level) + try: return MANAGER.ast_from_module_name(absmodname) - except exceptions.AstroidBuildingException: + except exceptions.AstroidBuildingError: # we only want to import a sub module or package of this module, # skip here if relative_only: @@ -510,11 +427,16 @@ if level: if self.package: level = level - 1 + if level and self.name.count('.') < level: + raise exceptions.TooManyLevelsError( + level=level, name=self.name) + package_name = self.name.rsplit('.', level)[0] elif self.package: package_name = self.name else: package_name = self.name.rsplit('.', 1)[0] + if package_name: if not modname: return package_name @@ -532,12 +454,12 @@ # to avoid catching too many Exceptions default = [name for name in self.keys() if not name.startswith('_')] try: - all = 
self['__all__'] + all_values = self['__all__'] except KeyError: return default try: - explicit = next(all.assigned_stmts()) + explicit = next(all_values.assigned_stmts()) except exceptions.InferenceError: return default except AttributeError: @@ -568,7 +490,7 @@ inferred.append(inferred_node.value) return inferred - def _public_names(self): + def public_names(self): """Get the list of the names which are publicly available in this module.""" return [name for name in self.keys() if not name.startswith('_')] @@ -585,44 +507,91 @@ class GeneratorExp(ComprehensionScope): _astroid_fields = ('elt', 'generators') + _other_other_fields = ('locals',) + elt = None + generators = None + + def __init__(self, lineno=None, col_offset=None, parent=None): + self.locals = {} + super(GeneratorExp, self).__init__(lineno, col_offset, parent) + + def postinit(self, elt=None, generators=None): + self.elt = elt + if generators is None: + self.generators = [] + else: + self.generators = generators - def __init__(self): - self._locals = {} - self.elt = None - self.generators = [] + def bool_value(self): + return True class DictComp(ComprehensionScope): _astroid_fields = ('key', 'value', 'generators') + _other_other_fields = ('locals',) + key = None + value = None + generators = None - def __init__(self): - self._locals = {} - self.key = None - self.value = None - self.generators = [] + def __init__(self, lineno=None, col_offset=None, parent=None): + self.locals = {} + super(DictComp, self).__init__(lineno, col_offset, parent) + + def postinit(self, key=None, value=None, generators=None): + self.key = key + self.value = value + if generators is None: + self.generators = [] + else: + self.generators = generators + + def bool_value(self): + return util.Uninferable class SetComp(ComprehensionScope): _astroid_fields = ('elt', 'generators') + _other_other_fields = ('locals',) + elt = None + generators = None - def __init__(self): - self._locals = {} - self.elt = None - self.generators = [] + def __init__(self, lineno=None, col_offset=None, parent=None): + self.locals = {} + super(SetComp, self).__init__(lineno, col_offset, parent) + + def postinit(self, elt=None, generators=None): + self.elt = elt + if generators is None: + self.generators = [] + else: + self.generators = generators + def bool_value(self): + return util.Uninferable -class _ListComp(bases.NodeNG): + +class _ListComp(node_classes.NodeNG): """class representing a ListComp node""" _astroid_fields = ('elt', 'generators') elt = None generators = None + def postinit(self, elt=None, generators=None): + self.elt = elt + self.generators = generators + + def bool_value(self): + return util.Uninferable + if six.PY3: class ListComp(_ListComp, ComprehensionScope): """class representing a ListComp node""" - def __init__(self): - self._locals = {} + _other_other_fields = ('locals',) + + def __init__(self, lineno=None, col_offset=None, parent=None): + self.locals = {} + super(ListComp, self).__init__(lineno, col_offset, parent) else: class ListComp(_ListComp): """class representing a ListComp node""" @@ -654,15 +623,27 @@ class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG): _astroid_fields = ('args', 'body',) + _other_other_fields = ('locals',) name = '' # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod' - type = 'function' + @property + def type(self): + # pylint: disable=no-member + if self.args.args and self.args.args[0].name == 'self': + if isinstance(self.parent.scope(), ClassDef): + return 'method' + return 'function' - def 
__init__(self): - self._locals = {} + def __init__(self, lineno=None, col_offset=None, parent=None): + self.locals = {} self.args = [] self.body = [] + super(Lambda, self).__init__(lineno, col_offset, parent) + + def postinit(self, args, body): + self.args = args + self.body = body def pytype(self): if 'method' in self.type: @@ -679,6 +660,10 @@ def argnames(self): """return a list of argument names""" + # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 + # args is in fact redefined later on by postinit. Can't be changed + # to None due to a strong interaction between Lambda and FunctionDef. + if self.args.args: # maybe None with builtin functions names = _rec_get_names(self.args.args) else: @@ -691,9 +676,17 @@ def infer_call_result(self, caller, context=None): """infer what a function is returning when called""" + # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 + # args is in fact redefined later on by postinit. Can't be changed + # to None due to a strong interaction between Lambda and FunctionDef. + return self.body.infer(context) def scope_lookup(self, node, name, offset=0): + # pylint: disable=no-member; github.com/pycqa/astroid/issues/291 + # args is in fact redefined later on by postinit. Can't be changed + # to None due to a strong interaction between Lambda and FunctionDef. + if node in self.args.defaults or node in self.args.kw_defaults: frame = self.parent.frame() # line offset to avoid that def func(f=func) resolve the default @@ -704,40 +697,40 @@ frame = self return frame._scope_lookup(node, name, offset) + def bool_value(self): + return True -class FunctionDef(bases.Statement, Lambda): +class FunctionDef(node_classes.Statement, Lambda): if six.PY3: _astroid_fields = ('decorators', 'args', 'returns', 'body') returns = None else: _astroid_fields = ('decorators', 'args', 'body') - - special_attributes = set(('__name__', '__doc__', '__dict__')) + decorators = None + special_attributes = objectmodel.FunctionModel() is_function = True # attributes below are set by the builder module or by raw factories - decorators = None + _other_fields = ('name', 'doc') + _other_other_fields = ('locals', '_type') + _type = None - def __init__(self, name, doc): - self._locals = {} - self.args = [] - self.body = [] + def __init__(self, name=None, doc=None, lineno=None, + col_offset=None, parent=None): self.name = name self.doc = doc - self._instance_attrs = {} - - @property - def instance_attrs(self): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - return self._instance_attrs - @instance_attrs.setter - def instance_attrs(self, _instance_attrs): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - self._instance_attrs = _instance_attrs - @instance_attrs.deleter - def instance_attrs(self): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - del self._instance_attrs + self.instance_attrs = {} + super(FunctionDef, self).__init__(lineno, col_offset, parent) + if parent: + frame = parent.frame() + frame.set_local(name, self) + + # pylint: disable=arguments-differ; different than Lambdas + def postinit(self, args, body, decorators=None, returns=None): + self.args = args + self.body = body + self.decorators = decorators + self.returns = returns @decorators_mod.cachedproperty def extra_decorators(self): @@ -793,46 +786,50 @@ if isinstance(frame, ClassDef): if self.name == '__new__': return 'classmethod' + elif sys.version_info >= (3, 6) and self.name == '__init_subclass__': + return 
'classmethod' else: type_name = 'method' - if self.decorators: - for node in self.decorators.nodes: - if isinstance(node, node_classes.Name): - if node.name in builtin_descriptors: - return node.name - - if isinstance(node, node_classes.Call): - # Handle the following case: - # @some_decorator(arg1, arg2) - # def func(...) - # - try: - current = next(node.func.infer()) - except exceptions.InferenceError: - continue - _type = _infer_decorator_callchain(current) - if _type is not None: - return _type + if not self.decorators: + return type_name + for node in self.decorators.nodes: + if isinstance(node, node_classes.Name): + if node.name in builtin_descriptors: + return node.name + + if isinstance(node, node_classes.Call): + # Handle the following case: + # @some_decorator(arg1, arg2) + # def func(...) + # try: - for inferred in node.infer(): - # Check to see if this returns a static or a class method. - _type = _infer_decorator_callchain(inferred) - if _type is not None: - return _type + current = next(node.func.infer()) + except exceptions.InferenceError: + continue + _type = _infer_decorator_callchain(current) + if _type is not None: + return _type + + try: + for inferred in node.infer(): + # Check to see if this returns a static or a class method. + _type = _infer_decorator_callchain(inferred) + if _type is not None: + return _type - if not isinstance(inferred, ClassDef): + if not isinstance(inferred, ClassDef): + continue + for ancestor in inferred.ancestors(): + if not isinstance(ancestor, ClassDef): continue - for ancestor in inferred.ancestors(): - if not isinstance(ancestor, ClassDef): - continue - if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): - return 'classmethod' - elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): - return 'staticmethod' - except exceptions.InferenceError: - pass + if ancestor.is_subtype_of('%s.classmethod' % BUILTINS): + return 'classmethod' + elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS): + return 'staticmethod' + except exceptions.InferenceError: + pass return type_name @decorators_mod.cachedproperty @@ -861,19 +858,20 @@ """this method doesn't look in the instance_attrs dictionary since it's done by an Instance proxy at inference time. """ - if name == '__module__': - return [node_classes.const_factory(self.root().qname())] - if name in self._instance_attrs: - return self._instance_attrs[name] - return std_special_attributes(self, name, False) + if name in self.instance_attrs: + return self.instance_attrs[name] + if name in self.special_attributes: + return [self.special_attributes.lookup(name)] + raise exceptions.AttributeInferenceError(target=self, attribute=name) def igetattr(self, name, context=None): """Inferred getattr, which returns an iterator of inferred statements.""" try: return bases._infer_stmts(self.getattr(name, context), context, frame=self) - except exceptions.NotFoundError: - raise exceptions.InferenceError(name) + except exceptions.AttributeInferenceError as error: + util.reraise(exceptions.InferenceError( + error.message, target=self, attribute=name, context=context)) def is_method(self): """return true if the function node should be considered as a method""" @@ -887,7 +885,6 @@ result = set() decoratornodes = [] if self.decorators is not None: - # pylint: disable=unsupported-binary-operation; damn flow control. 
decoratornodes += self.decorators.nodes decoratornodes += self.extra_decorators for decnode in decoratornodes: @@ -938,8 +935,7 @@ def infer_call_result(self, caller, context=None): """infer what a function is returning when called""" if self.is_generator(): - result = bases.Generator() - result.parent = self + result = bases.Generator(self) yield result return # This is really a gigantic hack to work around metaclass generators @@ -957,7 +953,7 @@ c.hide = True c.parent = self class_bases = [next(b.infer(context)) for b in caller.args[1:]] - c.bases = [base for base in class_bases if base != util.YES] + c.bases = [base for base in class_bases if base != util.Uninferable] c._metaclass = metaclass yield c return @@ -970,7 +966,10 @@ for inferred in returnnode.value.infer(context): yield inferred except exceptions.InferenceError: - yield util.YES + yield util.Uninferable + + def bool_value(self): + return True class AsyncFunctionDef(FunctionDef): @@ -1008,7 +1007,7 @@ if isinstance(baseobj, bases.Instance): # not abstract return False - if baseobj is util.YES: + if baseobj is util.Uninferable: continue if baseobj is klass: continue @@ -1057,7 +1056,25 @@ return klass._type -class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, bases.Statement): +def get_wrapping_class(node): + """Obtain the class that *wraps* this node + + We consider that a class wraps a node if the class + is a parent for the said node. + """ + + klass = node.frame() + while klass is not None and not isinstance(klass, ClassDef): + if klass.parent is None: + klass = None + else: + klass = klass.parent.frame() + return klass + + + +class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, + node_classes.Statement): # some of the attributes below are set by the builder module or # by a raw factories @@ -1066,8 +1083,7 @@ _astroid_fields = ('decorators', 'bases', 'body') # name decorators = None - special_attributes = set(('__name__', '__doc__', '__dict__', '__module__', - '__bases__', '__mro__', '__subclasses__')) + special_attributes = objectmodel.ClassModel() _type = None _metaclass_hack = False @@ -1075,27 +1091,33 @@ type = property(_class_type, doc="class'type, possible values are 'class' | " "'metaclass' | 'exception'") + _other_fields = ('name', 'doc') + _other_other_fields = ('locals', '_newstyle') + _newstyle = None - def __init__(self, name, doc): - self._instance_attrs = {} - self._locals = {} + def __init__(self, name=None, doc=None, lineno=None, + col_offset=None, parent=None): + self.instance_attrs = {} + self.locals = {} + self.keywords = [] self.bases = [] self.body = [] self.name = name self.doc = doc - - @property - def instance_attrs(self): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - return self._instance_attrs - @instance_attrs.setter - def instance_attrs(self, _instance_attrs): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - self._instance_attrs = _instance_attrs - @instance_attrs.deleter - def instance_attrs(self): - util.attribute_to_function_warning('instance_attrs', 2.0, 'get_attributes') - del self._instance_attrs + super(ClassDef, self).__init__(lineno, col_offset, parent) + if parent is not None: + parent.frame().set_local(name, self) + + # pylint: disable=redefined-outer-name + def postinit(self, bases, body, decorators, newstyle=None, metaclass=None, keywords=None): + self.keywords = keywords + self.bases = bases + self.body = body + self.decorators = decorators + if newstyle is not None: + self._newstyle = newstyle + if 
metaclass is not None: + self._metaclass = metaclass def _newstyle_impl(self, context=None): if context is None: @@ -1106,7 +1128,7 @@ if base._newstyle_impl(context): self._newstyle = True break - klass = self._explicit_metaclass() + klass = self.declared_metaclass() # could be any callable, we'd need to infer the result of klass(name, # bases, dict). punt if it's not a class node. if klass is not None and isinstance(klass, ClassDef): @@ -1124,8 +1146,8 @@ def blockstart_tolineno(self): if self.bases: return self.bases[-1].tolineno - else: - return self.fromlineno + + return self.fromlineno def block_range(self, lineno): """return block line numbers. @@ -1158,7 +1180,7 @@ isinstance(name_node.value, six.string_types)): name = name_node.value else: - return util.YES + return util.Uninferable result = ClassDef(name, None) @@ -1168,9 +1190,9 @@ result.bases = class_bases.itered() else: # There is currently no AST node that can represent an 'unknown' - # node (YES is not an AST node), therefore we simply return YES here + # node (Uninferable is not an AST node), therefore we simply return Uninferable here # although we know at least the name of the class. - return util.YES + return util.Uninferable # Get the members of the class try: @@ -1182,7 +1204,7 @@ for attr, value in members.items: if (isinstance(attr, node_classes.Const) and isinstance(attr.value, six.string_types)): - result._locals[attr.value] = [value] + result.locals[attr.value] = [value] result.parent = caller.parent return result @@ -1197,9 +1219,16 @@ yield bases.Instance(self) def scope_lookup(self, node, name, offset=0): - # pylint: disable=redefined-variable-type + # If the name looks like a builtin name, just try to look + # into the upper scope of this class. We might have a + # decorator that it's poorly named after a builtin object + # inside this class. 
+ lookup_upper_frame = ( + isinstance(node.parent, node_classes.Decorators) and + name in MANAGER.astroid_cache[six.moves.builtins.__name__] + ) if any(node == base or base.parent_of(node) - for base in self.bases): + for base in self.bases) or lookup_upper_frame: # Handle the case where we have either a name # in the bases of a class, which exists before # the actual definition or the case where we have @@ -1295,87 +1324,144 @@ which have defined in their instance attribute dictionary """ for astroid in self.ancestors(context=context): - if name in astroid._instance_attrs: + if name in astroid.instance_attrs: yield astroid def has_base(self, node): return node in self.bases - @remove_nodes(node_classes.DelAttr) def local_attr(self, name, context=None): """return the list of assign node associated to name in this class locals or in its parents - :raises `NotFoundError`: + :raises `AttributeInferenceError`: if no attribute with this name has been find in this class or its parent classes """ - try: - return self._locals[name] - except KeyError: - for class_node in self.local_attr_ancestors(name, context): - return class_node._locals[name] - raise exceptions.NotFoundError(name) + result = [] + if name in self.locals: + result = self.locals[name] + else: + class_node = next(self.local_attr_ancestors(name, context), ()) + if class_node: + result = class_node.locals[name] + result = [n for n in result if not isinstance(n, node_classes.DelAttr)] + if result: + return result + raise exceptions.AttributeInferenceError(target=self, attribute=name, + context=context) - @remove_nodes(node_classes.DelAttr) def instance_attr(self, name, context=None): """return the astroid nodes associated to name in this class instance attributes dictionary and in its parents - :raises `NotFoundError`: + :raises `AttributeInferenceError`: if no attribute with this name has been find in this class or its parent classes """ - # Return a copy, so we don't modify self._instance_attrs, + # Return a copy, so we don't modify self.instance_attrs, # which could lead to infinite loop. - values = list(self._instance_attrs.get(name, [])) + values = list(self.instance_attrs.get(name, [])) # get all values from parents for class_node in self.instance_attr_ancestors(name, context): - values += class_node._instance_attrs[name] - if not values: - raise exceptions.NotFoundError(name) - return values + values += class_node.instance_attrs[name] + values = [n for n in values if not isinstance(n, node_classes.DelAttr)] + if values: + return values + raise exceptions.AttributeInferenceError(target=self, attribute=name, + context=context) def instantiate_class(self): """return Instance of ClassDef node, else return self""" return bases.Instance(self) def instanciate_class(self): - """return Instance of ClassDef node, else return self""" - util.rename_warning('instanciate_class()', 2.0, 'instantiate_class()') + warnings.warn('%s.instanciate_class() is deprecated and slated for ' + 'removal in astroid 2.0, use %s.instantiate_class() ' + 'instead.' % (type(self).__name__, type(self).__name__), + PendingDeprecationWarning, stacklevel=2) return self.instantiate_class() - def getattr(self, name, context=None): - """this method doesn't look in the instance_attrs dictionary since it's - done by an Instance proxy at inference time. 
+ def getattr(self, name, context=None, class_context=True): + """Get an attribute from this class, using Python's attribute semantic - It may return a YES object if the attribute has not been actually - found but a __getattr__ or __getattribute__ method is defined - """ - values = self._locals.get(name, []) - if name in self.special_attributes: - if name == '__module__': - return [node_classes.const_factory(self.root().qname())] + values + This method doesn't look in the instance_attrs dictionary + since it's done by an Instance proxy at inference time. It + may return a Uninferable object if the attribute has not been actually + found but a __getattr__ or __getattribute__ method is defined. + If *class_context* is given, then it's considered that the + attribute is accessed from a class context, + e.g. ClassDef.attribute, otherwise it might have been accessed + from an instance as well. If *class_context* is used in that + case, then a lookup in the implicit metaclass and the explicit + metaclass will be done. + + """ + values = self.locals.get(name, []) + if name in self.special_attributes and class_context and not values: + result = [self.special_attributes.lookup(name)] if name == '__bases__': - node = node_classes.Tuple() - elts = list(self._inferred_bases(context)) - node.elts = elts - return [node] + values - if name == '__mro__' and self.newstyle: - mro = self.mro() - node = node_classes.Tuple() - node.elts = mro - return [node] - return std_special_attributes(self, name) - # don't modify the list in self._locals! + # Need special treatment, since they are mutable + # and we need to return all the values. + result += values + return result + + # don't modify the list in self.locals! values = list(values) for classnode in self.ancestors(recurs=True, context=context): - values += classnode._locals.get(name, []) + values += classnode.locals.get(name, []) + + if class_context: + values += self._metaclass_lookup_attribute(name, context) + if not values: - raise exceptions.NotFoundError(name) + raise exceptions.AttributeInferenceError(target=self, attribute=name, + context=context) return values - def igetattr(self, name, context=None): + def _metaclass_lookup_attribute(self, name, context): + """Search the given name in the implicit and the explicit metaclass.""" + attrs = set() + implicit_meta = self.implicit_metaclass() + metaclass = self.metaclass() + for cls in {implicit_meta, metaclass}: + if cls and cls != self and isinstance(cls, ClassDef): + cls_attributes = self._get_attribute_from_metaclass( + cls, name, context) + attrs.update(set(cls_attributes)) + return attrs + + def _get_attribute_from_metaclass(self, cls, name, context): + try: + attrs = cls.getattr(name, context=context, + class_context=True) + except exceptions.AttributeInferenceError: + return + + for attr in bases._infer_stmts(attrs, context, frame=cls): + if not isinstance(attr, FunctionDef): + yield attr + continue + + if bases._is_property(attr): + # TODO(cpopa): don't use a private API. + for inferred in attr.infer_call_result(self, context): + yield inferred + continue + if attr.type == 'classmethod': + # If the method is a classmethod, then it will + # be bound to the metaclass, not to the class + # from where the attribute is retrieved. + # get_wrapping_class could return None, so just + # default to the current class. 
+ frame = get_wrapping_class(attr) or self + yield bases.BoundMethod(attr, frame) + elif attr.type == 'staticmethod': + yield attr + else: + yield bases.BoundMethod(attr, self) + + def igetattr(self, name, context=None, class_context=True): """inferred getattr, need special treatment in class to handle descriptors """ @@ -1384,25 +1470,26 @@ context = contextmod.copy_context(context) context.lookupname = name try: - for inferred in bases._infer_stmts(self.getattr(name, context), - context, frame=self): - # yield YES object instead of descriptors when necessary + attrs = self.getattr(name, context, class_context=class_context) + for inferred in bases._infer_stmts(attrs, context, frame=self): + # yield Uninferable object instead of descriptors when necessary if (not isinstance(inferred, node_classes.Const) and isinstance(inferred, bases.Instance)): try: inferred._proxied.getattr('__get__', context) - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError: yield inferred else: - yield util.YES + yield util.Uninferable else: yield function_to_method(inferred, self) - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError as error: if not name.startswith('__') and self.has_dynamic_getattr(context): - # class handle some dynamic attributes, return a YES object - yield util.YES + # class handle some dynamic attributes, return a Uninferable object + yield util.Uninferable else: - raise exceptions.InferenceError(name) + util.reraise(exceptions.InferenceError( + error.message, target=self, attribute=name, context=context)) def has_dynamic_getattr(self, context=None): """ @@ -1419,15 +1506,43 @@ try: return _valid_getattr(self.getattr('__getattr__', context)[0]) - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError: #if self.newstyle: XXX cause an infinite recursion error try: getattribute = self.getattr('__getattribute__', context)[0] return _valid_getattr(getattribute) - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError: pass return False + def getitem(self, index, context=None): + """Return the inference of a subscript. + + This is basically looking up the method in the metaclass and calling it. + """ + try: + methods = dunder_lookup.lookup(self, '__getitem__') + except exceptions.AttributeInferenceError as exc: + util.reraise( + exceptions.AstroidTypeError( + node=self, error=exc, + context=context + ) + ) + + method = methods[0] + + # Create a new callcontext for providing index as an argument. + if context: + new_context = context.clone() + else: + new_context = contextmod.InferenceContext() + + new_context.callcontext = contextmod.CallContext(args=[index]) + new_context.boundnode = self + + return next(method.infer_call_result(self, new_context)) + def methods(self): """return an iterator on all methods defined in the class and its ancestors @@ -1458,11 +1573,10 @@ return builtin_lookup('type')[1][0] _metaclass = None - def _explicit_metaclass(self): - """ Return the explicit defined metaclass - for the current class. + def declared_metaclass(self): + """Return the explicit declared metaclass for the current class. 
- An explicit defined metaclass is defined + An explicit declared metaclass is defined either by passing the ``metaclass`` keyword argument in the class definition line (Python 3) or (Python 2) by having a ``__metaclass__`` class attribute, or if there are @@ -1482,18 +1596,18 @@ # Expects this from Py3k TreeRebuilder try: return next(node for node in self._metaclass.infer() - if node is not util.YES) + if node is not util.Uninferable) except (exceptions.InferenceError, StopIteration): return None if six.PY3: return None - if '__metaclass__' in self._locals: - assignment = self._locals['__metaclass__'][-1] + if '__metaclass__' in self.locals: + assignment = self.locals['__metaclass__'][-1] elif self.bases: return None - elif '__metaclass__' in self.root()._locals: - assignments = [ass for ass in self.root()._locals['__metaclass__'] + elif '__metaclass__' in self.root().locals: + assignments = [ass for ass in self.root().locals['__metaclass__'] if ass.lineno < self.lineno] if not assignments: return None @@ -1505,7 +1619,7 @@ inferred = next(assignment.infer()) except exceptions.InferenceError: return - if inferred is util.YES: # don't expose this + if inferred is util.Uninferable: # don't expose this return None return inferred @@ -1514,7 +1628,7 @@ seen = set() seen.add(self) - klass = self._explicit_metaclass() + klass = self.declared_metaclass() if klass is None: for parent in self.ancestors(): if parent not in seen: @@ -1537,7 +1651,7 @@ def _islots(self): """ Return an iterator with the inferred slots. """ - if '__slots__' not in self._locals: + if '__slots__' not in self.locals: return for slots in self.igetattr('__slots__'): # check if __slots__ is a valid type @@ -1545,7 +1659,7 @@ try: slots.getattr(meth) break - except exceptions.NotFoundError: + except exceptions.AttributeInferenceError: continue else: continue @@ -1564,7 +1678,7 @@ values = [item[0] for item in slots.items] else: values = slots.itered() - if values is util.YES: + if values is util.Uninferable: continue if not values: # Stop the iteration, because the class @@ -1574,7 +1688,7 @@ for elt in values: try: for inferred in elt.infer(): - if inferred is util.YES: + if inferred is util.Uninferable: continue if (not isinstance(inferred, node_classes.Const) or not isinstance(inferred.value, @@ -1599,7 +1713,6 @@ if exc.args and exc.args[0] not in ('', None): return exc.args[0] return None - # pylint: disable=unsupported-binary-operation; false positive return [first] + list(slots) # Cached, because inferring them all the time is expensive @@ -1672,22 +1785,15 @@ for base in baseobj.bases: yield base - def mro(self, context=None): - """Get the method resolution order, using C3 linearization. - - It returns the list of ancestors sorted by the mro. - This will raise `NotImplementedError` for old-style classes, since - they don't have the concept of MRO. 
- """ - if not self.newstyle: - raise NotImplementedError( - "Could not obtain mro for old-style classes.") - - bases = list(self._inferred_bases(context=context)) + def _compute_mro(self, context=None): + inferred_bases = list(self._inferred_bases(context=context)) bases_mro = [] - for base in bases: + for base in inferred_bases: + if base is self: + continue + try: - mro = base.mro(context=context) + mro = base._compute_mro(context=context) bases_mro.append(mro) except NotImplementedError: # Some classes have in their ancestors both newstyle and @@ -1698,19 +1804,29 @@ ancestors = list(base.ancestors(context=context)) bases_mro.append(ancestors) - unmerged_mro = ([[self]] + bases_mro + [bases]) - _verify_duplicates_mro(unmerged_mro) - return _c3_merge(unmerged_mro) - -def get_locals(node): - '''Stub function for forwards compatibility.''' - return node._locals - -def get_attributes(node): - '''Stub function for forwards compatibility.''' - return node._instance_attrs + unmerged_mro = ([[self]] + bases_mro + [inferred_bases]) + _verify_duplicates_mro(unmerged_mro, self, context) + return _c3_merge(unmerged_mro, self, context) + + def mro(self, context=None): + """Get the method resolution order, using C3 linearization. + + It returns the list of ancestors sorted by the mro. + This will raise `NotImplementedError` for old-style classes, since + they don't have the concept of MRO. + """ + + if not self.newstyle: + raise NotImplementedError( + "Could not obtain mro for old-style classes.") + + return self._compute_mro(context=context) + + def bool_value(self): + return True + # Backwards-compatibility aliases -Class = node_classes.proxy_alias('Class', ClassDef) -Function = node_classes.proxy_alias('Function', FunctionDef) -GenExpr = node_classes.proxy_alias('GenExpr', GeneratorExp) +Class = util.proxy_alias('Class', ClassDef) +Function = util.proxy_alias('Function', FunctionDef) +GenExpr = util.proxy_alias('GenExpr', GeneratorExp) diff -Nru astroid-1.4.9/astroid/tests/resources.py astroid-1.5.3/astroid/tests/resources.py --- astroid-1.4.9/astroid/tests/resources.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/resources.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,9 @@ -# Copyright 2014 Google, Inc. All rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import os import sys @@ -23,9 +12,11 @@ from astroid import builder from astroid import MANAGER from astroid.bases import BUILTINS +from astroid import tests -DATA_DIR = 'testdata/python{}/'.format(sys.version_info[0]) +DATA_DIR = os.path.join('testdata', 'python{}'.format(sys.version_info[0])) +RESOURCE_PATH = os.path.join(tests.__path__[0], DATA_DIR, 'data') def find(name): return pkg_resources.resource_filename( diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/clientmodule_test.py astroid-1.5.3/astroid/tests/testdata/python2/data/clientmodule_test.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/clientmodule_test.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/clientmodule_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -""" docstring for file clientmodule.py """ -from data.suppliermodule_test import Interface as IFace, DoNothing - -class Toto: pass - -class Ancestor: - """ Ancestor method """ - __implements__ = (IFace,) - - def __init__(self, value): - local_variable = 0 - self.attr = 'this method shouldn\'t have a docstring' - self.__value = value - - def get_value(self): - """ nice docstring ;-) """ - return self.__value - - def set_value(self, value): - self.__value = value - return 'this method shouldn\'t have a docstring' - -class Specialization(Ancestor): - TYPE = 'final class' - top = 'class' - - def __init__(self, value, _id): - Ancestor.__init__(self, value) - self._id = _id - self.relation = DoNothing() - self.toto = Toto() - diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid-1.5.3/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +var = 42 \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/foogle/fax/a.py astroid-1.5.3/astroid/tests/testdata/python2/data/foogle/fax/a.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/foogle/fax/a.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/foogle/fax/a.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +x = 1 \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/invalid_encoding.py astroid-1.5.3/astroid/tests/testdata/python2/data/invalid_encoding.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/invalid_encoding.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/invalid_encoding.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +# -*- coding: lala -*- \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/namespace_pep_420/module.py astroid-1.5.3/astroid/tests/testdata/python2/data/namespace_pep_420/module.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/namespace_pep_420/module.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/namespace_pep_420/module.py 2017-03-11 13:04:28.000000000 +0000 @@ 
-0,0 +1 @@ +from namespace_pep_420.submodule import var \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/notall.py astroid-1.5.3/astroid/tests/testdata/python2/data/notall.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/notall.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/notall.py 2017-03-11 13:04:28.000000000 +0000 @@ -3,5 +3,5 @@ other = 'o' class Aaa: pass -def func(): print('yo') +def func(): return 'yo' diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py 1970-01-01 
00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python2/data/suppliermodule_test.py astroid-1.5.3/astroid/tests/testdata/python2/data/suppliermodule_test.py --- astroid-1.4.9/astroid/tests/testdata/python2/data/suppliermodule_test.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python2/data/suppliermodule_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -""" file suppliermodule.py """ - -class NotImplemented(Exception): - pass - -class Interface: - def get_value(self): - raise NotImplemented() - - def set_value(self, value): - raise NotImplemented() - -class DoNothing : pass diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/clientmodule_test.py astroid-1.5.3/astroid/tests/testdata/python3/data/clientmodule_test.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/clientmodule_test.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/clientmodule_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -""" docstring for file clientmodule.py """ -from data.suppliermodule_test import Interface as IFace, DoNothing - -class Toto: pass - -class Ancestor: - """ Ancestor method """ - __implements__ = (IFace,) - - def __init__(self, value): - local_variable = 0 - self.attr = 'this method shouldn\'t have a docstring' - self.__value = value - - def get_value(self): - """ nice docstring ;-) """ - return self.__value - - def set_value(self, value): - self.__value = value - return 'this method shouldn\'t have a docstring' - -class Specialization(Ancestor): - TYPE = 'final class' - top = 'class' - - def __init__(self, value, _id): - Ancestor.__init__(self, value) - self._id = _id - self.relation = DoNothing() - self.toto = Toto() - diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid-1.5.3/astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +var = 42 \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/foogle/fax/a.py astroid-1.5.3/astroid/tests/testdata/python3/data/foogle/fax/a.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/foogle/fax/a.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/foogle/fax/a.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +x = 1 \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/invalid_encoding.py astroid-1.5.3/astroid/tests/testdata/python3/data/invalid_encoding.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/invalid_encoding.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/invalid_encoding.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +# -*- coding: lala -*- \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/module2.py astroid-1.5.3/astroid/tests/testdata/python3/data/module2.py --- 
astroid-1.4.9/astroid/tests/testdata/python3/data/module2.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/module2.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,3 +1,4 @@ +from __future__ import print_function from data.module import YO, YOUPI import data diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/namespace_pep_420/module.py astroid-1.5.3/astroid/tests/testdata/python3/data/namespace_pep_420/module.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/namespace_pep_420/module.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/namespace_pep_420/module.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +from namespace_pep_420.submodule import var \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff 
-Nru astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,2 @@ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) \ No newline at end of file diff -Nru astroid-1.4.9/astroid/tests/testdata/python3/data/suppliermodule_test.py astroid-1.5.3/astroid/tests/testdata/python3/data/suppliermodule_test.py --- astroid-1.4.9/astroid/tests/testdata/python3/data/suppliermodule_test.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/testdata/python3/data/suppliermodule_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -""" file suppliermodule.py """ - -class NotImplemented(Exception): - pass - -class Interface: - def get_value(self): - raise NotImplemented() - - def set_value(self, value): - raise NotImplemented() - -class DoNothing : pass diff -Nru astroid-1.4.9/astroid/tests/unittest_brain.py astroid-1.5.3/astroid/tests/unittest_brain.py --- astroid-1.4.9/astroid/tests/unittest_brain.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_brain.py 2017-06-01 22:07:25.000000000 +0000 @@ -1,51 +1,37 @@ -# Copyright 2013 Google Inc. All Rights Reserved. -# -# This file is part of astroid. -# -# logilab-astng is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# logilab-astng is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with logilab-astng. If not, see . -"""Tests for basic functionality in astroid.brain.""" -import sys -import unittest - -import six - -from astroid import MANAGER -from astroid import bases -from astroid import builder -from astroid import nodes -from astroid import test_utils -from astroid import util -import astroid +# Copyright (c) 2013-2014 Google, Inc. +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015 Philip Lorenz +# Copyright (c) 2015 LOGILAB S.A. 
(Paris, FRANCE) +# Copyright (c) 2015 raylu +# Copyright (c) 2015-2016 Cara Vinson +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER -try: - import nose # pylint: disable=unused-import - HAS_NOSE = True -except ImportError: - HAS_NOSE = False - +"""Tests for basic functionality in astroid.brain.""" try: import multiprocessing # pylint: disable=unused-import HAS_MULTIPROCESSING = True except ImportError: HAS_MULTIPROCESSING = False +import sys +import unittest try: import enum # pylint: disable=unused-import HAS_ENUM = True except ImportError: - HAS_ENUM = False + try: + import enum34 as enum # pylint: disable=unused-import + HAS_ENUM = True + except ImportError: + HAS_ENUM = False + +try: + import nose # pylint: disable=unused-import + HAS_NOSE = True +except ImportError: + HAS_NOSE = False try: import dateutil # pylint: disable=unused-import @@ -60,10 +46,19 @@ HAS_NUMPY = False try: - import pytest # pylint: disable=unused-import + import pytest HAS_PYTEST = True except ImportError: HAS_PYTEST = False +import six + +from astroid import MANAGER +from astroid import bases +from astroid import builder +from astroid import nodes +from astroid import util +from astroid import test_utils +import astroid class HashlibTest(unittest.TestCase): @@ -87,7 +82,7 @@ class NamedTupleTest(unittest.TestCase): def test_namedtuple_base(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" from collections import namedtuple class X(namedtuple("X", ["a", "b", "c"])): @@ -100,7 +95,7 @@ self.assertFalse(anc.parent is None) def test_namedtuple_inference(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" from collections import namedtuple name = "X" @@ -108,56 +103,108 @@ class X(namedtuple(name, fields)): pass """) - for base in klass.ancestors(): - if base.name == 'X': - break - self.assertSetEqual({"a", "b", "c"}, set(base._instance_attrs)) + base = next(base for base in klass.ancestors() + if base.name == 'X') + self.assertSetEqual({"a", "b", "c"}, set(base.instance_attrs)) def test_namedtuple_inference_failure(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" from collections import namedtuple def foo(fields): return __(namedtuple("foo", fields)) """) - self.assertIs(util.YES, next(klass.infer())) + self.assertIs(util.Uninferable, next(klass.infer())) - @unittest.skipIf(sys.version_info[0] > 2, - 'namedtuple inference is broken on Python 3') def test_namedtuple_advanced_inference(self): # urlparse return an object of class ParseResult, which has a # namedtuple call and a mixin as base classes - result = test_utils.extract_node(""" - import urlparse + result = builder.extract_node(""" + import six - result = __(urlparse.urlparse('gopher://')) + result = __(six.moves.urllib.parse.urlparse('gopher://')) """) instance = next(result.infer()) - self.assertEqual(len(instance.getattr('scheme')), 1) - self.assertEqual(len(instance.getattr('port')), 1) - with self.assertRaises(astroid.NotFoundError): + self.assertGreaterEqual(len(instance.getattr('scheme')), 1) + self.assertGreaterEqual(len(instance.getattr('port')), 1) + with self.assertRaises(astroid.AttributeInferenceError): instance.getattr('foo') - self.assertEqual(len(instance.getattr('geturl')), 1) + self.assertGreaterEqual(len(instance.getattr('geturl')), 1) self.assertEqual(instance.name, 'ParseResult') def test_namedtuple_instance_attrs(self): - result = 
test_utils.extract_node(''' + result = builder.extract_node(''' from collections import namedtuple namedtuple('a', 'a b c')(1, 2, 3) #@ ''') inferred = next(result.infer()) - for name, attr in inferred._instance_attrs.items(): + for name, attr in inferred.instance_attrs.items(): self.assertEqual(attr[0].attrname, name) def test_namedtuple_uninferable_fields(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' x = [A] * 2 from collections import namedtuple l = namedtuple('a', x) l(1) ''') inferred = next(node.infer()) - self.assertIs(util.YES, inferred) + self.assertIs(util.Uninferable, inferred) + + def test_namedtuple_access_class_fields(self): + node = builder.extract_node(""" + from collections import namedtuple + Tuple = namedtuple("Tuple", "field other") + Tuple #@ + """) + inferred = next(node.infer()) + self.assertIn('field', inferred.locals) + self.assertIn('other', inferred.locals) + + def test_namedtuple_rename_keywords(self): + node = builder.extract_node(""" + from collections import namedtuple + Tuple = namedtuple("Tuple", "abc def", rename=True) + Tuple #@ + """) + inferred = next(node.infer()) + self.assertIn('abc', inferred.locals) + self.assertIn('_1', inferred.locals) + + def test_namedtuple_rename_duplicates(self): + node = builder.extract_node(""" + from collections import namedtuple + Tuple = namedtuple("Tuple", "abc abc abc", rename=True) + Tuple #@ + """) + inferred = next(node.infer()) + self.assertIn('abc', inferred.locals) + self.assertIn('_1', inferred.locals) + self.assertIn('_2', inferred.locals) + + def test_namedtuple_rename_uninferable(self): + node = builder.extract_node(""" + from collections import namedtuple + Tuple = namedtuple("Tuple", "a b c", rename=UNINFERABLE) + Tuple #@ + """) + inferred = next(node.infer()) + self.assertIn('a', inferred.locals) + self.assertIn('b', inferred.locals) + self.assertIn('c', inferred.locals) + + +class DefaultDictTest(unittest.TestCase): + + def test_1(self): + node = builder.extract_node(''' + from collections import defaultdict + + X = defaultdict(int) + X[0] + ''') + inferred = next(node.infer()) + self.assertIs(util.Uninferable, inferred) class ModuleExtenderTest(unittest.TestCase): @@ -172,7 +219,7 @@ class NoseBrainTest(unittest.TestCase): def test_nose_tools(self): - methods = test_utils.extract_node(""" + methods = builder.extract_node(""" from nose.tools import assert_equal from nose.tools import assert_equals from nose.tools import assert_true @@ -198,7 +245,7 @@ class SixBrainTest(unittest.TestCase): def test_attribute_access(self): - ast_nodes = test_utils.extract_node(''' + ast_nodes = builder.extract_node(''' import six six.moves.http_client #@ six.moves.urllib_parse #@ @@ -268,7 +315,7 @@ self.assertEqual(urlretrieve.qname(), 'urllib.request.urlretrieve') def test_from_imports(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' from six.moves import http_client http_client.HTTPSConnection #@ ''') @@ -291,7 +338,7 @@ # Test that module attributes are working, # especially on Python 3.4+, where they are obtained # from a context. 
- module = test_utils.extract_node(""" + module = builder.extract_node(""" import multiprocessing """) module = module.do_import_module('multiprocessing') @@ -302,7 +349,7 @@ self.assertIsInstance(cpu_count, astroid.BoundMethod) def test_module_name(self): - module = test_utils.extract_node(""" + module = builder.extract_node(""" import multiprocessing multiprocessing.SyncManager() """) @@ -369,6 +416,34 @@ self.assertTrue(manager.getattr('shutdown')) +class ThreadingBrainTest(unittest.TestCase): + def test_lock(self): + self._test_lock_object('Lock') + + def test_rlock(self): + self._test_lock_object('RLock') + + def test_semaphore(self): + self._test_lock_object('Semaphore') + + def test_boundedsemaphore(self): + self._test_lock_object('BoundedSemaphore') + + def _test_lock_object(self, object_name): + lock_instance = builder.extract_node(""" + import threading + threading.{0}() + """.format(object_name)) + inferred = next(lock_instance.infer()) + self.assert_is_valid_lock(inferred) + + def assert_is_valid_lock(self, inferred): + self.assertIsInstance(inferred, astroid.Instance) + self.assertEqual(inferred.root().name, 'threading') + for method in {'acquire', 'release', '__enter__', '__exit__'}: + self.assertIsInstance(next(inferred.igetattr(method)), astroid.BoundMethod) + + @unittest.skipUnless(HAS_ENUM, 'The enum module was only added in Python 3.4. Support for ' 'older Python versions may be available through the enum34 ' @@ -388,8 +463,8 @@ """) - enum = next(module['MyEnum'].infer()) - one = enum['one'] + enumeration = next(module['MyEnum'].infer()) + one = enumeration['one'] self.assertEqual(one.pytype(), '.MyEnum.one') property_type = '{}.property'.format(bases.BUILTINS) @@ -408,8 +483,8 @@ pass test = 42 ''') - enum = module['Enumeration'] - test = next(enum.igetattr('test')) + enumeration = module['Enumeration'] + test = next(enumeration.igetattr('test')) self.assertEqual(test.value, 42) def test_enum_multiple_base_classes(self): @@ -422,8 +497,8 @@ class MyEnum(Mixin, enum.Enum): one = 1 """) - enum = next(module['MyEnum'].infer()) - one = enum['one'] + enumeration = next(module['MyEnum'].infer()) + one = enumeration['one'] clazz = one.getattr('__class__')[0] self.assertTrue(clazz.is_subtype_of('.Mixin'), @@ -437,8 +512,8 @@ one = 1 """) - enum = next(module['MyEnum'].infer()) - one = enum['one'] + enumeration = next(module['MyEnum'].infer()) + one = enumeration['one'] clazz = one.getattr('__class__')[0] int_type = '{}.{}'.format(bases.BUILTINS, 'int') @@ -446,7 +521,7 @@ 'IntEnum based enums should be a subtype of int') def test_enum_func_form_is_class_not_instance(self): - cls, instance = test_utils.extract_node(''' + cls, instance = builder.extract_node(''' from enum import Enum f = Enum('Audience', ['a', 'b', 'c']) f #@ @@ -475,7 +550,7 @@ class NumpyBrainTest(unittest.TestCase): def test_numpy(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' import numpy numpy.ones #@ ''') @@ -487,7 +562,7 @@ class PytestBrainTest(unittest.TestCase): def test_pytest(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' import pytest pytest #@ ''') @@ -502,5 +577,122 @@ self.assertIn(attr, module) +class IOBrainTest(unittest.TestCase): + + @unittest.skipUnless(six.PY3, 'Needs Python 3 io model') + def test_sys_streams(self): + for name in {'stdout', 'stderr', 'stdin'}: + node = astroid.extract_node(''' + import sys + sys.{} + '''.format(name)) + inferred = next(node.infer()) + buffer_attr = next(inferred.igetattr('buffer')) + 
self.assertIsInstance(buffer_attr, astroid.Instance) + self.assertEqual(buffer_attr.name, 'BufferedWriter') + raw = next(buffer_attr.igetattr('raw')) + self.assertIsInstance(raw, astroid.Instance) + self.assertEqual(raw.name, 'FileIO') + + +@test_utils.require_version('3.6') +class TypingBrain(unittest.TestCase): + + def test_namedtuple_base(self): + klass = builder.extract_node(""" + from typing import NamedTuple + + class X(NamedTuple("X", [("a", int), ("b", str), ("c", bytes)])): + pass + """) + self.assertEqual( + [anc.name for anc in klass.ancestors()], + ['X', 'tuple', 'object']) + for anc in klass.ancestors(): + self.assertFalse(anc.parent is None) + + def test_namedtuple_inference(self): + klass = builder.extract_node(""" + from typing import NamedTuple + + class X(NamedTuple("X", [("a", int), ("b", str), ("c", bytes)])): + pass + """) + base = next(base for base in klass.ancestors() + if base.name == 'X') + self.assertSetEqual({"a", "b", "c"}, set(base.instance_attrs)) + + def test_namedtuple_inference_nonliteral(self): + # Note: NamedTuples in mypy only work with literals. + klass = builder.extract_node(""" + from typing import NamedTuple + + name = "X" + fields = [("a", int), ("b", str), ("c", bytes)] + NamedTuple(name, fields) + """) + inferred = next(klass.infer()) + self.assertIsInstance(inferred, astroid.Instance) + self.assertEqual(inferred.qname(), "typing.NamedTuple") + + def test_namedtuple_instance_attrs(self): + result = builder.extract_node(''' + from typing import NamedTuple + NamedTuple("A", [("a", int), ("b", str), ("c", bytes)])(1, 2, 3) #@ + ''') + inferred = next(result.infer()) + for name, attr in inferred.instance_attrs.items(): + self.assertEqual(attr[0].attrname, name) + + def test_namedtuple_simple(self): + result = builder.extract_node(''' + from typing import NamedTuple + NamedTuple("A", [("a", int), ("b", str), ("c", bytes)]) + ''') + inferred = next(result.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertSetEqual({"a", "b", "c"}, set(inferred.instance_attrs)) + + def test_namedtuple_few_args(self): + result = builder.extract_node(''' + from typing import NamedTuple + NamedTuple("A") + ''') + inferred = next(result.infer()) + self.assertIsInstance(inferred, astroid.Instance) + self.assertEqual(inferred.qname(), "typing.NamedTuple") + + def test_namedtuple_few_fields(self): + result = builder.extract_node(''' + from typing import NamedTuple + NamedTuple("A", [("a",), ("b", str), ("c", bytes)]) + ''') + inferred = next(result.infer()) + self.assertIsInstance(inferred, astroid.Instance) + self.assertEqual(inferred.qname(), "typing.NamedTuple") + + def test_namedtuple_class_form(self): + result = builder.extract_node(''' + from typing import NamedTuple + + class Example(NamedTuple): + mything: int + + Example(mything=1) + ''') + inferred = next(result.infer()) + self.assertIsInstance(inferred, astroid.Instance) + + +class ReBrainTest(unittest.TestCase): + def test_regex_flags(self): + import re + names = [name for name in dir(re) if name.isupper()] + re_ast = MANAGER.ast_from_module_name('re') + for name in names: + self.assertIn(name, re_ast) + self.assertEqual(next(re_ast[name].infer()).value, getattr(re, name)) + + if __name__ == '__main__': unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_builder.py astroid-1.5.3/astroid/tests/unittest_builder.py --- astroid-1.4.9/astroid/tests/unittest_builder.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_builder.py 2017-04-12 05:57:16.000000000 
+0000 @@ -1,20 +1,11 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014-2015 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """tests for the astroid builder and rebuilder module""" import os @@ -120,7 +111,8 @@ print (arg) ''', __name__) function = astroid['function'] - self.assertEqual(function.fromlineno, 3) # XXX discussable, but that's what is expected by pylint right now + # XXX discussable, but that's what is expected by pylint right now + self.assertEqual(function.fromlineno, 3) self.assertEqual(function.tolineno, 5) self.assertEqual(function.decorators.fromlineno, 2) self.assertEqual(function.decorators.tolineno, 2) @@ -255,11 +247,11 @@ self.builder = builder.AstroidBuilder() def test_data_build_null_bytes(self): - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidSyntaxError): self.builder.string_build('\x00') def test_data_build_invalid_x_escape(self): - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidSyntaxError): self.builder.string_build('"\\x1"') def test_missing_newline(self): @@ -267,7 +259,7 @@ resources.build_file('data/noendingnewline.py') def test_missing_file(self): - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidBuildingError): resources.build_file('data/inexistant.py') def test_inspect_build0(self): @@ -331,8 +323,8 @@ @test_utils.require_version(maxver='3.0') def test_inspect_build_instance(self): """test astroid tree build from a living object""" - import exceptions - builtin_ast = self.builder.inspect_build(exceptions) + import exceptions as builtin_exceptions + builtin_ast = self.builder.inspect_build(builtin_exceptions) fclass = builtin_ast['OSError'] # things like OSError.strerror are now (2.5) data descriptors on the # class instead of entries in the __dict__ of an instance @@ -378,6 +370,9 @@ datap = resources.build_file('data/__init__.py', 'data.__init__') self.assertEqual(datap.name, 'data') self.assertEqual(datap.package, 1) + datap = resources.build_file('data/tmp__init__.py', 'data.tmp__init__') + self.assertEqual(datap.name, 'data.tmp__init__') + self.assertEqual(datap.package, 0) def test_yield_parent(self): """check if we added discard nodes as yield parent (w/ compiler)""" @@ -387,7 +382,7 @@ if noe: yield more """ - func = test_utils.extract_node(code) + func = builder.extract_node(code) self.assertIsInstance(func, nodes.FunctionDef) stmt = func.body[0] 
self.assertIsInstance(stmt, nodes.Expr) @@ -451,7 +446,7 @@ self.assertIsInstance(astroid.getattr('CSTE')[0], nodes.AssignName) self.assertEqual(astroid.getattr('CSTE')[0].fromlineno, 2) self.assertEqual(astroid.getattr('CSTE')[1].fromlineno, 6) - with self.assertRaises(exceptions.NotFoundError): + with self.assertRaises(exceptions.AttributeInferenceError): astroid.getattr('CSTE2') with self.assertRaises(exceptions.InferenceError): next(astroid['global_no_effect'].ilookup('CSTE2')) @@ -482,22 +477,22 @@ n = test_utils.get_name_node(astroid, 'n') self.assertIsNot(n.scope(), astroid) self.assertEqual([i.__class__ for i in n.infer()], - [util.YES.__class__]) + [util.Uninferable.__class__]) def test_no_future_imports(self): mod = builder.parse("import sys") - self.assertEqual(set(), mod._future_imports) + self.assertEqual(set(), mod.future_imports) def test_future_imports(self): mod = builder.parse("from __future__ import print_function") - self.assertEqual(set(['print_function']), mod._future_imports) + self.assertEqual(set(['print_function']), mod.future_imports) def test_two_future_imports(self): mod = builder.parse(""" from __future__ import print_function from __future__ import absolute_import """) - self.assertEqual(set(['print_function', 'absolute_import']), mod._future_imports) + self.assertEqual(set(['print_function', 'absolute_import']), mod.future_imports) def test_inferred_build(self): code = ''' @@ -512,8 +507,8 @@ lclass = list(astroid.igetattr('A')) self.assertEqual(len(lclass), 1) lclass = lclass[0] - self.assertIn('assign_type', lclass._locals) - self.assertIn('type', lclass._locals) + self.assertIn('assign_type', lclass.locals) + self.assertIn('type', lclass.locals) def test_augassign_attr(self): builder.parse(""" @@ -534,22 +529,23 @@ ''' builder.parse(code) nonetype = nodes.const_factory(None) - self.assertNotIn('custom_attr', nonetype._locals) - self.assertNotIn('custom_attr', nonetype._instance_attrs) + # pylint: disable=no-member; union type in const_factory, this shouldn't happen + self.assertNotIn('custom_attr', nonetype.locals) + self.assertNotIn('custom_attr', nonetype.instance_attrs) nonetype = nodes.const_factory({}) - self.assertNotIn('custom_attr', nonetype._locals) - self.assertNotIn('custom_attr', nonetype._instance_attrs) + self.assertNotIn('custom_attr', nonetype.locals) + self.assertNotIn('custom_attr', nonetype.instance_attrs) def test_asstuple(self): code = 'a, b = range(2)' astroid = builder.parse(code) - self.assertIn('b', astroid._locals) + self.assertIn('b', astroid.locals) code = ''' def visit_if(self, node): node.test, body = node.tests[0] ''' astroid = builder.parse(code) - self.assertIn('body', astroid['visit_if']._locals) + self.assertIn('body', astroid['visit_if'].locals) def test_build_constants(self): '''test expected values of constants after rebuilding''' @@ -568,7 +564,7 @@ self.assertEqual(chain.value, 'None') def test_not_implemented(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' NotImplemented #@ ''') inferred = next(node.infer()) @@ -589,7 +585,7 @@ self.assertIsNone(module.parent) self.assertEqual(module.frame(), module) self.assertEqual(module.root(), module) - self.assertEqual(module.source_file, os.path.abspath(resources.find('data/module.py'))) + self.assertEqual(module.file, os.path.abspath(resources.find('data/module.py'))) self.assertEqual(module.pure_python, 1) self.assertEqual(module.package, 0) self.assertFalse(module.is_statement) @@ -599,8 +595,8 @@ def test_module_locals(self): """test the 
'locals' dictionary of a astroid module""" module = self.module - _locals = module._locals - self.assertIs(_locals, module._globals) + _locals = module.locals + self.assertIs(_locals, module.globals) keys = sorted(_locals.keys()) should = ['MY_DICT', 'NameNode', 'YO', 'YOUPI', '__revision__', 'global_access', 'modutils', 'four_args', @@ -624,7 +620,7 @@ def test_function_locals(self): """test the 'locals' dictionary of a astroid function""" - _locals = self.module['global_access']._locals + _locals = self.module['global_access'].locals self.assertEqual(len(_locals), 4) keys = sorted(_locals.keys()) self.assertEqual(keys, ['i', 'key', 'local', 'val']) @@ -650,11 +646,11 @@ """test the 'locals' dictionary of a astroid class""" module = self.module klass1 = module['YO'] - locals1 = klass1._locals + locals1 = klass1.locals keys = sorted(locals1.keys()) self.assertEqual(keys, ['__init__', 'a']) klass2 = module['YOUPI'] - locals2 = klass2._locals + locals2 = klass2.locals keys = locals2.keys() self.assertEqual(sorted(keys), ['__init__', 'class_attr', 'class_method', @@ -664,8 +660,8 @@ module = self.module klass1 = module['YO'] klass2 = module['YOUPI'] - self.assertEqual(list(klass1._instance_attrs.keys()), ['yo']) - self.assertEqual(list(klass2._instance_attrs.keys()), ['member']) + self.assertEqual(list(klass1.instance_attrs.keys()), ['yo']) + self.assertEqual(list(klass2.instance_attrs.keys()), ['member']) def test_class_basenames(self): module = self.module @@ -696,7 +692,7 @@ def test_method_locals(self): """test the 'locals' dictionary of a astroid method""" method = self.module['YOUPI']['method'] - _locals = method._locals + _locals = method.locals keys = sorted(_locals) if sys.version_info < (3, 0): self.assertEqual(len(_locals), 5) @@ -705,6 +701,10 @@ self.assertEqual(len(_locals), 3) self.assertEqual(keys, ['autre', 'local', 'self']) + def test_unknown_encoding(self): + with self.assertRaises(exceptions.AstroidSyntaxError): + resources.build_file('data/invalid_encoding.py') + class ModuleBuildTest(resources.SysPathSetup, FileBuildTest): @@ -753,13 +753,13 @@ self.assertIsNone(e) def test_wrong_coding(self): - # setting "coding" varaible + # setting "coding" variable e = self.guess_encoding("coding = UTF-8") self.assertIsNone(e) - # setting a dictionnary entry + # setting a dictionary entry e = self.guess_encoding("coding:UTF-8") self.assertIsNone(e) - # setting an arguement + # setting an argument e = self.guess_encoding("def do_something(a_word_with_coding=None):") self.assertIsNone(e) diff -Nru astroid-1.4.9/astroid/tests/unittest_helpers.py astroid-1.5.3/astroid/tests/unittest_helpers.py --- astroid-1.4.9/astroid/tests/unittest_helpers.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_helpers.py 2017-03-11 13:04:28.000000000 +0000 @@ -0,0 +1,256 @@ +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +import unittest + +import six +from six.moves import builtins + +from astroid import builder +from astroid import exceptions +from astroid import helpers +from astroid import manager +from astroid import raw_building +from astroid import test_utils +from astroid import util + + +class TestHelpers(unittest.TestCase): + + def setUp(self): + builtins_name = builtins.__name__ + astroid_manager = manager.AstroidManager() + self.builtins = 
astroid_manager.astroid_cache[builtins_name]
+        self.manager = manager.AstroidManager()
+
+    def _extract(self, obj_name):
+        return self.builtins.getattr(obj_name)[0]
+
+    def _build_custom_builtin(self, obj_name):
+        proxy = raw_building.build_class(obj_name)
+        proxy.parent = self.builtins
+        return proxy
+
+    def assert_classes_equal(self, cls, other):
+        self.assertEqual(cls.name, other.name)
+        self.assertEqual(cls.parent, other.parent)
+        self.assertEqual(cls.qname(), other.qname())
+
+    def test_object_type(self):
+        pairs = [
+            ('1', self._extract('int')),
+            ('[]', self._extract('list')),
+            ('{1, 2, 3}', self._extract('set')),
+            ('{1:2, 4:3}', self._extract('dict')),
+            ('type', self._extract('type')),
+            ('object', self._extract('type')),
+            ('object()', self._extract('object')),
+            ('lambda: None', self._build_custom_builtin('function')),
+            ('len', self._build_custom_builtin('builtin_function_or_method')),
+            ('None', self._build_custom_builtin('NoneType')),
+            ('import sys\nsys#@', self._build_custom_builtin('module')),
+        ]
+        for code, expected in pairs:
+            node = builder.extract_node(code)
+            objtype = helpers.object_type(node)
+            self.assert_classes_equal(objtype, expected)
+
+    def test_object_type_classes_and_functions(self):
+        ast_nodes = builder.extract_node('''
+        def generator():
+            yield
+
+        class A(object):
+            def test(self):
+                self #@
+            @classmethod
+            def cls_method(cls): pass
+            @staticmethod
+            def static_method(): pass
+        A #@
+        A() #@
+        A.test #@
+        A().test #@
+        A.cls_method #@
+        A().cls_method #@
+        A.static_method #@
+        A().static_method #@
+        generator() #@
+        ''')
+        from_self = helpers.object_type(ast_nodes[0])
+        cls = next(ast_nodes[1].infer())
+        self.assert_classes_equal(from_self, cls)
+
+        cls_type = helpers.object_type(ast_nodes[1])
+        self.assert_classes_equal(cls_type, self._extract('type'))
+
+        instance_type = helpers.object_type(ast_nodes[2])
+        cls = next(ast_nodes[2].infer())._proxied
+        self.assert_classes_equal(instance_type, cls)
+
+        expected_method_types = [
+            (ast_nodes[3], 'instancemethod' if six.PY2 else 'function'),
+            (ast_nodes[4], 'instancemethod' if six.PY2 else 'method'),
+            (ast_nodes[5], 'instancemethod' if six.PY2 else 'method'),
+            (ast_nodes[6], 'instancemethod' if six.PY2 else 'method'),
+            (ast_nodes[7], 'function'),
+            (ast_nodes[8], 'function'),
+            (ast_nodes[9], 'generator'),
+        ]
+        for node, expected in expected_method_types:
+            node_type = helpers.object_type(node)
+            expected_type = self._build_custom_builtin(expected)
+            self.assert_classes_equal(node_type, expected_type)
+
+    @test_utils.require_version(minver='3.0')
+    def test_object_type_metaclasses(self):
+        module = builder.parse('''
+        import abc
+        class Meta(metaclass=abc.ABCMeta):
+            pass
+        meta_instance = Meta()
+        ''')
+        meta_type = helpers.object_type(module['Meta'])
+        self.assert_classes_equal(meta_type, module['Meta'].metaclass())
+
+        meta_instance = next(module['meta_instance'].infer())
+        instance_type = helpers.object_type(meta_instance)
+        self.assert_classes_equal(instance_type, module['Meta'])
+
+    @test_utils.require_version(minver='3.0')
+    def test_object_type_most_derived(self):
+        node = builder.extract_node('''
+        class A(type):
+            def __new__(*args, **kwargs):
+                return type.__new__(*args, **kwargs)
+        class B(object): pass
+        class C(object, metaclass=A): pass
+
+        # The most derived metaclass of D is A rather than type.
+        class D(B , C): #@
+            pass
+        ''')
+        metaclass = node.metaclass()
+        self.assertEqual(metaclass.name, 'A')
+        obj_type = helpers.object_type(node)
+        self.assertEqual(metaclass, obj_type)
+
+    def test_inference_errors(self):
+        node = builder.extract_node('''
+        from unknown import Unknown
+        u = Unknown #@
+        ''')
+        self.assertEqual(helpers.object_type(node), util.Uninferable)
+
+    def test_object_type_too_many_types(self):
+        node = builder.extract_node('''
+        from unknown import Unknown
+        def test(x):
+            if x:
+                return lambda: None
+            else:
+                return 1
+        test(Unknown) #@
+        ''')
+        self.assertEqual(helpers.object_type(node), util.Uninferable)
+
+    def test_is_subtype(self):
+        ast_nodes = builder.extract_node('''
+        class int_subclass(int):
+            pass
+        class A(object): pass #@
+        class B(A): pass #@
+        class C(A): pass #@
+        int_subclass() #@
+        ''')
+        cls_a = ast_nodes[0]
+        cls_b = ast_nodes[1]
+        cls_c = ast_nodes[2]
+        int_subclass = ast_nodes[3]
+        int_subclass = helpers.object_type(next(int_subclass.infer()))
+        base_int = self._extract('int')
+        self.assertTrue(helpers.is_subtype(int_subclass, base_int))
+        self.assertTrue(helpers.is_supertype(base_int, int_subclass))
+
+        self.assertTrue(helpers.is_supertype(cls_a, cls_b))
+        self.assertTrue(helpers.is_supertype(cls_a, cls_c))
+        self.assertTrue(helpers.is_subtype(cls_b, cls_a))
+        self.assertTrue(helpers.is_subtype(cls_c, cls_a))
+        self.assertFalse(helpers.is_subtype(cls_a, cls_b))
+        self.assertFalse(helpers.is_subtype(cls_a, cls_b))
+
+    @test_utils.require_version(maxver='3.0')
+    def test_is_subtype_supertype_old_style_classes(self):
+        cls_a, cls_b = builder.extract_node('''
+        class A: #@
+            pass
+        class B(A): #@
+            pass
+        ''')
+        self.assertFalse(helpers.is_subtype(cls_a, cls_b))
+        self.assertFalse(helpers.is_subtype(cls_b, cls_a))
+        self.assertFalse(helpers.is_supertype(cls_a, cls_b))
+        self.assertFalse(helpers.is_supertype(cls_b, cls_a))
+
+    def test_is_subtype_supertype_mro_error(self):
+        cls_e, cls_f = builder.extract_node('''
+        class A(object): pass
+        class B(A): pass
+        class C(A): pass
+        class D(B, C): pass
+        class E(C, B): pass #@
+        class F(D, E): pass #@
+        ''')
+        self.assertFalse(helpers.is_subtype(cls_e, cls_f))
+
+        self.assertFalse(helpers.is_subtype(cls_e, cls_f))
+        with self.assertRaises(exceptions._NonDeducibleTypeHierarchy):
+            helpers.is_subtype(cls_f, cls_e)
+        self.assertFalse(helpers.is_supertype(cls_f, cls_e))
+
+    def test_is_subtype_supertype_unknown_bases(self):
+        cls_a, cls_b = builder.extract_node('''
+        from unknown import Unknown
+        class A(Unknown): pass #@
+        class B(A): pass #@
+        ''')
+        with self.assertRaises(exceptions._NonDeducibleTypeHierarchy):
+            helpers.is_subtype(cls_a, cls_b)
+        with self.assertRaises(exceptions._NonDeducibleTypeHierarchy):
+            helpers.is_supertype(cls_a, cls_b)
+
+    def test_is_subtype_supertype_unrelated_classes(self):
+        cls_a, cls_b = builder.extract_node('''
+        class A(object): pass #@
+        class B(object): pass #@
+        ''')
+        self.assertFalse(helpers.is_subtype(cls_a, cls_b))
+        self.assertFalse(helpers.is_subtype(cls_b, cls_a))
+        self.assertFalse(helpers.is_supertype(cls_a, cls_b))
+        self.assertFalse(helpers.is_supertype(cls_b, cls_a))
+
+    def test_is_subtype_supertype_classes_no_type_ancestor(self):
+        cls_a = builder.extract_node('''
+        class A(object): #@
+            pass
+        ''')
+        builtin_type = self._extract('type')
+        self.assertFalse(helpers.is_supertype(builtin_type, cls_a))
+        self.assertFalse(helpers.is_subtype(cls_a, builtin_type))
+
+    def test_is_subtype_supertype_classes_metaclasses(self):
+        cls_a = builder.extract_node('''
+
class A(type): #@ + pass + ''') + builtin_type = self._extract('type') + self.assertTrue(helpers.is_supertype(builtin_type, cls_a)) + self.assertTrue(helpers.is_subtype(cls_a, builtin_type)) + + +if __name__ == '__main__': + unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_inference.py astroid-1.5.3/astroid/tests/unittest_inference.py --- astroid-1.4.9/astroid/tests/unittest_inference.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_inference.py 2017-06-01 22:07:25.000000000 +0000 @@ -1,22 +1,17 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2014 Google, Inc. +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015 Dmitry Pribysh +# Copyright (c) 2015 Rene Zhang + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """tests for the astroid inference capabilities """ +# pylint: disable=too-many-lines +import os import sys from functools import partial import unittest @@ -25,11 +20,14 @@ import six from astroid import InferenceError, builder, nodes -from astroid.builder import parse +from astroid.builder import parse, extract_node from astroid.inference import infer_end as inference_infer_end from astroid.bases import Instance, BoundMethod, UnboundMethod,\ - path_wrapper, BUILTINS + BUILTINS from astroid import arguments +from astroid import decorators as decoratorsmod +from astroid import exceptions +from astroid import helpers from astroid import objects from astroid import test_utils from astroid import util @@ -43,8 +41,10 @@ if sys.version_info < (3, 0): EXC_MODULE = 'exceptions' + BOOL_SPECIAL_METHOD = '__nonzero__' else: EXC_MODULE = BUILTINS + BOOL_SPECIAL_METHOD = '__bool__' class InferenceUtilsTest(unittest.TestCase): @@ -52,8 +52,8 @@ def test_path_wrapper(self): def infer_default(self, *args): raise InferenceError - infer_default = path_wrapper(infer_default) - infer_end = path_wrapper(inference_infer_end) + infer_default = decoratorsmod.path_wrapper(infer_default) + infer_end = decoratorsmod.path_wrapper(inference_infer_end) with self.assertRaises(InferenceError): next(infer_default(1)) self.assertEqual(next(infer_end(1)), 1) @@ -235,12 +235,12 @@ self.assertRaises(StopIteration, partial(next, inferred)) def test_swap_assign_inference(self): - inferred = self.ast._locals['a'][1].infer() + inferred = self.ast.locals['a'][1].infer() const = next(inferred) self.assertIsInstance(const, nodes.Const) self.assertEqual(const.value, 1) self.assertRaises(StopIteration, partial(next, inferred)) - inferred = self.ast._locals['b'][1].infer() + inferred = 
self.ast.locals['b'][1].infer() exc = next(inferred) self.assertIsInstance(exc, Instance) self.assertEqual(exc.name, 'Exception') @@ -307,7 +307,7 @@ self.assertIsInstance(obj1, nodes.Const) self.assertEqual(obj1.value, 0) obj1 = next(inferred) - self.assertIs(obj1, util.YES, obj1) + self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_args_default_inference2(self): @@ -316,13 +316,13 @@ self.assertIsInstance(obj1, nodes.Const) self.assertEqual(obj1.value, 4) obj1 = next(inferred) - self.assertIs(obj1, util.YES, obj1) + self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_inference_restrictions(self): inferred = test_utils.get_name_node(self.ast['C']['meth1'], 'arg1').infer() obj1 = next(inferred) - self.assertIs(obj1, util.YES, obj1) + self.assertIs(obj1, util.Uninferable, obj1) self.assertRaises(StopIteration, partial(next, inferred)) def test_ancestors_inference(self): @@ -333,7 +333,7 @@ class A(A): #@ pass ''' - a1, a2 = test_utils.extract_node(code, __name__) + a1, a2 = extract_node(code, __name__) a2_ancestors = list(a2.ancestors()) self.assertEqual(len(a2_ancestors), 2) self.assertIs(a2_ancestors[0], a1) @@ -349,7 +349,7 @@ class A(B): #@ pass ''' - a1, b, a2 = test_utils.extract_node(code, __name__) + a1, b, a2 = extract_node(code, __name__) a2_ancestors = list(a2.ancestors()) self.assertEqual(len(a2_ancestors), 3) self.assertIs(a2_ancestors[0], b) @@ -378,9 +378,9 @@ ast = parse(code, __name__) a = ast['a'] - warnings.simplefilter('always') with warnings.catch_warnings(record=True) as w: - a.infered() + with test_utils.enable_warning(PendingDeprecationWarning): + a.infered() self.assertIsInstance(w[0].message, PendingDeprecationWarning) def test_exc_ancestors(self): @@ -388,14 +388,17 @@ def f(): raise __(NotImplementedError) ''' - error = test_utils.extract_node(code, __name__) + error = extract_node(code, __name__) nie = error.inferred()[0] self.assertIsInstance(nie, nodes.ClassDef) nie_ancestors = [c.name for c in nie.ancestors()] if sys.version_info < (3, 0): - self.assertEqual(nie_ancestors, ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object']) + expected = ['RuntimeError', 'StandardError', + 'Exception', 'BaseException', 'object'] + self.assertEqual(nie_ancestors, expected) else: - self.assertEqual(nie_ancestors, ['RuntimeError', 'Exception', 'BaseException', 'object']) + expected = ['RuntimeError', 'Exception', 'BaseException', 'object'] + self.assertEqual(nie_ancestors, expected) def test_except_inference(self): code = ''' @@ -425,7 +428,7 @@ code = ''' del undefined_attr ''' - delete = test_utils.extract_node(code, __name__) + delete = extract_node(code, __name__) self.assertRaises(InferenceError, delete.infer) def test_del2(self): @@ -467,15 +470,15 @@ inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.List) self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.getitem(0).value, 1) + self.assertEqual(inferred.getitem(nodes.Const(0)).value, 1) self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'list') - self.assertIn('append', inferred._proxied._locals) + self.assertIn('append', inferred._proxied.locals) n = ast['t'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Tuple) self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.getitem(0).value, 2) + self.assertEqual(inferred.getitem(nodes.Const(0)).value, 2) 
self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'tuple') n = ast['d'] @@ -484,16 +487,16 @@ self.assertIsInstance(inferred, Instance) self.assertIsInstance(inferred._proxied, nodes.ClassDef) self.assertEqual(inferred._proxied.name, 'dict') - self.assertIn('get', inferred._proxied._locals) + self.assertIn('get', inferred._proxied.locals) n = ast['s'] inferred = next(n.infer()) self.assertIsInstance(inferred, nodes.Const) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'str') - self.assertIn('lower', inferred._proxied._locals) + self.assertIn('lower', inferred._proxied.locals) n = ast['s2'] inferred = next(n.infer()) - self.assertEqual(inferred.getitem(0).value, '_') + self.assertEqual(inferred.getitem(nodes.Const(0)).value, '_') code = 's = {1}' ast = parse(code, __name__) @@ -502,7 +505,7 @@ self.assertIsInstance(inferred, nodes.Set) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'set') - self.assertIn('remove', inferred._proxied._locals) + self.assertIn('remove', inferred._proxied.locals) @test_utils.require_version(maxver='3.0') def test_unicode_type(self): @@ -513,7 +516,7 @@ self.assertIsInstance(inferred, nodes.Const) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.name, 'unicode') - self.assertIn('lower', inferred._proxied._locals) + self.assertIn('lower', inferred._proxied.locals) @unittest.expectedFailure def test_descriptor_are_callable(self): @@ -558,7 +561,7 @@ ast = parse(code, __name__) xxx = ast['xxx'] self.assertSetEqual({n.__class__ for n in xxx.inferred()}, - {nodes.Const, util.YES.__class__}) + {nodes.Const, util.Uninferable.__class__}) def test_method_argument(self): code = ''' @@ -574,13 +577,13 @@ ast = parse(code, __name__) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'e_type') self.assertEqual([n.__class__ for n in arg.infer()], - [util.YES.__class__]) + [util.Uninferable.__class__]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['__init__'], 'kwargs') self.assertEqual([n.__class__ for n in arg.infer()], [nodes.Dict]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'e_type') self.assertEqual([n.__class__ for n in arg.infer()], - [util.YES.__class__]) + [util.Uninferable.__class__]) arg = test_utils.get_name_node(ast['ErudiEntitySchema']['meth'], 'args') self.assertEqual([n.__class__ for n in arg.infer()], [nodes.Tuple]) @@ -594,7 +597,7 @@ tags = list(tags) __(tags).append(vid) ''' - name = test_utils.extract_node(code, __name__) + name = extract_node(code, __name__) it = name.infer() tags = next(it) self.assertIsInstance(tags, nodes.List) @@ -639,9 +642,10 @@ ast = parse(code, __name__) self.assertEqual(len(list(ast['process_line'].infer_call_result(None))), 3) self.assertEqual(len(list(ast['tupletest'].infer())), 3) - values = ['FunctionDef(first_word)', 'FunctionDef(last_word)', 'Const(NoneType)'] - self.assertEqual([str(inferred) - for inferred in ast['fct'].infer()], values) + values = ['= (3, 0): - self.skipTest(' module has been removed') - inferred = list(ast.igetattr('make_code')) - self.assertEqual(len(inferred), 1) - self.assertIsInstance(inferred[0], Instance) - self.assertEqual(str(inferred[0]), - 'Instance of %s.type' % BUILTINS) def _test_const_inferred(self, node, value): inferred = list(node.infer()) @@ -883,14 +885,32 @@ def test_unary_not(self): for code in ('a = not (1,); b = not ()', - 'a = not {1:2}; b = not {}'): + 'a = not {1:2}; b = not {}', + 'a = not [1, 2]; b = not []', 
+ 'a = not {1, 2}; b = not set()', + 'a = not 1; b = not 0', + 'a = not "a"; b = not ""', + 'a = not b"a"; b = not b""'): ast = builder.string_build(code, __name__, __file__) self._test_const_inferred(ast['a'], False) self._test_const_inferred(ast['b'], True) + def test_unary_op_numbers(self): + ast_nodes = extract_node(''' + +1 #@ + -1 #@ + ~1 #@ + +2.0 #@ + -2.0 #@ + ''') + expected = [1, -1, -2, 2.0, -2.0] + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertEqual(inferred.value, expected_value) + @test_utils.require_version(minver='3.5') def test_matmul(self): - node = test_utils.extract_node(''' + node = extract_node(''' class Array: def __matmul__(self, other): return 42 @@ -916,26 +936,65 @@ ast = builder.string_build('a = "*" * 40', __name__, __file__) self._test_const_inferred(ast['a'], "*" * 40) - def test_binary_op_bitand(self): + def test_binary_op_int_bitand(self): ast = builder.string_build('a = 23&20', __name__, __file__) self._test_const_inferred(ast['a'], 23&20) - def test_binary_op_bitor(self): + def test_binary_op_int_bitor(self): ast = builder.string_build('a = 23|8', __name__, __file__) self._test_const_inferred(ast['a'], 23|8) - def test_binary_op_bitxor(self): + def test_binary_op_int_bitxor(self): ast = builder.string_build('a = 23^9', __name__, __file__) self._test_const_inferred(ast['a'], 23^9) - def test_binary_op_shiftright(self): + def test_binary_op_int_shiftright(self): ast = builder.string_build('a = 23 >>1', __name__, __file__) self._test_const_inferred(ast['a'], 23>>1) - def test_binary_op_shiftleft(self): + def test_binary_op_int_shiftleft(self): ast = builder.string_build('a = 23 <<1', __name__, __file__) self._test_const_inferred(ast['a'], 23<<1) + def test_binary_op_other_type(self): + ast_nodes = extract_node(''' + class A: + def __add__(self, other): + return other + 42 + A() + 1 #@ + 1 + A() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.Const) + self.assertEqual(first.value, 43) + + second = next(ast_nodes[1].infer()) + self.assertEqual(second, util.Uninferable) + + def test_binary_op_other_type_using_reflected_operands(self): + ast_nodes = extract_node(''' + class A(object): + def __radd__(self, other): + return other + 42 + A() + 1 #@ + 1 + A() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertEqual(first, util.Uninferable) + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, nodes.Const) + self.assertEqual(second.value, 43) + + def test_binary_op_reflected_and_not_implemented_is_type_error(self): + ast_node = extract_node(''' + class A(object): + def __radd__(self, other): return NotImplemented + + 1 + A() #@ + ''') + first = next(ast_node.infer()) + self.assertEqual(first, util.Uninferable) def test_binary_op_list_mul(self): for code in ('a = [[]] * 2', 'a = 2 * [[]]'): @@ -952,10 +1011,10 @@ ast = builder.string_build('a = [1] * None\nb = [1] * "r"') inferred = ast['a'].inferred() self.assertEqual(len(inferred), 1) - self.assertEqual(inferred[0], util.YES) + self.assertEqual(inferred[0], util.Uninferable) inferred = ast['b'].inferred() self.assertEqual(len(inferred), 1) - self.assertEqual(inferred[0], util.YES) + self.assertEqual(inferred[0], util.Uninferable) def test_binary_op_list_mul_int(self): 'test correct handling on list multiplied by int when there are more than one' @@ -970,6 +1029,18 @@ self.assertIsInstance(listval, nodes.List) self.assertEqual(len(listval.itered()), 4) + def test_binary_op_on_self(self): + 'test 
correct handling of applying binary operator to self' + code = ''' + import sys + sys.path = ['foo'] + sys.path + sys.path.insert(0, 'bar') + path = sys.path + ''' + ast = parse(code, __name__) + inferred = ast['path'].inferred() + self.assertIsInstance(inferred[0], nodes.List) + def test_binary_op_tuple_add(self): ast = builder.string_build('a = (1,) + (2,)', __name__, __file__) inferred = list(ast['a'].infer()) @@ -1012,10 +1083,10 @@ def f(g = lambda: None): __(g()).x ''' - callfuncnode = test_utils.extract_node(code) + callfuncnode = extract_node(code) inferred = list(callfuncnode.infer()) self.assertEqual(len(inferred), 2, inferred) - inferred.remove(util.YES) + inferred.remove(util.Uninferable) self.assertIsInstance(inferred[0], nodes.Const) self.assertIsNone(inferred[0].value) @@ -1027,7 +1098,7 @@ ast = parse(code, __name__) inferred = list(ast['f'].ilookup('a')) self.assertEqual(len(inferred), 1) - self.assertEqual(inferred[0], util.YES) + self.assertEqual(inferred[0], util.Uninferable) def test_nonregr_instance_attrs(self): """non regression for instance_attrs infinite loop : pylint / #4""" @@ -1047,40 +1118,23 @@ foo_class = ast['Foo'] bar_class = ast['Bar'] bar_self = ast['Bar']['__init__']['self'] - assattr = bar_class._instance_attrs['attr'][0] - self.assertEqual(len(foo_class._instance_attrs['attr']), 1) - self.assertEqual(len(bar_class._instance_attrs['attr']), 1) - self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) + assattr = bar_class.instance_attrs['attr'][0] + self.assertEqual(len(foo_class.instance_attrs['attr']), 1) + self.assertEqual(len(bar_class.instance_attrs['attr']), 1) + self.assertEqual(bar_class.instance_attrs, {'attr': [assattr]}) # call 'instance_attr' via 'Instance.getattr' to trigger the bug: instance = bar_self.inferred()[0] instance.getattr('attr') - self.assertEqual(len(bar_class._instance_attrs['attr']), 1) - self.assertEqual(len(foo_class._instance_attrs['attr']), 1) - self.assertEqual(bar_class._instance_attrs, {'attr': [assattr]}) - - def test_python25_generator_exit(self): - # pylint: disable=redefined-variable-type - buffer = six.StringIO() - sys.stderr = buffer - try: - data = "b = {}[str(0)+''].a" - ast = builder.string_build(data, __name__, __file__) - list(ast['b'].infer()) - output = buffer.getvalue() - finally: - sys.stderr = sys.__stderr__ - # I have no idea how to test for this in another way... 
- msg = ("Exception exceptions.RuntimeError: " - "'generator ignored GeneratorExit' in " - "ignored") - self.assertNotIn("RuntimeError", output, msg) + self.assertEqual(len(bar_class.instance_attrs['attr']), 1) + self.assertEqual(len(foo_class.instance_attrs['attr']), 1) + self.assertEqual(bar_class.instance_attrs, {'attr': [assattr]}) def test_python25_no_relative_import(self): ast = resources.build_file('data/package/absimport.py') self.assertTrue(ast.absolute_import_activated(), True) inferred = next(test_utils.get_name_node(ast, 'import_package_subpackage_module').infer()) # failed to import since absolute_import is activated - self.assertIs(inferred, util.YES) + self.assertIs(inferred, util.Uninferable) def test_nonregr_absolute_import(self): ast = resources.build_file('data/absimp/string.py', 'data.absimp.string') @@ -1088,7 +1142,7 @@ inferred = next(test_utils.get_name_node(ast, 'string').infer()) self.assertIsInstance(inferred, nodes.Module) self.assertEqual(inferred.name, 'string') - self.assertIn('ascii_letters', inferred._locals) + self.assertIn('ascii_letters', inferred.locals) def test_mechanize_open(self): try: @@ -1198,7 +1252,7 @@ ast = parse(code, __name__) inferred = list(test_utils.get_name_node(ast['foo'], 'spam').infer()) self.assertEqual(len(inferred), 1) - self.assertIs(inferred[0], util.YES) + self.assertIs(inferred[0], util.Uninferable) def test_nonregr_func_global(self): code = ''' @@ -1250,7 +1304,7 @@ inferred = next(ast['Z'].infer()) self.assertIsInstance(inferred, nodes.List) self.assertEqual(len(inferred.elts), 1) - self.assertIs(inferred.elts[0], util.YES) + self.assertIsInstance(inferred.elts[0], nodes.Unknown) def test__new__(self): code = ''' @@ -1270,7 +1324,7 @@ self.assertEqual(len(inferred), 1, inferred) def test__new__bound_methods(self): - node = test_utils.extract_node(''' + node = extract_node(''' class cls(object): pass cls().__new__(cls) #@ ''') @@ -1297,12 +1351,12 @@ ast = parse(code, __name__) aclass = ast['A'].inferred()[0] self.assertIsInstance(aclass, nodes.ClassDef) - self.assertIn('a', aclass._instance_attrs) - self.assertIn('b', aclass._instance_attrs) + self.assertIn('a', aclass.instance_attrs) + self.assertIn('b', aclass.instance_attrs) bclass = ast['B'].inferred()[0] self.assertIsInstance(bclass, nodes.ClassDef) - self.assertIn('a', bclass._instance_attrs) - self.assertIn('b', bclass._instance_attrs) + self.assertIn('a', bclass.instance_attrs) + self.assertIn('b', bclass.instance_attrs) def test_infer_arguments(self): code = ''' @@ -1390,7 +1444,7 @@ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] - self.assertIs(sub, util.YES) + self.assertIs(sub, util.Uninferable) self.assertIsInstance(mul, nodes.Const) self.assertEqual(mul.value, 42) @@ -1409,7 +1463,7 @@ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] - self.assertIs(sub, util. YES) + self.assertIs(sub, util. 
Uninferable) self.assertIsInstance(mul, nodes.Const) self.assertEqual(mul.value, 42) @@ -1429,7 +1483,7 @@ ast = parse(code, __name__) sub = ast['sub'].inferred()[0] mul = ast['mul'].inferred()[0] - self.assertIs(sub, util.YES) + self.assertIs(sub, util.Uninferable) self.assertIsInstance(mul, nodes.List) self.assertIsInstance(mul.elts[0], nodes.Const) self.assertEqual(mul.elts[0].value, 42) @@ -1446,12 +1500,12 @@ """ ast = parse(code, __name__) node = ast['c'] - self.assertEqual(node.inferred(), [util.YES]) + self.assertEqual(node.inferred(), [util.Uninferable]) def test_infer_empty_nodes(self): # Should not crash when trying to infer EmptyNodes. node = nodes.EmptyNode() - self.assertEqual(node.inferred(), [util.YES]) + self.assertEqual(node.inferred(), [util.Uninferable]) def test_infinite_loop_for_decorators(self): # Issue https://bitbucket.org/logilab/astroid/issue/50 @@ -1471,7 +1525,7 @@ self.assertEqual(node.type, 'function') def test_no_infinite_ancestor_loop(self): - klass = test_utils.extract_node(""" + klass = extract_node(""" import datetime def method(self): @@ -1491,131 +1545,258 @@ self.config = {0: self.config[0]} self.config[0].test() #@ """ - ast = test_utils.extract_node(code, __name__) + ast = extract_node(code, __name__) expr = ast.func.expr self.assertRaises(InferenceError, next, expr.infer()) def test_tuple_builtin_inference(self): - code = """ - var = (1, 2) - tuple() #@ - tuple([1]) #@ - tuple({2}) #@ - tuple("abc") #@ - tuple({1: 2}) #@ - tuple(var) #@ - tuple(tuple([1])) #@ - - tuple(None) #@ - tuple(1) #@ - tuple(1, 2) #@ - """ - ast = test_utils.extract_node(code, __name__) - - self.assertInferTuple(ast[0], []) - self.assertInferTuple(ast[1], [1]) - self.assertInferTuple(ast[2], [2]) - self.assertInferTuple(ast[3], ["a", "b", "c"]) - self.assertInferTuple(ast[4], [1]) - self.assertInferTuple(ast[5], [1, 2]) - self.assertInferTuple(ast[6], [1]) - - for node in ast[7:]: - inferred = next(node.infer()) - self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.qname(), "{}.tuple".format(BUILTINS)) + code = """ + var = (1, 2) + tuple() #@ + tuple([1]) #@ + tuple({2}) #@ + tuple("abc") #@ + tuple({1: 2}) #@ + tuple(var) #@ + tuple(tuple([1])) #@ + tuple(frozenset((1, 2))) #@ + + tuple(None) #@ + tuple(1) #@ + tuple(1, 2) #@ + """ + ast = extract_node(code, __name__) + + self.assertInferTuple(ast[0], []) + self.assertInferTuple(ast[1], [1]) + self.assertInferTuple(ast[2], [2]) + self.assertInferTuple(ast[3], ["a", "b", "c"]) + self.assertInferTuple(ast[4], [1]) + self.assertInferTuple(ast[5], [1, 2]) + self.assertInferTuple(ast[6], [1]) + self.assertInferTuple(ast[7], [1, 2]) + + for node in ast[8:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.tuple".format(BUILTINS)) + + @test_utils.require_version('3.5') + def test_starred_in_tuple_literal(self): + code = """ + var = (1, 2, 3) + bar = (5, 6, 7) + foo = [999, 1000, 1001] + (0, *var) #@ + (0, *var, 4) #@ + (0, *var, 4, *bar) #@ + (0, *var, 4, *(*bar, 8)) #@ + (0, *var, 4, *(*bar, *foo)) #@ + """ + ast = extract_node(code, __name__) + self.assertInferTuple(ast[0], [0, 1, 2, 3]) + self.assertInferTuple(ast[1], [0, 1, 2, 3, 4]) + self.assertInferTuple(ast[2], [0, 1, 2, 3, 4, 5, 6, 7]) + self.assertInferTuple(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) + self.assertInferTuple(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) + + @test_utils.require_version('3.5') + def test_starred_in_list_literal(self): + code = """ + var = (1, 2, 3) + bar = 
(5, 6, 7) + foo = [999, 1000, 1001] + [0, *var] #@ + [0, *var, 4] #@ + [0, *var, 4, *bar] #@ + [0, *var, 4, *[*bar, 8]] #@ + [0, *var, 4, *[*bar, *foo]] #@ + """ + ast = extract_node(code, __name__) + self.assertInferList(ast[0], [0, 1, 2, 3]) + self.assertInferList(ast[1], [0, 1, 2, 3, 4]) + self.assertInferList(ast[2], [0, 1, 2, 3, 4, 5, 6, 7]) + self.assertInferList(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) + self.assertInferList(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) + + @test_utils.require_version('3.5') + def test_starred_in_set_literal(self): + code = """ + var = (1, 2, 3) + bar = (5, 6, 7) + foo = [999, 1000, 1001] + {0, *var} #@ + {0, *var, 4} #@ + {0, *var, 4, *bar} #@ + {0, *var, 4, *{*bar, 8}} #@ + {0, *var, 4, *{*bar, *foo}} #@ + """ + ast = extract_node(code, __name__) + self.assertInferSet(ast[0], [0, 1, 2, 3]) + self.assertInferSet(ast[1], [0, 1, 2, 3, 4]) + self.assertInferSet(ast[2], [0, 1, 2, 3, 4, 5, 6, 7]) + self.assertInferSet(ast[3], [0, 1, 2, 3, 4, 5, 6, 7, 8]) + self.assertInferSet(ast[4], [0, 1, 2, 3, 4, 5, 6, 7, 999, 1000, 1001]) + + @test_utils.require_version('3.5') + def test_starred_in_literals_inference_issues(self): + code = """ + {0, *var} #@ + {0, *var, 4} #@ + {0, *var, 4, *bar} #@ + {0, *var, 4, *{*bar, 8}} #@ + {0, *var, 4, *{*bar, *foo}} #@ + """ + ast = extract_node(code, __name__) + for node in ast: + with self.assertRaises(InferenceError): + next(node.infer()) + + @test_utils.require_version('3.5') + def test_starred_in_mapping_literal(self): + code = """ + var = {1: 'b', 2: 'c'} + bar = {4: 'e', 5: 'f'} + {0: 'a', **var} #@ + {0: 'a', **var, 3: 'd'} #@ + {0: 'a', **var, 3: 'd', **{**bar, 6: 'g'}} #@ + """ + ast = extract_node(code, __name__) + self.assertInferDict(ast[0], {0: 'a', 1: 'b', 2: 'c'}) + self.assertInferDict(ast[1], {0: 'a', 1: 'b', 2: 'c', 3: 'd'}) + self.assertInferDict(ast[2], {0: 'a', 1: 'b', 2: 'c', 3: 'd', + 4: 'e', 5: 'f', 6: 'g'}) + + @test_utils.require_version('3.5') + def test_starred_in_mapping_inference_issues(self): + code = """ + {0: 'a', **var} #@ + {0: 'a', **var, 3: 'd'} #@ + {0: 'a', **var, 3: 'd', **{**bar, 6: 'g'}} #@ + """ + ast = extract_node(code, __name__) + for node in ast: + with self.assertRaises(InferenceError): + next(node.infer()) + + @test_utils.require_version('3.5') + def test_starred_in_mapping_literal_non_const_keys_values(self): + code = """ + a, b, c, d, e, f, g, h, i, j = "ABCDEFGHIJ" + var = {c: d, e: f} + bar = {i: j} + {a: b, **var} #@ + {a: b, **var, **{g: h, **bar}} #@ + """ + ast = extract_node(code, __name__) + self.assertInferDict(ast[0], {"A": "B", "C": "D", "E": "F"}) + self.assertInferDict(ast[1], {"A": "B", "C": "D", "E": "F", "G": "H", "I": "J"}) def test_frozenset_builtin_inference(self): - code = """ - var = (1, 2) - frozenset() #@ - frozenset([1, 2, 1]) #@ - frozenset({2, 3, 1}) #@ - frozenset("abcab") #@ - frozenset({1: 2}) #@ - frozenset(var) #@ - frozenset(tuple([1])) #@ - - frozenset(set(tuple([4, 5, set([2])]))) #@ - frozenset(None) #@ - frozenset(1) #@ - frozenset(1, 2) #@ - """ - ast = test_utils.extract_node(code, __name__) - - self.assertInferFrozenSet(ast[0], []) - self.assertInferFrozenSet(ast[1], [1, 2]) - self.assertInferFrozenSet(ast[2], [1, 2, 3]) - self.assertInferFrozenSet(ast[3], ["a", "b", "c"]) - self.assertInferFrozenSet(ast[4], [1]) - self.assertInferFrozenSet(ast[5], [1, 2]) - self.assertInferFrozenSet(ast[6], [1]) - - for node in ast[7:]: - infered = next(node.infer()) - self.assertIsInstance(infered, Instance) - self.assertEqual(infered.qname(), 
"{}.frozenset".format(BUILTINS)) + code = """ + var = (1, 2) + frozenset() #@ + frozenset([1, 2, 1]) #@ + frozenset({2, 3, 1}) #@ + frozenset("abcab") #@ + frozenset({1: 2}) #@ + frozenset(var) #@ + frozenset(tuple([1])) #@ + + frozenset(set(tuple([4, 5, set([2])]))) #@ + frozenset(None) #@ + frozenset(1) #@ + frozenset(1, 2) #@ + """ + ast = extract_node(code, __name__) + + self.assertInferFrozenSet(ast[0], []) + self.assertInferFrozenSet(ast[1], [1, 2]) + self.assertInferFrozenSet(ast[2], [1, 2, 3]) + self.assertInferFrozenSet(ast[3], ["a", "b", "c"]) + self.assertInferFrozenSet(ast[4], [1]) + self.assertInferFrozenSet(ast[5], [1, 2]) + self.assertInferFrozenSet(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.frozenset".format(BUILTINS)) def test_set_builtin_inference(self): - code = """ - var = (1, 2) - set() #@ - set([1, 2, 1]) #@ - set({2, 3, 1}) #@ - set("abcab") #@ - set({1: 2}) #@ - set(var) #@ - set(tuple([1])) #@ - - set(set(tuple([4, 5, set([2])]))) #@ - set(None) #@ - set(1) #@ - set(1, 2) #@ - """ - ast = test_utils.extract_node(code, __name__) - - self.assertInferSet(ast[0], []) - self.assertInferSet(ast[1], [1, 2]) - self.assertInferSet(ast[2], [1, 2, 3]) - self.assertInferSet(ast[3], ["a", "b", "c"]) - self.assertInferSet(ast[4], [1]) - self.assertInferSet(ast[5], [1, 2]) - self.assertInferSet(ast[6], [1]) - - for node in ast[7:]: - inferred = next(node.infer()) - self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.qname(), "{}.set".format(BUILTINS)) + code = """ + var = (1, 2) + set() #@ + set([1, 2, 1]) #@ + set({2, 3, 1}) #@ + set("abcab") #@ + set({1: 2}) #@ + set(var) #@ + set(tuple([1])) #@ + + set(set(tuple([4, 5, set([2])]))) #@ + set(None) #@ + set(1) #@ + set(1, 2) #@ + """ + ast = extract_node(code, __name__) + + self.assertInferSet(ast[0], []) + self.assertInferSet(ast[1], [1, 2]) + self.assertInferSet(ast[2], [1, 2, 3]) + self.assertInferSet(ast[3], ["a", "b", "c"]) + self.assertInferSet(ast[4], [1]) + self.assertInferSet(ast[5], [1, 2]) + self.assertInferSet(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.set".format(BUILTINS)) def test_list_builtin_inference(self): - code = """ - var = (1, 2) - list() #@ - list([1, 2, 1]) #@ - list({2, 3, 1}) #@ - list("abcab") #@ - list({1: 2}) #@ - list(var) #@ - list(tuple([1])) #@ - - list(list(tuple([4, 5, list([2])]))) #@ - list(None) #@ - list(1) #@ - list(1, 2) #@ - """ - ast = test_utils.extract_node(code, __name__) - self.assertInferList(ast[0], []) - self.assertInferList(ast[1], [1, 1, 2]) - self.assertInferList(ast[2], [1, 2, 3]) - self.assertInferList(ast[3], ["a", "a", "b", "b", "c"]) - self.assertInferList(ast[4], [1]) - self.assertInferList(ast[5], [1, 2]) - self.assertInferList(ast[6], [1]) - - for node in ast[7:]: - inferred = next(node.infer()) - self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.qname(), "{}.list".format(BUILTINS)) + code = """ + var = (1, 2) + list() #@ + list([1, 2, 1]) #@ + list({2, 3, 1}) #@ + list("abcab") #@ + list({1: 2}) #@ + list(var) #@ + list(tuple([1])) #@ + + list(list(tuple([4, 5, list([2])]))) #@ + list(None) #@ + list(1) #@ + list(1, 2) #@ + """ + ast = extract_node(code, __name__) + self.assertInferList(ast[0], []) + self.assertInferList(ast[1], [1, 1, 2]) + self.assertInferList(ast[2], [1, 2, 3]) + 
self.assertInferList(ast[3], ["a", "a", "b", "b", "c"]) + self.assertInferList(ast[4], [1]) + self.assertInferList(ast[5], [1, 2]) + self.assertInferList(ast[6], [1]) + + for node in ast[7:]: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.qname(), "{}.list".format(BUILTINS)) + + def test_conversion_of_dict_methods(self): + ast_nodes = extract_node(''' + list({1:2, 2:3}.values()) #@ + list({1:2, 2:3}.keys()) #@ + tuple({1:2, 2:3}.values()) #@ + tuple({1:2, 3:4}.keys()) #@ + set({1:2, 2:4}.keys()) #@ + ''') + self.assertInferList(ast_nodes[0], [2, 3]) + self.assertInferList(ast_nodes[1], [1, 2]) + self.assertInferTuple(ast_nodes[2], [2, 3]) + self.assertInferTuple(ast_nodes[3], [1, 3]) + self.assertInferSet(ast_nodes[4], [1, 2]) @test_utils.require_version('3.0') def test_builtin_inference_py3k(self): @@ -1624,7 +1805,7 @@ tuple(b"abc") #@ set(b"abc") #@ """ - ast = test_utils.extract_node(code, __name__) + ast = extract_node(code, __name__) self.assertInferList(ast[0], [97, 98, 99]) self.assertInferTuple(ast[1], [97, 98, 99]) self.assertInferSet(ast[2], [97, 98, 99]) @@ -1657,7 +1838,7 @@ return dict(**kwargs) using_unknown_kwargs(a=1, b=2) #@ """ - ast = test_utils.extract_node(code, __name__) + ast = extract_node(code, __name__) self.assertInferDict(ast[0], {}) self.assertInferDict(ast[1], {'a': 1, 'b': 2, 'c': 3}) for i in range(2, 5): @@ -1674,7 +1855,7 @@ self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) def test_dict_inference_kwargs(self): - ast_node = test_utils.extract_node('''dict(a=1, b=2, **{'c': 3})''') + ast_node = extract_node('''dict(a=1, b=2, **{'c': 3})''') self.assertInferDict(ast_node, {'a': 1, 'b': 2, 'c': 3}) @test_utils.require_version('3.5') @@ -1685,7 +1866,7 @@ ('dict({"a":1}, b=2, **{"c":3})', {'a':1, 'b':2, 'c':3}), ] for code, expected_value in pairs: - node = test_utils.extract_node(code) + node = extract_node(code) self.assertInferDict(node, expected_value) def test_dict_invalid_args(self): @@ -1695,7 +1876,7 @@ 'dict(**[])', ] for invalid in invalid_values: - ast_node = test_utils.extract_node(invalid) + ast_node = extract_node(invalid) inferred = next(ast_node.infer()) self.assertIsInstance(inferred, Instance) self.assertEqual(inferred.qname(), "{}.dict".format(BUILTINS)) @@ -1724,7 +1905,7 @@ ' '.find() #@ ' '.count() #@ """ - ast = test_utils.extract_node(code, __name__) + ast = extract_node(code, __name__) self.assertInferConst(ast[0], u'') for i in range(1, 16): self.assertInferConst(ast[i], '') @@ -1755,7 +1936,7 @@ u' '.find() #@ u' '.count() #@ """ - ast = test_utils.extract_node(code, __name__) + ast = extract_node(code, __name__) self.assertInferConst(ast[0], '') for i in range(1, 16): self.assertInferConst(ast[i], u'') @@ -1777,122 +1958,2049 @@ self.assertIsInstance(inferred, nodes.ClassDef) self.assertEqual(inferred.qname(), 'collections.Counter') + def test_inferring_with_statement_failures(self): + module = parse(''' + class NoEnter(object): + pass + class NoMethod(object): + __enter__ = None + class NoElts(object): + def __enter__(self): + return 42 -class ArgumentsTest(unittest.TestCase): + with NoEnter() as no_enter: + pass + with NoMethod() as no_method: + pass + with NoElts() as (no_elts, no_elts1): + pass + ''') + self.assertRaises(InferenceError, next, module['no_enter'].infer()) + self.assertRaises(InferenceError, next, module['no_method'].infer()) + self.assertRaises(InferenceError, next, module['no_elts'].infer()) - @staticmethod - def _get_dict_value(inferred): - 
items = inferred.items - return sorted((key.value, value.value) for key, value in items) + def test_inferring_with_statement(self): + module = parse(''' + class SelfContext(object): + def __enter__(self): + return self - @staticmethod - def _get_tuple_value(inferred): - elts = inferred.elts - return tuple(elt.value for elt in elts) + class OtherContext(object): + def __enter__(self): + return SelfContext() + + class MultipleReturns(object): + def __enter__(self): + return SelfContext(), OtherContext() + + class MultipleReturns2(object): + def __enter__(self): + return [1, [2, 3]] - def test_args(self): - expected_values = [(), (1, ), (2, 3), (4, 5), - (3, ), (), (3, 4, 5), - (), (), (4, ), (4, 5), - (), (3, ), (), (), (3, ), (42, )] - ast_nodes = test_utils.extract_node(''' - def func(*args): - return args - func() #@ - func(1) #@ - func(2, 3) #@ - func(*(4, 5)) #@ - def func(a, b, *args): - return args - func(1, 2, 3) #@ - func(1, 2) #@ - func(1, 2, 3, 4, 5) #@ - def func(a, b, c=42, *args): - return args - func(1, 2) #@ - func(1, 2, 3) #@ - func(1, 2, 3, 4) #@ - func(1, 2, 3, 4, 5) #@ - func = lambda a, b, *args: args - func(1, 2) #@ - func(1, 2, 3) #@ - func = lambda a, b=42, *args: args - func(1) #@ - func(1, 2) #@ - func(1, 2, 3) #@ - func(1, 2, *(42, )) #@ + with SelfContext() as self_context: + pass + with OtherContext() as other_context: + pass + with MultipleReturns(), OtherContext() as multiple_with: + pass + with MultipleReturns2() as (stdout, (stderr, stdin)): + pass ''') - for node, expected_value in zip(ast_nodes, expected_values): - inferred = next(node.infer()) - self.assertIsInstance(inferred, nodes.Tuple) - self.assertEqual(self._get_tuple_value(inferred), expected_value) + self_context = module['self_context'] + inferred = next(self_context.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'SelfContext') - @test_utils.require_version('3.5') - def test_multiple_starred_args(self): - expected_values = [ - (1, 2, 3), - (1, 4, 2, 3, 5, 6, 7), - ] - ast_nodes = test_utils.extract_node(''' - def func(a, b, *args): - return args - func(1, 2, *(1, ), *(2, 3)) #@ - func(1, 2, *(1, ), 4, *(2, 3), 5, *(6, 7)) #@ + other_context = module['other_context'] + inferred = next(other_context.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'SelfContext') + + multiple_with = module['multiple_with'] + inferred = next(multiple_with.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'SelfContext') + + stdout = module['stdout'] + inferred = next(stdout.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 1) + stderr = module['stderr'] + inferred = next(stderr.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 2) + + def test_inferring_with_contextlib_contextmanager(self): + module = parse(''' + import contextlib + from contextlib import contextmanager + + @contextlib.contextmanager + def manager_none(): + try: + yield + finally: + pass + + @contextlib.contextmanager + def manager_something(): + try: + yield 42 + yield 24 # This should be ignored. 
+ finally: + pass + + @contextmanager + def manager_multiple(): + with manager_none() as foo: + with manager_something() as bar: + yield foo, bar + + with manager_none() as none: + pass + with manager_something() as something: + pass + with manager_multiple() as (first, second): + pass ''') - for node, expected_value in zip(ast_nodes, expected_values): - inferred = next(node.infer()) - self.assertIsInstance(inferred, nodes.Tuple) - self.assertEqual(self._get_tuple_value(inferred), expected_value) + none = module['none'] + inferred = next(none.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertIsNone(inferred.value) - def test_defaults(self): - expected_values = [42, 3, 41, 42] - ast_nodes = test_utils.extract_node(''' - def func(a, b, c=42, *args): - return c - func(1, 2) #@ - func(1, 2, 3) #@ - func(1, 2, c=41) #@ - func(1, 2, 42, 41) #@ + something = module['something'] + inferred = something.inferred() + self.assertEqual(len(inferred), 1) + inferred = inferred[0] + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + first, second = module['first'], module['second'] + first = next(first.infer()) + second = next(second.infer()) + self.assertIsInstance(first, nodes.Const) + self.assertIsNone(first.value) + self.assertIsInstance(second, nodes.Const) + self.assertEqual(second.value, 42) + + def test_inferring_context_manager_skip_index_error(self): + # Raise an InferenceError when having multiple 'as' bindings + # from a context manager, but its result doesn't have those + # indices. This is the case of contextlib.nested, where the + # result is a list, which is mutated later on, so it's + # undetected by astroid. + module = parse(''' + class Manager(object): + def __enter__(self): + return [] + with Manager() as (a, b, c): + pass ''') - for node, expected_value in zip(ast_nodes, expected_values): - inferred = next(node.infer()) - self.assertIsInstance(inferred, nodes.Const) - self.assertEqual(inferred.value, expected_value) + self.assertRaises(InferenceError, next, module['a'].infer()) - @test_utils.require_version('3.0') - def test_kwonly_args(self): - expected_values = [24, 24, 42, 23, 24, 24, 54] - ast_nodes = test_utils.extract_node(''' - def test(*, f, b): return f - test(f=24, b=33) #@ - def test(a, *, f): return f - test(1, f=24) #@ - def test(a, *, f=42): return f - test(1) #@ - test(1, f=23) #@ - def test(a, b, c=42, *args, f=24): - return f - test(1, 2, 3) #@ - test(1, 2, 3, 4) #@ - test(1, 2, 3, 4, 5, f=54) #@ + def test_inferring_context_manager_unpacking_inference_error(self): + # https://github.com/PyCQA/pylint/issues/1463 + module = parse(''' + import contextlib + + @contextlib.contextmanager + def _select_source(a=None): + with _select_source() as result: + yield result + + result = _select_source() + with result as (a, b, c): + pass ''') - for node, expected_value in zip(ast_nodes, expected_values): + self.assertRaises(InferenceError, next, module['a'].infer()) + + def test_inferring_with_contextlib_contextmanager_failures(self): + module = parse(''' + from contextlib import contextmanager + + def no_decorators_mgr(): + yield + @no_decorators_mgr + def other_decorators_mgr(): + yield + @contextmanager + def no_yield_mgr(): + pass + + with no_decorators_mgr() as no_decorators: + pass + with other_decorators_mgr() as other_decorators: + pass + with no_yield_mgr() as no_yield: + pass + ''') + self.assertRaises(InferenceError, next, module['no_decorators'].infer()) + self.assertRaises(InferenceError, next, 
module['other_decorators'].infer()) + self.assertRaises(InferenceError, next, module['no_yield'].infer()) + + def test_unary_op_leaks_stop_iteration(self): + node = extract_node('+[] #@') + self.assertEqual(util.Uninferable, next(node.infer())) + + def test_unary_operands(self): + ast_nodes = extract_node(''' + import os + def func(): pass + from missing import missing + class GoodInstance(object): + def __pos__(self): + return 42 + def __neg__(self): + return +self - 41 + def __invert__(self): + return 42 + class BadInstance(object): + def __pos__(self): + return lala + def __neg__(self): + return missing + class LambdaInstance(object): + __pos__ = lambda self: self.lala + __neg__ = lambda self: self.lala + 1 + @property + def lala(self): return 24 + instance = GoodInstance() + lambda_instance = LambdaInstance() + +instance #@ + -instance #@ + ~instance #@ + --instance #@ + +lambda_instance #@ + -lambda_instance #@ + + bad_instance = BadInstance() + +bad_instance #@ + -bad_instance #@ + ~bad_instance #@ + + # These should be TypeErrors. + ~BadInstance #@ + ~os #@ + -func #@ + +BadInstance #@ + ''') + expected = [42, 1, 42, -1, 24, 25] + for node, value in zip(ast_nodes[:6], expected): inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) - self.assertEqual(inferred.value, expected_value) + self.assertEqual(inferred.value, value) - def test_kwargs(self): + for bad_node in ast_nodes[6:]: + inferred = next(bad_node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_unary_op_instance_method_not_callable(self): + ast_node = extract_node(''' + class A: + __pos__ = (i for i in range(10)) + +A() #@ + ''') + self.assertRaises(InferenceError, next, ast_node.infer()) + + def test_binary_op_type_errors(self): + ast_nodes = extract_node(''' + import collections + 1 + "a" #@ + 1 - [] #@ + 1 * {} #@ + 1 / collections #@ + 1 ** (lambda x: x) #@ + {} * {} #@ + {} - {} #@ + {} | {} #@ + {} >> {} #@ + [] + () #@ + () + [] #@ + [] * 2.0 #@ + () * 2.0 #@ + 2.0 >> 2.0 #@ + class A(object): pass + class B(object): pass + A() + B() #@ + class A1(object): + def __add__(self, other): return NotImplemented + A1() + A1() #@ + class A(object): + def __add__(self, other): return NotImplemented + class B(object): + def __radd__(self, other): return NotImplemented + A() + B() #@ + class Parent(object): + pass + class Child(Parent): + def __add__(self, other): return NotImplemented + Child() + Parent() #@ + class A(object): + def __add__(self, other): return NotImplemented + class B(A): + def __radd__(self, other): + return NotImplemented + A() + B() #@ + # Augmented + f = 1 + f+=A() #@ + x = 1 + x+=[] #@ + ''') + msg = "unsupported operand type(s) for {op}: {lhs!r} and {rhs!r}" expected = [ - [('a', 1), ('b', 2), ('c', 3)], - [('a', 1)], - [('a', 'b')], + msg.format(op="+", lhs="int", rhs="str"), + msg.format(op="-", lhs="int", rhs="list"), + msg.format(op="*", lhs="int", rhs="dict"), + msg.format(op="/", lhs="int", rhs="module"), + msg.format(op="**", lhs="int", rhs="function"), + msg.format(op="*", lhs="dict", rhs="dict"), + msg.format(op="-", lhs="dict", rhs="dict"), + msg.format(op="|", lhs="dict", rhs="dict"), + msg.format(op=">>", lhs="dict", rhs="dict"), + msg.format(op="+", lhs="list", rhs="tuple"), + msg.format(op="+", lhs="tuple", rhs="list"), + msg.format(op="*", lhs="list", rhs="float"), + msg.format(op="*", lhs="tuple", rhs="float"), + msg.format(op=">>", lhs="float", rhs="float"), + msg.format(op="+", lhs="A", rhs="B"), + msg.format(op="+", lhs="A1", rhs="A1"), + 
msg.format(op="+", lhs="A", rhs="B"), + msg.format(op="+", lhs="Child", rhs="Parent"), + msg.format(op="+", lhs="A", rhs="B"), + msg.format(op="+=", lhs="int", rhs="A"), + msg.format(op="+=", lhs="int", rhs="list"), ] - ast_nodes = test_utils.extract_node(''' - def test(**kwargs): - return kwargs - test(a=1, b=2, c=3) #@ - test(a=1) #@ - test(**{'a': 'b'}) #@ + for node, expected_value in zip(ast_nodes, expected): + errors = node.type_errors() + self.assertEqual(len(errors), 1) + error = errors[0] + self.assertEqual(str(error), expected_value) + + def test_unary_type_errors(self): + ast_nodes = extract_node(''' + import collections + ~[] #@ + ~() #@ + ~dict() #@ + ~{} #@ + ~set() #@ + -set() #@ + -"" #@ + ~"" #@ + +"" #@ + class A(object): pass + ~(lambda: None) #@ + ~A #@ + ~A() #@ + ~collections #@ + ~2.0 #@ + ''') + msg = "bad operand type for unary {op}: {type}" + expected = [ + msg.format(op="~", type='list'), + msg.format(op="~", type='tuple'), + msg.format(op="~", type='dict'), + msg.format(op="~", type='dict'), + msg.format(op="~", type='set'), + msg.format(op="-", type='set'), + msg.format(op="-", type='str'), + msg.format(op="~", type='str'), + msg.format(op="+", type='str'), + msg.format(op="~", type=''), + msg.format(op="~", type='A'), + msg.format(op="~", type='A'), + msg.format(op="~", type='collections'), + msg.format(op="~", type='float'), + ] + for node, expected_value in zip(ast_nodes, expected): + errors = node.type_errors() + self.assertEqual(len(errors), 1) + error = errors[0] + self.assertEqual(str(error), expected_value) + + def test_unary_empty_type_errors(self): + # These aren't supported right now + ast_nodes = extract_node(''' + ~(2 and []) #@ + -(0 or {}) #@ + ''') + expected = [ + "bad operand type for unary ~: list", + "bad operand type for unary -: dict", + ] + for node, expected_value in zip(ast_nodes, expected): + errors = node.type_errors() + self.assertEqual(len(errors), 1, (expected, node)) + self.assertEqual(str(errors[0]), expected_value) + + def test_bool_value_recursive(self): + pairs = [ + ('{}', False), + ('{1:2}', True), + ('()', False), + ('(1, 2)', True), + ('[]', False), + ('[1,2]', True), + ('frozenset()', False), + ('frozenset((1, 2))', True), + ] + for code, expected in pairs: + node = extract_node(code) + inferred = next(node.infer()) + self.assertEqual(inferred.bool_value(), expected) + + def test_genexpr_bool_value(self): + node = extract_node('''(x for x in range(10))''') + self.assertTrue(node.bool_value()) + + def test_name_bool_value(self): + node = extract_node(''' + x = 42 + y = x + y + ''') + self.assertIs(node.bool_value(), util.Uninferable) + + def test_bool_value(self): + # Verify the truth value of nodes. 
+ module = parse(''' + import collections + collections_module = collections + def function(): pass + class Class(object): + def method(self): pass + dict_comp = {x:y for (x, y) in ((1, 2), (2, 3))} + set_comp = {x for x in range(10)} + list_comp = [x for x in range(10)] + lambda_func = lambda: None + unbound_method = Class.method + instance = Class() + bound_method = instance.method + def generator_func(): + yield + def true_value(): + return True + generator = generator_func() + bin_op = 1 + 2 + bool_op = x and y + callfunc = test() + good_callfunc = true_value() + compare = 2 < 3 + const_str_true = 'testconst' + const_str_false = '' + ''') + collections_module = next(module['collections_module'].infer()) + self.assertTrue(collections_module.bool_value()) + function = module['function'] + self.assertTrue(function.bool_value()) + klass = module['Class'] + self.assertTrue(klass.bool_value()) + dict_comp = next(module['dict_comp'].infer()) + self.assertEqual(dict_comp, util.Uninferable) + set_comp = next(module['set_comp'].infer()) + self.assertEqual(set_comp, util.Uninferable) + list_comp = next(module['list_comp'].infer()) + self.assertEqual(list_comp, util.Uninferable) + lambda_func = next(module['lambda_func'].infer()) + self.assertTrue(lambda_func) + unbound_method = next(module['unbound_method'].infer()) + self.assertTrue(unbound_method) + bound_method = next(module['bound_method'].infer()) + self.assertTrue(bound_method) + generator = next(module['generator'].infer()) + self.assertTrue(generator) + bin_op = module['bin_op'].parent.value + self.assertIs(bin_op.bool_value(), util.Uninferable) + bool_op = module['bool_op'].parent.value + self.assertEqual(bool_op.bool_value(), util.Uninferable) + callfunc = module['callfunc'].parent.value + self.assertEqual(callfunc.bool_value(), util.Uninferable) + good_callfunc = next(module['good_callfunc'].infer()) + self.assertTrue(good_callfunc.bool_value()) + compare = module['compare'].parent.value + self.assertEqual(compare.bool_value(), util.Uninferable) + + def test_bool_value_instances(self): + instances = extract_node(''' + class FalseBoolInstance(object): + def {bool}(self): + return False + class TrueBoolInstance(object): + def {bool}(self): + return True + class FalseLenInstance(object): + def __len__(self): + return 0 + class TrueLenInstance(object): + def __len__(self): + return 14 + class AlwaysTrueInstance(object): + pass + class ErrorInstance(object): + def __bool__(self): + return lala + def __len__(self): + return lala + class NonMethods(object): + __bool__ = 1 + __len__ = 2 + FalseBoolInstance() #@ + TrueBoolInstance() #@ + FalseLenInstance() #@ + TrueLenInstance() #@ + AlwaysTrueInstance() #@ + ErrorInstance() #@ + '''.format(bool=BOOL_SPECIAL_METHOD)) + expected = (False, True, False, True, True, util.Uninferable, util.Uninferable) + for node, expected_value in zip(instances, expected): + inferred = next(node.infer()) + self.assertEqual(inferred.bool_value(), expected_value) + + def test_bool_value_variable(self): + instance = extract_node(''' + class VariableBoolInstance(object): + def __init__(self, value): + self.value = value + def {bool}(self): + return self.value + + not VariableBoolInstance(True) + '''.format(bool=BOOL_SPECIAL_METHOD)) + inferred = next(instance.infer()) + self.assertIs(inferred.bool_value(), util.Uninferable) + + def test_infer_coercion_rules_for_floats_complex(self): + ast_nodes = extract_node(''' + 1 + 1.0 #@ + 1 * 1.0 #@ + 2 - 1.0 #@ + 2 / 2.0 #@ + 1 + 1j #@ + 2 * 1j #@ + 2 - 1j #@ + 3 / 1j #@ + ''') 
+ expected_values = [2.0, 1.0, 1.0, 1.0, 1 + 1j, 2j, 2 - 1j, -3j] + for node, expected in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertEqual(inferred.value, expected) + + def test_binop_list_with_elts(self): + ast_node = extract_node(''' + x = [A] * 1 + [1] + x + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertEqual(len(inferred.elts), 2) + self.assertIsInstance(inferred.elts[0], nodes.Const) + self.assertIsInstance(inferred.elts[1], nodes.Unknown) + + def test_binop_same_types(self): + ast_nodes = extract_node(''' + class A(object): + def __add__(self, other): + return 42 + 1 + 1 #@ + 1 - 1 #@ + "a" + "b" #@ + A() + A() #@ + ''') + expected_values = [2, 0, "ab", 42] + for node, expected in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected) + + def test_binop_different_types_reflected_only(self): + node = extract_node(''' + class A(object): + pass + class B(object): + def __radd__(self, other): + return other + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_binop_different_types_unknown_bases(self): + node = extract_node(''' + from foo import bar + + class A(bar): + pass + class B(object): + def __radd__(self, other): + return other + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertIs(inferred, util.Uninferable) + + def test_binop_different_types_normal_not_implemented_and_reflected(self): + node = extract_node(''' + class A(object): + def __add__(self, other): + return NotImplemented + class B(object): + def __radd__(self, other): + return other + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_binop_different_types_no_method_implemented(self): + node = extract_node(''' + class A(object): + pass + class B(object): pass + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_binop_different_types_reflected_and_normal_not_implemented(self): + node = extract_node(''' + class A(object): + def __add__(self, other): return NotImplemented + class B(object): + def __radd__(self, other): return NotImplemented + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_binop_subtype(self): + node = extract_node(''' + class A(object): pass + class B(A): + def __add__(self, other): return other + B() + A() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_binop_subtype_implemented_in_parent(self): + node = extract_node(''' + class A(object): + def __add__(self, other): return other + class B(A): pass + B() + A() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_binop_subtype_not_implemented(self): + node = extract_node(''' + class A(object): + pass + class B(A): + def __add__(self, other): return NotImplemented + B() + A() #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_binop_supertype(self): + node = extract_node(''' + class A(object): + pass + class B(A): + def __radd__(self, other): + return other + A() + B() #@ + ''') + inferred = next(node.infer()) + 
self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_binop_supertype_rop_not_implemented(self): + node = extract_node(''' + class A(object): + def __add__(self, other): + return other + class B(A): + def __radd__(self, other): + return NotImplemented + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'B') + + def test_binop_supertype_both_not_implemented(self): + node = extract_node(''' + class A(object): + def __add__(self): return NotImplemented + class B(A): + def __radd__(self, other): + return NotImplemented + A() + B() #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_binop_inferrence_errors(self): + ast_nodes = extract_node(''' + from unknown import Unknown + class A(object): + def __add__(self, other): return NotImplemented + class B(object): + def __add__(self, other): return Unknown + A() + Unknown #@ + Unknown + A() #@ + B() + A() #@ + A() + B() #@ + ''') + for node in ast_nodes: + self.assertEqual(next(node.infer()), util.Uninferable) + + def test_binop_ambiguity(self): + ast_nodes = extract_node(''' + class A(object): + def __add__(self, other): + if isinstance(other, B): + return NotImplemented + if type(other) is type(self): + return 42 + return NotImplemented + class B(A): pass + class C(object): + def __radd__(self, other): + if isinstance(other, B): + return 42 + return NotImplemented + A() + B() #@ + B() + A() #@ + A() + C() #@ + C() + A() #@ + ''') + for node in ast_nodes: + self.assertEqual(next(node.infer()), util.Uninferable) + + def test_metaclass__getitem__(self): + ast_node = extract_node(''' + class Meta(type): + def __getitem__(cls, arg): + return 24 + import six + @six.add_metaclass(Meta) + class A(object): + pass + + A['Awesome'] #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 24) + + def test_bin_op_classes(self): + ast_node = extract_node(''' + class Meta(type): + def __or__(self, other): + return 24 + import six + @six.add_metaclass(Meta) + class A(object): + pass + + A | A + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 24) + + def test_bin_op_supertype_more_complicated_example(self): + ast_node = extract_node(''' + class A(object): + def __init__(self): + self.foo = 42 + def __add__(self, other): + return other.bar + self.foo / 2 + + class B(A): + def __init__(self): + self.bar = 24 + def __radd__(self, other): + return NotImplemented + + A() + B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(int(inferred.value), 45) + + def test_aug_op_same_type_not_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return NotImplemented + A() + A() #@ + ''') + self.assertEqual(next(ast_node.infer()), util.Uninferable) + + def test_aug_op_same_type_aug_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return other + f = A() + f += A() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_aug_op_same_type_aug_not_implemented_normal_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def 
__add__(self, other): return 42 + f = A() + f += A() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_aug_op_subtype_both_not_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return NotImplemented + class B(A): + pass + b = B() + b+=A() #@ + ''') + self.assertEqual(next(ast_node.infer()), util.Uninferable) + + def test_aug_op_subtype_aug_op_is_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return 42 + class B(A): + pass + b = B() + b+=A() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_aug_op_subtype_normal_op_is_implemented(self): + ast_node = extract_node(''' + class A(object): + def __add__(self, other): return 42 + class B(A): + pass + b = B() + b+=A() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_aug_different_types_no_method_implemented(self): + ast_node = extract_node(''' + class A(object): pass + class B(object): pass + f = A() + f += B() #@ + ''') + self.assertEqual(next(ast_node.infer()), util.Uninferable) + + def test_aug_different_types_augop_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return other + class B(object): pass + f = A() + f += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'B') + + def test_aug_different_types_aug_not_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return other + class B(object): pass + f = A() + f += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'B') + + def test_aug_different_types_aug_not_implemented_rop_fallback(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return NotImplemented + class B(object): + def __radd__(self, other): return other + f = A() + f += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_augop_supertypes_none_implemented(self): + ast_node = extract_node(''' + class A(object): pass + class B(object): pass + a = A() + a += B() #@ + ''') + self.assertEqual(next(ast_node.infer()), util.Uninferable) + + def test_augop_supertypes_not_implemented_returned_for_all(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return NotImplemented + class B(object): + def __add__(self, other): return NotImplemented + a = A() + a += B() #@ + ''') + self.assertEqual(next(ast_node.infer()), util.Uninferable) + + def test_augop_supertypes_augop_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return other + class B(A): pass + a = A() + a += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'B') + + def test_augop_supertypes_reflected_binop_implemented(self): + ast_node = extract_node(''' + class A(object): + def 
__iadd__(self, other): return NotImplemented + class B(A): + def __radd__(self, other): return other + a = A() + a += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'A') + + def test_augop_supertypes_normal_binop_implemented(self): + ast_node = extract_node(''' + class A(object): + def __iadd__(self, other): return NotImplemented + def __add__(self, other): return other + class B(A): + def __radd__(self, other): return NotImplemented + + a = A() + a += B() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'B') + + @unittest.expectedFailure + def test_string_interpolation(self): + ast_nodes = extract_node(''' + "a%d%d" % (1, 2) #@ + "a%(x)s" % {"x": 42} #@ + ''') + expected = ["a12", "a42"] + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_mul_list_supports__index__(self): + ast_nodes = extract_node(''' + class Index(object): + def __index__(self): return 2 + class NotIndex(object): pass + class NotIndex2(object): + def __index__(self): return None + a = [1, 2] + a * Index() #@ + a * NotIndex() #@ + a * NotIndex2() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.List) + self.assertEqual([node.value for node in first.itered()], + [1, 2, 1, 2]) + for rest in ast_nodes[1:]: + inferred = next(rest.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_subscript_supports__index__(self): + ast_nodes = extract_node(''' + class Index(object): + def __index__(self): return 2 + class LambdaIndex(object): + __index__ = lambda self: self.foo + @property + def foo(self): return 1 + class NonIndex(object): + __index__ = lambda self: None + a = [1, 2, 3, 4] + a[Index()] #@ + a[LambdaIndex()] #@ + a[NonIndex()] #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.Const) + self.assertEqual(first.value, 3) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, nodes.Const) + self.assertEqual(second.value, 2) + self.assertRaises(InferenceError, next, ast_nodes[2].infer()) + + def test_special_method_masquerading_as_another(self): + ast_node = extract_node(''' + class Info(object): + def __add__(self, other): + return "lala" + __or__ = __add__ + + f = Info() + f | Info() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, "lala") + + def test_unary_op_assignment(self): + ast_node = extract_node(''' + class A(object): pass + def pos(self): + return 42 + A.__pos__ = pos + f = A() + +f #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def test_unary_op_classes(self): + ast_node = extract_node(''' + import six + class Meta(type): + def __invert__(self): + return 42 + @six.add_metaclass(Meta) + class A(object): + pass + ~A + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 42) + + def _slicing_test_helper(self, pairs, cls, get_elts): + for code, expected in pairs: + ast_node = extract_node(code) + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, cls) + self.assertEqual(get_elts(inferred), expected, + ast_node.as_string()) + + def test_slicing_list(self): + pairs = ( + 
("[1, 2, 3][:] #@", [1, 2, 3]), + ("[1, 2, 3][0:] #@", [1, 2, 3]), + ("[1, 2, 3][None:] #@", [1, 2, 3]), + ("[1, 2, 3][None:None] #@", [1, 2, 3]), + ("[1, 2, 3][0:-1] #@", [1, 2]), + ("[1, 2, 3][0:2] #@", [1, 2]), + ("[1, 2, 3][0:2:None] #@", [1, 2]), + ("[1, 2, 3][::] #@", [1, 2, 3]), + ("[1, 2, 3][::2] #@", [1, 3]), + ("[1, 2, 3][::-1] #@", [3, 2, 1]), + ("[1, 2, 3][0:2:2] #@", [1]), + ("[1, 2, 3, 4, 5, 6][0:4-1:2+0] #@", [1, 3]), + ) + self._slicing_test_helper( + pairs, nodes.List, + lambda inferred: [elt.value for elt in inferred.elts]) + + def test_slicing_tuple(self): + pairs = ( + ("(1, 2, 3)[:] #@", [1, 2, 3]), + ("(1, 2, 3)[0:] #@", [1, 2, 3]), + ("(1, 2, 3)[None:] #@", [1, 2, 3]), + ("(1, 2, 3)[None:None] #@", [1, 2, 3]), + ("(1, 2, 3)[0:-1] #@", [1, 2]), + ("(1, 2, 3)[0:2] #@", [1, 2]), + ("(1, 2, 3)[0:2:None] #@", [1, 2]), + ("(1, 2, 3)[::] #@", [1, 2, 3]), + ("(1, 2, 3)[::2] #@", [1, 3]), + ("(1, 2, 3)[::-1] #@", [3, 2, 1]), + ("(1, 2, 3)[0:2:2] #@", [1]), + ("(1, 2, 3, 4, 5, 6)[0:4-1:2+0] #@", [1, 3]), + ) + self._slicing_test_helper( + pairs, nodes.Tuple, + lambda inferred: [elt.value for elt in inferred.elts]) + + def test_slicing_str(self): + pairs = ( + ("'123'[:] #@", "123"), + ("'123'[0:] #@", "123"), + ("'123'[None:] #@", "123"), + ("'123'[None:None] #@", "123"), + ("'123'[0:-1] #@", "12"), + ("'123'[0:2] #@", "12"), + ("'123'[0:2:None] #@", "12"), + ("'123'[::] #@", "123"), + ("'123'[::2] #@", "13"), + ("'123'[::-1] #@", "321"), + ("'123'[0:2:2] #@", "1"), + ("'123456'[0:4-1:2+0] #@", "13"), + ) + self._slicing_test_helper( + pairs, nodes.Const, lambda inferred: inferred.value) + + def test_invalid_slicing_primaries(self): + examples = [ + "(lambda x: x)[1:2]", + "1[2]", + "(1, 2, 3)[a:]", + "(1, 2, 3)[object:object]", + "(1, 2, 3)[1:object]", + 'enumerate[2]' + ] + for code in examples: + node = extract_node(code) + self.assertRaises(InferenceError, next, node.infer()) + + def test_instance_slicing(self): + ast_nodes = extract_node(''' + class A(object): + def __getitem__(self, index): + return [1, 2, 3, 4, 5][index] + A()[1:] #@ + A()[:2] #@ + A()[1:4] #@ + ''') + expected_values = [ + [2, 3, 4, 5], + [1, 2], + [2, 3, 4], + ] + for expected, node in zip(expected_values, ast_nodes): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertEqual([elt.value for elt in inferred.elts], expected) + + def test_instance_slicing_slices(self): + ast_node = extract_node(''' + class A(object): + def __getitem__(self, index): + return index + A()[1:] #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Slice) + self.assertEqual(inferred.lower.value, 1) + self.assertIsNone(inferred.upper) + + def test_instance_slicing_fails(self): + ast_nodes = extract_node(''' + class A(object): + def __getitem__(self, index): + return 1[index] + A()[4:5] #@ + A()[2:] #@ + ''') + for node in ast_nodes: + self.assertEqual(next(node.infer()), util.Uninferable) + + def test_type__new__with_metaclass(self): + ast_node = extract_node(''' + class Metaclass(type): + pass + class Entity(object): + pass + type.__new__(Metaclass, 'NewClass', (Entity,), {'a': 1}) #@ + ''') + inferred = next(ast_node.infer()) + + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.name, 'NewClass') + metaclass = inferred.metaclass() + self.assertEqual(metaclass, inferred.root()['Metaclass']) + ancestors = list(inferred.ancestors()) + self.assertEqual(len(ancestors), 2) + self.assertEqual(ancestors[0], inferred.root()['Entity']) + 
attributes = inferred.getattr('a') + self.assertEqual(len(attributes), 1) + self.assertIsInstance(attributes[0], nodes.Const) + self.assertEqual(attributes[0].value, 1) + + def test_type__new__not_enough_arguments(self): + ast_nodes = extract_node(''' + type.__new__(1) #@ + type.__new__(1, 2) #@ + type.__new__(1, 2, 3) #@ + type.__new__(1, 2, 3, 4, 5) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + + def test_type__new__invalid_mcs_argument(self): + ast_nodes = extract_node(''' + class Class(object): pass + type.__new__(1, 2, 3, 4) #@ + type.__new__(Class, 2, 3, 4) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + + def test_type__new__invalid_name(self): + ast_nodes = extract_node(''' + class Class(type): pass + type.__new__(Class, object, 1, 2) #@ + type.__new__(Class, 1, 1, 2) #@ + type.__new__(Class, [], 1, 2) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + + def test_type__new__invalid_bases(self): + ast_nodes = extract_node(''' + type.__new__(type, 'a', 1, 2) #@ + type.__new__(type, 'a', [], 2) #@ + type.__new__(type, 'a', {}, 2) #@ + type.__new__(type, 'a', (1, ), 2) #@ + type.__new__(type, 'a', (object, 1), 2) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + + def test_type__new__invalid_attrs(self): + ast_nodes = extract_node(''' + type.__new__(type, 'a', (), ()) #@ + type.__new__(type, 'a', (), object) #@ + type.__new__(type, 'a', (), 1) #@ + type.__new__(type, 'a', (), {object: 1}) #@ + type.__new__(type, 'a', (), {1:2, "a":5}) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, Instance) + + def test_type__new__metaclass_lookup(self): + ast_node = extract_node(''' + class Metaclass(type): + def test(cls): pass + @classmethod + def test1(cls): pass + attr = 42 + type.__new__(Metaclass, 'A', (), {}) #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + test = inferred.getattr('test') + self.assertEqual(len(test), 1) + self.assertIsInstance(test[0], BoundMethod) + self.assertIsInstance(test[0].bound, nodes.ClassDef) + self.assertEqual(test[0].bound, inferred) + test1 = inferred.getattr('test1') + self.assertEqual(len(test1), 1) + self.assertIsInstance(test1[0], BoundMethod) + self.assertIsInstance(test1[0].bound, nodes.ClassDef) + self.assertEqual(test1[0].bound, inferred.metaclass()) + attr = inferred.getattr('attr') + self.assertEqual(len(attr), 1) + self.assertIsInstance(attr[0], nodes.Const) + self.assertEqual(attr[0].value, 42) + + def test_type__new__metaclass_and_ancestors_lookup(self): + ast_node = extract_node(''' + class Book(object): + title = 'Ubik' + class MetaBook(type): + title = 'Grimus' + type.__new__(MetaBook, 'book', (Book, ), {'title':'Catch 22'}) #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + titles = [title.value for title in inferred.igetattr('title')] + self.assertEqual(titles, ['Catch 22', 'Ubik', 'Grimus']) + + @unittest.expectedFailure + def test_function_metaclasses(self): + # These are not supported right now, although + # they will be in the future. 
+ ast_node = extract_node(''' + import six + + class BookMeta(type): + author = 'Rushdie' + + def metaclass_function(*args): + return BookMeta + + @six.add_metaclass(metaclass_function) + class Book(object): + pass + Book #@ + ''') + inferred = next(ast_node.infer()) + metaclass = inferred.metaclass() + self.assertIsInstance(metaclass, nodes.ClassDef) + self.assertEqual(metaclass.name, 'BookMeta') + author = next(inferred.igetattr('author')) + self.assertIsInstance(author, nodes.Const) + self.assertEqual(author.value, 'Rushdie') + + def test_subscript_inference_error(self): + # Used to raise StopIteration + ast_node = extract_node(''' + class AttributeDict(dict): + def __getitem__(self, name): + return self + flow = AttributeDict() + flow['app'] = AttributeDict() + flow['app']['config'] = AttributeDict() + flow['app']['config']['doffing'] = AttributeDict() #@ + ''') + self.assertIsNone(helpers.safe_infer(ast_node.targets[0])) + + def test_classmethod_inferred_by_context(self): + ast_node = extract_node(''' + class Super(object): + def instance(cls): + return cls() + instance = classmethod(instance) + + class Sub(Super): + def method(self): + return self + + # should see the Sub.instance() is returning a Sub + # instance, not a Super instance + Sub.instance().method() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, Instance) + self.assertEqual(inferred.name, 'Sub') + + def test_infer_call_result_invalid_dunder_call_on_instance(self): + ast_nodes = extract_node(''' + class A: + __call__ = 42 + class B: + __call__ = A() + class C: + __call = None + A() #@ + B() #@ + C() #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertRaises(InferenceError, next, inferred.infer_call_result(node)) + + def test_context_call_for_context_managers(self): + ast_nodes = extract_node(''' + class A: + def __enter__(self): + return self + class B: + __enter__ = lambda self: self + class C: + @property + def a(self): return A() + def __enter__(self): + return self.a + with A() as a: + a #@ + with B() as b: + b #@ + with C() as c: + c #@ + ''') + first_a = next(ast_nodes[0].infer()) + self.assertIsInstance(first_a, Instance) + self.assertEqual(first_a.name, 'A') + second_b = next(ast_nodes[1].infer()) + self.assertIsInstance(second_b, Instance) + self.assertEqual(second_b.name, 'B') + third_c = next(ast_nodes[2].infer()) + self.assertIsInstance(third_c, Instance) + self.assertEqual(third_c.name, 'A') + + def test_metaclass_subclasses_arguments_are_classes_not_instances(self): + ast_node = extract_node(''' + class A(type): + def test(cls): + return cls + import six + @six.add_metaclass(A) + class B(object): + pass + + B.test() #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.name, 'B') + + def test_infer_cls_in_class_methods(self): + ast_nodes = extract_node(''' + class A(type): + def __call__(cls): + cls #@ + class B(object): + def __call__(cls): + cls #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.ClassDef) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, Instance) + + @unittest.expectedFailure + def test_metaclass_arguments_are_classes_not_instances(self): + ast_node = extract_node(''' + class A(type): + def test(cls): return cls + A.test() #@ + ''') + # This is not supported yet + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.name, 'A') + + 
@test_utils.require_version(minver='3.0') + def test_metaclass_with_keyword_args(self): + ast_node = extract_node(''' + class TestMetaKlass(type): + def __new__(mcs, name, bases, ns, kwo_arg): + return super().__new__(mcs, name, bases, ns) + + class TestKlass(metaclass=TestMetaKlass, kwo_arg=42): #@ + pass + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + + def test_delayed_attributes_without_slots(self): + ast_node = extract_node(''' + class A(object): + __slots__ = ('a', ) + a = A() + a.teta = 24 + a.a = 24 + a #@ + ''') + inferred = next(ast_node.infer()) + with self.assertRaises(exceptions.NotFoundError): + inferred.getattr('teta') + inferred.getattr('a') + + @test_utils.require_version(maxver='3.0') + def test_delayed_attributes_with_old_style_classes(self): + ast_node = extract_node(''' + class A: + __slots__ = ('a', ) + a = A() + a.teta = 42 + a #@ + ''') + next(ast_node.infer()).getattr('teta') + + def test_lambda_as_methods(self): + ast_node = extract_node(''' + class X: + m = lambda self, arg: self.z + arg + z = 24 + + X().m(4) #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 28) + + def test_inner_value_redefined_by_subclass(self): + ast_node = extract_node(''' + class X(object): + M = lambda self, arg: "a" + x = 24 + def __init__(self): + x = 24 + self.m = self.M(x) + + class Y(X): + M = lambda self, arg: arg + 1 + def blurb(self): + self.m #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 25) + + @unittest.expectedFailure + def test_inner_value_redefined_by_subclass_with_mro(self): + # This might work, but it currently doesn't due to not being able + # to reuse inference contexts. 
+ ast_node = extract_node(''' + class X(object): + M = lambda self, arg: arg + 1 + x = 24 + def __init__(self): + y = self + self.m = y.M(1) + y.z + + class C(object): + z = 24 + + class Y(X, C): + M = lambda self, arg: arg + 1 + def blurb(self): + self.m #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, 25) + + def test_getitem_of_class_raised_type_error(self): + # Test that we wrap an AttributeInferenceError + # and reraise it as a TypeError in Class.getitem + node = extract_node(''' + def test(): + yield + test() + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.AstroidTypeError): + inferred.getitem(nodes.Const('4')) + + +class GetattrTest(unittest.TestCase): + + def test_yes_when_unknown(self): + ast_nodes = extract_node(''' + from missing import Missing + getattr(1, Unknown) #@ + getattr(Unknown, 'a') #@ + getattr(Unknown, Unknown) #@ + getattr(Unknown, Unknown, Unknown) #@ + + getattr(Missing, 'a') #@ + getattr(Missing, Missing) #@ + getattr('a', Missing) #@ + getattr('a', Missing, Missing) #@ + ''') + for node in ast_nodes[:4]: + self.assertRaises(InferenceError, next, node.infer()) + + for node in ast_nodes[4:]: + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable, node) + + def test_attrname_not_string(self): + ast_nodes = extract_node(''' + getattr(1, 1) #@ + c = int + getattr(1, c) #@ + ''') + for node in ast_nodes: + self.assertRaises(InferenceError, next, node.infer()) + + def test_attribute_missing(self): + ast_nodes = extract_node(''' + getattr(1, 'ala') #@ + getattr(int, 'ala') #@ + getattr(float, 'bala') #@ + getattr({}, 'portocala') #@ + ''') + for node in ast_nodes: + self.assertRaises(InferenceError, next, node.infer()) + + def test_default(self): + ast_nodes = extract_node(''' + getattr(1, 'ala', None) #@ + getattr(int, 'bala', int) #@ + getattr(int, 'bala', getattr(int, 'portocala', None)) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.Const) + self.assertIsNone(first.value) + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, nodes.ClassDef) + self.assertEqual(second.qname(), "%s.int" % BUILTINS) + + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, nodes.Const) + self.assertIsNone(third.value) + + def test_lookup(self): + ast_nodes = extract_node(''' + class A(object): + def test(self): pass + class B(A): + def test_b(self): pass + class C(A): pass + class E(C, B): + def test_e(self): pass + + getattr(A(), 'test') #@ + getattr(A, 'test') #@ + getattr(E(), 'test_b') #@ + getattr(E(), 'test') #@ + + class X(object): + def test(self): + getattr(self, 'test') #@ + ''') + + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, BoundMethod) + self.assertEqual(first.bound.name, 'A') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, UnboundMethod) + self.assertIsInstance(second.parent, nodes.ClassDef) + self.assertEqual(second.parent.name, 'A') + + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, BoundMethod) + # Bound to E, but the provider is B. 
+ self.assertEqual(third.bound.name, 'E') + self.assertEqual(third._proxied._proxied.parent.name, 'B') + + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, BoundMethod) + self.assertEqual(fourth.bound.name, 'E') + self.assertEqual(third._proxied._proxied.parent.name, 'B') + + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, BoundMethod) + self.assertEqual(fifth.bound.name, 'X') + + def test_lambda(self): + node = extract_node(''' + getattr(lambda x: x, 'f') #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + +class HasattrTest(unittest.TestCase): + + def test_inference_errors(self): + ast_nodes = extract_node(''' + from missing import Missing + + hasattr(Unknown, 'ala') #@ + + hasattr(Missing, 'bala') #@ + hasattr('portocala', Missing) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_attribute_is_missing(self): + ast_nodes = extract_node(''' + class A: pass + hasattr(int, 'ala') #@ + hasattr({}, 'bala') #@ + hasattr(A(), 'portocala') #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertFalse(inferred.value) + + def test_attribute_is_not_missing(self): + ast_nodes = extract_node(''' + class A(object): + def test(self): pass + class B(A): + def test_b(self): pass + class C(A): pass + class E(C, B): + def test_e(self): pass + + hasattr(A(), 'test') #@ + hasattr(A, 'test') #@ + hasattr(E(), 'test_b') #@ + hasattr(E(), 'test') #@ + + class X(object): + def test(self): + hasattr(self, 'test') #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertTrue(inferred.value) + + def test_lambda(self): + node = extract_node(''' + hasattr(lambda x: x, 'f') #@ + ''') + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + +class BoolOpTest(unittest.TestCase): + + def test_bool_ops(self): + expected = [ + ('1 and 2', 2), + ('0 and 2', 0), + ('1 or 2', 1), + ('0 or 2', 2), + ('0 or 0 or 1', 1), + ('1 and 2 and 3', 3), + ('1 and 2 or 3', 2), + ('1 and 0 or 3', 3), + ('1 or 0 and 2', 1), + ('(1 and 2) and (2 and 3)', 3), + ('not 2 and 3', False), + ('2 and not 3', False), + ('not 0 and 3', 3), + ('True and False', False), + ('not (True or False) and True', False), + ] + for code, expected_value in expected: + node = extract_node(code) + inferred = next(node.infer()) + self.assertEqual(inferred.value, expected_value) + + def test_yes_when_unknown(self): + ast_nodes = extract_node(''' + from unknown import unknown, any, not_any + 0 and unknown #@ + unknown or 0 #@ + any or not_any and unknown #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_other_nodes(self): + ast_nodes = extract_node(''' + def test(): pass + test and 0 #@ + 1 and test #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertEqual(first.value, 0) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, nodes.FunctionDef) + self.assertEqual(second.name, 'test') + + +class TestCallable(unittest.TestCase): + + def test_callable(self): + expected = [ + ('callable(len)', True), + ('callable("a")', False), + ('callable(callable)', True), + ('callable(lambda x, y: x+y)', True), + ('import os; __(callable(os))', False), + ('callable(int)', True), + (''' + def test(): pass + callable(test) #@''', True), + (''' + class C1: + def meth(self): 
pass + callable(C1) #@''', True), + ] + for code, expected_value in expected: + node = extract_node(code) + inferred = next(node.infer()) + self.assertEqual(inferred.value, expected_value) + + def test_callable_methods(self): + ast_nodes = extract_node(''' + class C: + def test(self): pass + @staticmethod + def static(): pass + @classmethod + def class_method(cls): pass + def __call__(self): pass + class D(C): + pass + class NotReallyCallableDueToPythonMisfeature(object): + __call__ = 42 + callable(C.test) #@ + callable(C.static) #@ + callable(C.class_method) #@ + callable(C().test) #@ + callable(C().static) #@ + callable(C().class_method) #@ + C #@ + C() #@ + NotReallyCallableDueToPythonMisfeature() #@ + staticmethod #@ + classmethod #@ + property #@ + D #@ + D() #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertTrue(inferred) + + def test_inference_errors(self): + ast_nodes = extract_node(''' + from unknown import unknown + callable(unknown) #@ + def test(): + return unknown + callable(test()) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + def test_not_callable(self): + ast_nodes = extract_node(''' + callable("") #@ + callable(1) #@ + callable(True) #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertFalse(inferred.value) + + +class TestBool(unittest.TestCase): + + def test_bool(self): + pairs = [ + ('bool()', False), + ('bool(1)', True), + ('bool(0)', False), + ('bool([])', False), + ('bool([1])', True), + ('bool({})', False), + ('bool(True)', True), + ('bool(False)', False), + ('bool(None)', False), + ('from unknown import Unknown; __(bool(Unknown))', util.Uninferable), + ] + for code, expected in pairs: + node = extract_node(code) + inferred = next(node.infer()) + if expected is util.Uninferable: + self.assertEqual(expected, inferred) + else: + self.assertEqual(inferred.value, expected) + + def test_bool_bool_special_method(self): + ast_nodes = extract_node(''' + class FalseClass: + def {method}(self): + return False + class TrueClass: + def {method}(self): + return True + class C(object): + def __call__(self): + return False + class B(object): + {method} = C() + class LambdaBoolFalse(object): + {method} = lambda self: self.foo + @property + def foo(self): return 0 + class FalseBoolLen(object): + __len__ = lambda self: self.foo + @property + def foo(self): return 0 + bool(FalseClass) #@ + bool(TrueClass) #@ + bool(FalseClass()) #@ + bool(TrueClass()) #@ + bool(B()) #@ + bool(LambdaBoolFalse()) #@ + bool(FalseBoolLen()) #@ + '''.format(method=BOOL_SPECIAL_METHOD)) + expected = [True, True, False, True, False, False, False] + for node, expected_value in zip(ast_nodes, expected): + inferred = next(node.infer()) + self.assertEqual(inferred.value, expected_value) + + def test_bool_instance_not_callable(self): + ast_nodes = extract_node(''' + class BoolInvalid(object): + {method} = 42 + class LenInvalid(object): + __len__ = "a" + bool(BoolInvalid()) #@ + bool(LenInvalid()) #@ + '''.format(method=BOOL_SPECIAL_METHOD)) + for node in ast_nodes: + inferred = next(node.infer()) + self.assertEqual(inferred, util.Uninferable) + + +class TestType(unittest.TestCase): + + def test_type(self): + pairs = [ + ('type(1)', 'int'), + ('type(type)', 'type'), + ('type(None)', 'NoneType'), + ('type(object)', 'type'), + ('type(dict())', 'dict'), + ('type({})', 'dict'), + ('type(frozenset())', 'frozenset'), + ] + for code, expected in pairs: + node = extract_node(code) + inferred = 
next(node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.name, expected) + + +class ArgumentsTest(unittest.TestCase): + + @staticmethod + def _get_dict_value(inferred): + items = inferred.items + return sorted((key.value, value.value) for key, value in items) + + @staticmethod + def _get_tuple_value(inferred): + elts = inferred.elts + return tuple(elt.value for elt in elts) + + def test_args(self): + expected_values = [(), (1, ), (2, 3), (4, 5), + (3, ), (), (3, 4, 5), + (), (), (4, ), (4, 5), + (), (3, ), (), (), (3, ), (42, )] + ast_nodes = extract_node(''' + def func(*args): + return args + func() #@ + func(1) #@ + func(2, 3) #@ + func(*(4, 5)) #@ + def func(a, b, *args): + return args + func(1, 2, 3) #@ + func(1, 2) #@ + func(1, 2, 3, 4, 5) #@ + def func(a, b, c=42, *args): + return args + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, 3, 4) #@ + func(1, 2, 3, 4, 5) #@ + func = lambda a, b, *args: args + func(1, 2) #@ + func(1, 2, 3) #@ + func = lambda a, b=42, *args: args + func(1) #@ + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, *(42, )) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + @test_utils.require_version('3.5') + def test_multiple_starred_args(self): + expected_values = [ + (1, 2, 3), + (1, 4, 2, 3, 5, 6, 7), + ] + ast_nodes = extract_node(''' + def func(a, b, *args): + return args + func(1, 2, *(1, ), *(2, 3)) #@ + func(1, 2, *(1, ), 4, *(2, 3), 5, *(6, 7)) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertEqual(self._get_tuple_value(inferred), expected_value) + + def test_defaults(self): + expected_values = [42, 3, 41, 42] + ast_nodes = extract_node(''' + def func(a, b, c=42, *args): + return c + func(1, 2) #@ + func(1, 2, 3) #@ + func(1, 2, c=41) #@ + func(1, 2, 42, 41) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + @test_utils.require_version('3.0') + def test_kwonly_args(self): + expected_values = [24, 24, 42, 23, 24, 24, 54] + ast_nodes = extract_node(''' + def test(*, f, b): return f + test(f=24, b=33) #@ + def test(a, *, f): return f + test(1, f=24) #@ + def test(a, *, f=42): return f + test(1) #@ + test(1, f=23) #@ + def test(a, b, c=42, *args, f=24): + return f + test(1, 2, 3) #@ + test(1, 2, 3, 4) #@ + test(1, 2, 3, 4, 5, f=54) #@ + ''') + for node, expected_value in zip(ast_nodes, expected_values): + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected_value) + + def test_kwargs(self): + expected = [ + [('a', 1), ('b', 2), ('c', 3)], + [('a', 1)], + [('a', 'b')], + ] + ast_nodes = extract_node(''' + def test(**kwargs): + return kwargs + test(a=1, b=2, c=3) #@ + test(a=1) #@ + test(**{'a': 'b'}) #@ ''') for node, expected_value in zip(ast_nodes, expected): inferred = next(node.infer()) @@ -1901,7 +4009,7 @@ self.assertEqual(value, expected_value) def test_kwargs_and_other_named_parameters(self): - ast_nodes = test_utils.extract_node(''' + ast_nodes = extract_node(''' def test(a=42, b=24, **kwargs): return kwargs test(42, 24, c=3, d=4) #@ @@ -1923,7 +4031,7 @@ def test_kwargs_access_by_name(self): expected_values = 
[42, 42, 42, 24] - ast_nodes = test_utils.extract_node(''' + ast_nodes = extract_node(''' def test(**kwargs): return kwargs['f'] test(f=42) #@ @@ -1934,58 +4042,9 @@ test(l=24) #@ ''') for ast_node, value in zip(ast_nodes, expected_values): - inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, nodes.Const) - self.assertEqual(inferred.value, value) - - def test_infer_call_result_invalid_dunder_call_on_instance(self): - ast_nodes = test_utils.extract_node(''' - class A: - __call__ = 42 - class B: - __call__ = A() - class C: - __call = None - A() #@ - B() #@ - C() #@ - ''') - for node in ast_nodes: - inferred = next(node.infer()) - self.assertRaises(InferenceError, next, inferred.infer_call_result(node)) - - - def test_subscript_inference_error(self): - # Used to raise StopIteration - ast_node = test_utils.extract_node(''' - class AttributeDict(dict): - def __getitem__(self, name): - return self - flow = AttributeDict() - flow['app'] = AttributeDict() - flow['app']['config'] = AttributeDict() - flow['app']['config']['doffing'] = AttributeDict() #@ - ''') - self.assertIsNone(util.safe_infer(ast_node.targets[0])) - - def test_classmethod_inferred_by_context(self): - ast_node = test_utils.extract_node(''' - class Super(object): - def instance(cls): - return cls() - instance = classmethod(instance) - - class Sub(Super): - def method(self): - return self - - # should see the Sub.instance() is returning a Sub - # instance, not a Super instance - Sub.instance().method() #@ - ''') - inferred = next(ast_node.infer()) - self.assertIsInstance(inferred, Instance) - self.assertEqual(inferred.name, 'Sub') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, value) @test_utils.require_version('3.5') def test_multiple_kwargs(self): @@ -1996,7 +4055,7 @@ ('d', 4), ('f', 42), ] - ast_node = test_utils.extract_node(''' + ast_node = extract_node(''' def test(**kwargs): return kwargs test(a=1, b=2, **{'c': 3}, **{'d': 4}, f=42) #@ @@ -2006,8 +4065,8 @@ value = self._get_dict_value(inferred) self.assertEqual(value, expected_value) - def test_kwargs_are_overriden(self): - ast_nodes = test_utils.extract_node(''' + def test_kwargs_are_overridden(self): + ast_nodes = extract_node(''' def test(f): return f test(f=23, **{'f': 34}) #@ @@ -2017,10 +4076,10 @@ ''') for ast_node in ast_nodes: inferred = next(ast_node.infer()) - self.assertEqual(inferred, util.YES) + self.assertEqual(inferred, util.Uninferable) def test_fail_to_infer_args(self): - ast_nodes = test_utils.extract_node(''' + ast_nodes = extract_node(''' def test(a, **kwargs): return a test(*missing) #@ test(*object) #@ @@ -2047,7 +4106,70 @@ ''') for node in ast_nodes: inferred = next(node.infer()) - self.assertEqual(inferred, util.YES) + self.assertEqual(inferred, util.Uninferable) + + +class SliceTest(unittest.TestCase): + + def test_slice(self): + ast_nodes = [ + ('[1, 2, 3][slice(None)]', [1, 2, 3]), + ('[1, 2, 3][slice(None, None)]', [1, 2, 3]), + ('[1, 2, 3][slice(None, None, None)]', [1, 2, 3]), + ('[1, 2, 3][slice(1, None)]', [2, 3]), + ('[1, 2, 3][slice(None, 1, None)]', [1]), + ('[1, 2, 3][slice(0, 1)]', [1]), + ('[1, 2, 3][slice(0, 3, 2)]', [1, 3]), + ] + for node, expected_value in ast_nodes: + ast_node = extract_node("__({})".format(node)) + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.List) + self.assertEqual([elt.value for elt in inferred.elts], expected_value) + + def test_slice_inference_error(self): + ast_nodes = extract_node(''' + 
from unknown import unknown + [1, 2, 3][slice(None, unknown, unknown)] #@ + [1, 2, 3][slice(None, missing, missing)] #@ + [1, 2, 3][slice(object, list, tuple)] #@ + [1, 2, 3][slice(b'a')] #@ + [1, 2, 3][slice(1, 'aa')] #@ + [1, 2, 3][slice(1, 2.0, 3.0)] #@ + [1, 2, 3][slice()] #@ + [1, 2, 3][slice(1, 2, 3, 4)] #@ + ''') + for node in ast_nodes: + self.assertRaises(InferenceError, next, node.infer()) + + def test_slice_attributes(self): + ast_nodes = [ + ('slice(2, 3, 4)', (2, 3, 4)), + ('slice(None, None, 4)', (None, None, 4)), + ('slice(None, 1, None)', (None, 1, None)), + ] + for code, values in ast_nodes: + lower, upper, step = values + node = extract_node(code) + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Slice) + lower_value = next(inferred.igetattr('start')) + self.assertIsInstance(lower_value, nodes.Const) + self.assertEqual(lower_value.value, lower) + higher_value = next(inferred.igetattr('stop')) + self.assertIsInstance(higher_value, nodes.Const) + self.assertEqual(higher_value.value, upper) + step_value = next(inferred.igetattr('step')) + self.assertIsInstance(step_value, nodes.Const) + self.assertEqual(step_value.value, step) + self.assertEqual(inferred.pytype(), '%s.slice' % BUILTINS) + + def test_slice_type(self): + ast_node = extract_node('type(slice(None, None, None))') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, nodes.ClassDef) + self.assertEqual(inferred.name, 'slice') + class CallSiteTest(unittest.TestCase): @@ -2056,7 +4178,7 @@ return arguments.CallSite.from_call(call) def _test_call_site_pair(self, code, expected_args, expected_keywords): - ast_node = test_utils.extract_node(code) + ast_node = extract_node(code) call_site = self._call_site_from_call(ast_node) self.assertEqual(len(call_site.positional_arguments), len(expected_args)) self.assertEqual([arg.value for arg in call_site.positional_arguments], @@ -2074,18 +4196,18 @@ def test_call_site_starred_args(self): pairs = [ ( - "f(*(1, 2), *(2, 3), *(3, 4), **{'a':1}, **{'b': 2})", - [1, 2, 2, 3, 3, 4], - {'a': 1, 'b': 2} + "f(*(1, 2), *(2, 3), *(3, 4), **{'a':1}, **{'b': 2})", + [1, 2, 2, 3, 3, 4], + {'a': 1, 'b': 2} ), ( - "f(1, 2, *(3, 4), 5, *(6, 7), f=24, **{'c':3})", - [1, 2, 3, 4, 5, 6, 7], - {'f':24, 'c': 3}, + "f(1, 2, *(3, 4), 5, *(6, 7), f=24, **{'c':3})", + [1, 2, 3, 4, 5, 6, 7], + {'f':24, 'c': 3}, ), # Too many fs passed into. 
( - "f(f=24, **{'f':24})", [], {}, + "f(f=24, **{'f':24})", [], {}, ), ] self._test_call_site(pairs) @@ -2093,20 +4215,20 @@ def test_call_site(self): pairs = [ ( - "f(1, 2)", [1, 2], {} + "f(1, 2)", [1, 2], {} ), ( - "f(1, 2, *(1, 2))", [1, 2, 1, 2], {} + "f(1, 2, *(1, 2))", [1, 2, 1, 2], {} ), ( - "f(a=1, b=2, c=3)", [], {'a':1, 'b':2, 'c':3} + "f(a=1, b=2, c=3)", [], {'a':1, 'b':2, 'c':3} ) ] self._test_call_site(pairs) def _test_call_site_valid_arguments(self, values, invalid): for value in values: - ast_node = test_utils.extract_node(value) + ast_node = extract_node(value) call_site = self._call_site_from_call(ast_node) self.assertEqual(call_site.has_invalid_arguments(), invalid) @@ -2121,7 +4243,7 @@ self._test_call_site_valid_arguments(values, invalid=False) def test_duplicated_keyword_arguments(self): - ast_node = test_utils.extract_node('f(f=24, **{"f": 25})') + ast_node = extract_node('f(f=24, **{"f": 25})') site = self._call_site_from_call(ast_node) self.assertIn('f', site.duplicated_keywords) diff -Nru astroid-1.4.9/astroid/tests/unittest_lookup.py astroid-1.5.3/astroid/tests/unittest_lookup.py --- astroid-1.4.9/astroid/tests/unittest_lookup.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_lookup.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,11 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2007-2013 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """tests for the astroid variable lookup capabilities """ import functools @@ -25,7 +16,6 @@ from astroid import exceptions from astroid import nodes from astroid import scoped_nodes -from astroid import test_utils from astroid import util from astroid.tests import resources @@ -57,12 +47,11 @@ if sys.version_info < (3, 0): self.assertEqual(len(astroid.lookup('b')[1]), 1) self.assertEqual(len(astroid.lookup('a')[1]), 1) - b = astroid._locals['b'][1] + b = astroid.locals['b'][1] else: self.assertEqual(len(astroid.lookup('b')[1]), 1) self.assertEqual(len(astroid.lookup('a')[1]), 1) - b = astroid._locals['b'][0] - + b = astroid.locals['b'][0] stmts = a.lookup('a')[1] self.assertEqual(len(stmts), 1) self.assertEqual(b.lineno, 6) @@ -71,7 +60,7 @@ self.assertEqual(b_value.value, 1) # c self.assertRaises(StopIteration, functools.partial(next, b_infer)) - func = astroid._locals['func'][0] + func = astroid.locals['func'][0] self.assertEqual(len(func.lookup('c')[1]), 1) def test_module(self): @@ -97,8 +86,8 @@ pass ''' astroid = builder.parse(code, __name__) - cls1 = astroid._locals['A'][0] - cls2 = astroid._locals['A'][1] + cls1 = astroid.locals['A'][0] + cls2 = astroid.locals['A'][1] name = next(cls2.nodes_of_class(nodes.Name)) self.assertEqual(next(name.infer()), cls1) @@ -173,9 +162,9 @@ """) var = astroid.body[1].value if sys.version_info < (3, 0): - self.assertEqual(var.inferred(), [util.YES]) + self.assertEqual(var.inferred(), [util.Uninferable]) else: - self.assertRaises(exceptions.UnresolvableName, var.inferred) + self.assertRaises(exceptions.NameInferenceError, var.inferred) def test_dict_comps(self): astroid = builder.parse(""" @@ -211,7 +200,7 @@ var """) var = astroid.body[1].value - self.assertRaises(exceptions.UnresolvableName, var.inferred) + self.assertRaises(exceptions.NameInferenceError, var.inferred) def test_generator_attributes(self): tree = builder.parse(""" @@ -251,7 +240,7 @@ self.assertTrue(p2.getattr('__name__')) self.assertTrue(astroid['NoName'].getattr('__name__')) p3 = next(astroid['p3'].infer()) - self.assertRaises(exceptions.NotFoundError, p3.getattr, '__name__') + self.assertRaises(exceptions.AttributeInferenceError, p3.getattr, '__name__') def test_function_module_special(self): astroid = builder.parse(''' @@ -268,6 +257,7 @@ self.assertEqual(len(intstmts), 1) self.assertIsInstance(intstmts[0], nodes.ClassDef) self.assertEqual(intstmts[0].name, 'int') + # pylint: disable=no-member; union type in const_factory, this shouldn't happen self.assertIs(intstmts[0], nodes.const_factory(1)._proxied) def test_decorator_arguments_lookup(self): @@ -284,7 +274,7 @@ def test(self): pass ''' - member = test_utils.extract_node(code, __name__).targets[0] + member = builder.extract_node(code, __name__).targets[0] it = member.infer() obj = next(it) self.assertIsInstance(obj, nodes.Const) @@ -301,7 +291,7 @@ def funcA(): return 4 ''' - decname = test_utils.extract_node(code, __name__) + decname = builder.extract_node(code, __name__) it = decname.infer() obj = next(it) self.assertIsInstance(obj, nodes.FunctionDef) diff -Nru astroid-1.4.9/astroid/tests/unittest_manager.py astroid-1.5.3/astroid/tests/unittest_manager.py --- astroid-1.4.9/astroid/tests/unittest_manager.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_manager.py 2017-06-03 
13:47:01.000000000 +0000 @@ -1,27 +1,20 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006, 2009-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import os import platform +import site import sys import unittest +import pkg_resources import six +import astroid from astroid import exceptions from astroid import manager from astroid.tests import resources @@ -51,25 +44,25 @@ def test_ast_from_file(self): filepath = unittest.__file__ - astroid = self.manager.ast_from_file(filepath) - self.assertEqual(astroid.name, 'unittest') + ast = self.manager.ast_from_file(filepath) + self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_cache(self): filepath = unittest.__file__ self.manager.ast_from_file(filepath) - astroid = self.manager.ast_from_file('unhandledName', 'unittest') - self.assertEqual(astroid.name, 'unittest') + ast = self.manager.ast_from_file('unhandledName', 'unittest') + self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_astro_builder(self): filepath = unittest.__file__ - astroid = self.manager.ast_from_file(filepath, None, True, True) - self.assertEqual(astroid.name, 'unittest') + ast = self.manager.ast_from_file(filepath, None, True, True) + self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_file_name_astro_builder_exception(self): - self.assertRaises(exceptions.AstroidBuildingException, + self.assertRaises(exceptions.AstroidBuildingError, self.manager.ast_from_file, 'unhandledName') def test_do_not_expose_main(self): @@ -78,21 +71,74 @@ self.assertEqual(obj.items(), []) def test_ast_from_module_name(self): - astroid = self.manager.ast_from_module_name('unittest') - self.assertEqual(astroid.name, 'unittest') + ast = self.manager.ast_from_module_name('unittest') + self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_module_name_not_python_source(self): - astroid = self.manager.ast_from_module_name('time') - self.assertEqual(astroid.name, 'time') + ast = self.manager.ast_from_module_name('time') + self.assertEqual(ast.name, 'time') self.assertIn('time', self.manager.astroid_cache) - self.assertEqual(astroid.pure_python, False) + self.assertEqual(ast.pure_python, False) def test_ast_from_module_name_astro_builder_exception(self): - self.assertRaises(exceptions.AstroidBuildingException, + self.assertRaises(exceptions.AstroidBuildingError, 
self.manager.ast_from_module_name, 'unhandledModule') + def _test_ast_from_old_namespace_package_protocol(self, root): + origpath = sys.path[:] + paths = [resources.find('data/path_{}_{}'.format(root, index)) + for index in range(1, 4)] + sys.path.extend(paths) + try: + for name in ('foo', 'bar', 'baz'): + module = self.manager.ast_from_module_name('package.' + name) + self.assertIsInstance(module, astroid.Module) + finally: + sys.path = origpath + + def test_ast_from_namespace_pkgutil(self): + self._test_ast_from_old_namespace_package_protocol('pkgutil') + + def test_ast_from_namespace_pkg_resources(self): + self._test_ast_from_old_namespace_package_protocol('pkg_resources') + + @unittest.skipUnless(sys.version_info[:2] > (3, 3), "Needs PEP 420 namespace protocol") + def test_implicit_namespace_package(self): + data_dir = os.path.dirname(resources.find('data/namespace_pep_420')) + contribute = os.path.join(data_dir, 'contribute_to_namespace') + for value in (data_dir, contribute): + sys.path.insert(0, value) + + try: + module = self.manager.ast_from_module_name('namespace_pep_420.module') + self.assertIsInstance(module, astroid.Module) + self.assertEqual(module.name, 'namespace_pep_420.module') + var = next(module.igetattr('var')) + self.assertIsInstance(var, astroid.Const) + self.assertEqual(var.value, 42) + finally: + for _ in range(2): + sys.path.pop(0) + + def test_namespace_package_pth_support(self): + pth = 'foogle_fax-0.12.5-py2.7-nspkg.pth' + site.addpackage(resources.RESOURCE_PATH, pth, []) + # pylint: disable=no-member; can't infer _namespace_packages, created at runtime. + pkg_resources._namespace_packages['foogle'] = [] + + try: + module = self.manager.ast_from_module_name('foogle.fax') + submodule = next(module.igetattr('a')) + value = next(submodule.igetattr('x')) + self.assertIsInstance(value, astroid.Const) + with self.assertRaises(exceptions.AstroidImportError): + self.manager.ast_from_module_name('foogle.moogle') + finally: + del pkg_resources._namespace_packages['foogle'] + sys.modules.pop('foogle') + def _test_ast_from_zip(self, archive): origpath = sys.path[:] sys.modules.pop('mypypa', None) @@ -102,8 +148,8 @@ module = self.manager.ast_from_module_name('mypypa') self.assertEqual(module.name, 'mypypa') end = os.path.join(archive, 'mypypa') - self.assertTrue(module.source_file.endswith(end), - "%s doesn't endswith %s" % (module.source_file, end)) + self.assertTrue(module.file.endswith(end), + "%s doesn't endswith %s" % (module.file, end)) finally: # remove the module, else after importing egg, we don't get the zip if 'mypypa' in self.manager.astroid_cache: @@ -126,8 +172,8 @@ def test_zip_import_data(self): """check if zip_import_data works""" filepath = resources.find('data/MyPyPa-0.1.0-py2.5.zip/mypypa') - astroid = self.manager.zip_import_data(filepath) - self.assertEqual(astroid.name, 'mypypa') + ast = self.manager.zip_import_data(filepath) + self.assertEqual(ast.name, 'mypypa') def test_zip_import_data_without_zipimport(self): """check if zip_import_data return None without zipimport""" @@ -137,50 +183,51 @@ """check if the unittest filepath is equals to the result of the method""" self.assertEqual( _get_file_from_object(unittest), - self.manager.file_from_module_name('unittest', None)[0]) + # pylint: disable=no-member; can't infer the ModuleSpec + self.manager.file_from_module_name('unittest', None).location) def test_file_from_module_name_astro_building_exception(self): """check if the method launch a exception with a wrong module name""" - 
self.assertRaises(exceptions.AstroidBuildingException, + self.assertRaises(exceptions.AstroidBuildingError, self.manager.file_from_module_name, 'unhandledModule', None) def test_ast_from_module(self): - astroid = self.manager.ast_from_module(unittest) - self.assertEqual(astroid.pure_python, True) + ast = self.manager.ast_from_module(unittest) + self.assertEqual(ast.pure_python, True) import time - astroid = self.manager.ast_from_module(time) - self.assertEqual(astroid.pure_python, False) + ast = self.manager.ast_from_module(time) + self.assertEqual(ast.pure_python, False) def test_ast_from_module_cache(self): """check if the module is in the cache manager""" - astroid = self.manager.ast_from_module(unittest) - self.assertEqual(astroid.name, 'unittest') + ast = self.manager.ast_from_module(unittest) + self.assertEqual(ast.name, 'unittest') self.assertIn('unittest', self.manager.astroid_cache) def test_ast_from_class(self): - astroid = self.manager.ast_from_class(int) - self.assertEqual(astroid.name, 'int') - self.assertEqual(astroid.parent.frame().name, BUILTINS) - - astroid = self.manager.ast_from_class(object) - self.assertEqual(astroid.name, 'object') - self.assertEqual(astroid.parent.frame().name, BUILTINS) - self.assertIn('__setattr__', astroid) + ast = self.manager.ast_from_class(int) + self.assertEqual(ast.name, 'int') + self.assertEqual(ast.parent.frame().name, BUILTINS) + + ast = self.manager.ast_from_class(object) + self.assertEqual(ast.name, 'object') + self.assertEqual(ast.parent.frame().name, BUILTINS) + self.assertIn('__setattr__', ast) def test_ast_from_class_with_module(self): """check if the method works with the module name""" - astroid = self.manager.ast_from_class(int, int.__module__) - self.assertEqual(astroid.name, 'int') - self.assertEqual(astroid.parent.frame().name, BUILTINS) - - astroid = self.manager.ast_from_class(object, object.__module__) - self.assertEqual(astroid.name, 'object') - self.assertEqual(astroid.parent.frame().name, BUILTINS) - self.assertIn('__setattr__', astroid) + ast = self.manager.ast_from_class(int, int.__module__) + self.assertEqual(ast.name, 'int') + self.assertEqual(ast.parent.frame().name, BUILTINS) + + ast = self.manager.ast_from_class(object, object.__module__) + self.assertEqual(ast.name, 'object') + self.assertEqual(ast.parent.frame().name, BUILTINS) + self.assertIn('__setattr__', ast) def test_ast_from_class_attr_error(self): """give a wrong class at the ast_from_class method""" - self.assertRaises(exceptions.AstroidBuildingException, + self.assertRaises(exceptions.AstroidBuildingError, self.manager.ast_from_class, None) def testFailedImportHooks(self): @@ -188,13 +235,13 @@ if modname == 'foo.bar': return unittest else: - raise exceptions.AstroidBuildingException() + raise exceptions.AstroidBuildingError() - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidBuildingError): self.manager.ast_from_module_name('foo.bar') self.manager.register_failed_import_hook(hook) self.assertEqual(unittest, self.manager.ast_from_module_name('foo.bar')) - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidBuildingError): self.manager.ast_from_module_name('foo.bar.baz') del self.manager._failed_import_hooks[0] diff -Nru astroid-1.4.9/astroid/tests/unittest_modutils.py astroid-1.5.3/astroid/tests/unittest_modutils.py --- astroid-1.4.9/astroid/tests/unittest_modutils.py 2016-12-18 10:16:32.000000000 +0000 +++ 
astroid-1.5.3/astroid/tests/unittest_modutils.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,27 +1,23 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# astroid is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# -*- coding: utf-8 -*- +# Copyright (c) 2014-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. +# Copyright (c) 2015 Florian Bruhin +# Copyright (c) 2015 Radosław Ganczarek + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """ unit tests for module modutils (module manipulation utilities) """ +import email import os import sys import unittest +from xml import etree +import astroid +from astroid.interpreter._import import spec from astroid import modutils from astroid.tests import resources @@ -34,21 +30,25 @@ package = "mypypa" def tearDown(self): - for k in list(sys.path_importer_cache.keys()): + for k in list(sys.path_importer_cache): if 'MyPyPa' in k: del sys.path_importer_cache[k] def test_find_zipped_module(self): - mtype, mfile = modutils._module_file( + found_spec = spec.find_spec( [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.zip')]) - self.assertEqual(mtype, modutils.PY_ZIPMODULE) - self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.zip", self.package]) + self.assertEqual(found_spec.type, + spec.ModuleType.PY_ZIPMODULE) + self.assertEqual(found_spec.location.split(os.sep)[-3:], + ["data", "MyPyPa-0.1.0-py2.5.zip", self.package]) def test_find_egg_module(self): - mtype, mfile = modutils._module_file( + found_spec = spec.find_spec( [self.package], [resources.find('data/MyPyPa-0.1.0-py2.5.egg')]) - self.assertEqual(mtype, modutils.PY_ZIPMODULE) - self.assertEqual(mfile.split(os.sep)[-3:], ["data", "MyPyPa-0.1.0-py2.5.egg", self.package]) + self.assertEqual(found_spec.type, + spec.ModuleType.PY_ZIPMODULE) + self.assertEqual(found_spec.location.split(os.sep)[-3:], + ["data", "MyPyPa-0.1.0-py2.5.egg", self.package]) class LoadModuleFromNameTest(unittest.TestCase): @@ -116,6 +116,7 @@ def test_do_not_load_twice(self): modutils.load_module_from_modpath(['data', 'lmfp', 'foo']) modutils.load_module_from_modpath(['data', 'lmfp']) + # pylint: disable=no-member; just-once is added by a test file dynamically. 
self.assertEqual(len(sys.just_once), 1) del sys.just_once @@ -131,24 +132,12 @@ self.assertEqual(os.path.realpath(result), os.path.realpath(filename)) def test_std_lib(self): - from os import path - self.assertEqual(os.path.realpath(modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py')), - os.path.realpath(path.__file__.replace('.pyc', '.py'))) - - def test_xmlplus(self): - try: - # don't fail if pyxml isn't installed - from xml.dom import ext - except ImportError: - pass - else: - self.assertEqual(os.path.realpath(modutils.file_from_modpath(['xml', 'dom', 'ext']).replace('.pyc', '.py')), - os.path.realpath(ext.__file__.replace('.pyc', '.py'))) + path = modutils.file_from_modpath(['os', 'path']).replace('.pyc', '.py') + self.assertEqual(os.path.realpath(path), + os.path.realpath(os.path.__file__.replace('.pyc', '.py'))) def test_builtin(self): - self.assertEqual(modutils.file_from_modpath(['sys']), - None) - + self.assertIsNone(modutils.file_from_modpath(['sys'])) def test_unexisting(self): self.assertRaises(ImportError, modutils.file_from_modpath, ['turlututu']) @@ -183,65 +172,61 @@ def test_builtins(self): if sys.version_info < (3, 0): - self.assertEqual(modutils.is_standard_module('__builtin__'), True) - self.assertEqual(modutils.is_standard_module('builtins'), False) + self.assertTrue(modutils.is_standard_module('__builtin__')) + self.assertFalse(modutils.is_standard_module('builtins')) else: - self.assertEqual(modutils.is_standard_module('__builtin__'), False) - self.assertEqual(modutils.is_standard_module('builtins'), True) + self.assertFalse(modutils.is_standard_module('__builtin__')) + self.assertTrue(modutils.is_standard_module('builtins')) def test_builtin(self): - self.assertEqual(modutils.is_standard_module('sys'), True) - self.assertEqual(modutils.is_standard_module('marshal'), True) + self.assertTrue(modutils.is_standard_module('sys')) + self.assertTrue(modutils.is_standard_module('marshal')) def test_nonstandard(self): - self.assertEqual(modutils.is_standard_module('astroid'), False) + self.assertFalse(modutils.is_standard_module('astroid')) def test_unknown(self): - self.assertEqual(modutils.is_standard_module('unknown'), False) + self.assertFalse(modutils.is_standard_module('unknown')) def test_4(self): - self.assertEqual(modutils.is_standard_module('hashlib'), True) - self.assertEqual(modutils.is_standard_module('pickle'), True) - self.assertEqual(modutils.is_standard_module('email'), True) - self.assertEqual(modutils.is_standard_module('io'), sys.version_info >= (2, 6)) - self.assertEqual(modutils.is_standard_module('StringIO'), sys.version_info < (3, 0)) - self.assertEqual(modutils.is_standard_module('unicodedata'), True) + self.assertTrue(modutils.is_standard_module('hashlib')) + self.assertTrue(modutils.is_standard_module('pickle')) + self.assertTrue(modutils.is_standard_module('email')) + self.assertEqual(modutils.is_standard_module('io'), + sys.version_info >= (2, 6)) + self.assertEqual(modutils.is_standard_module('StringIO'), + sys.version_info < (3, 0)) + self.assertTrue(modutils.is_standard_module('unicodedata')) def test_custom_path(self): datadir = resources.find('') if datadir.startswith(modutils.EXT_LIB_DIR): self.skipTest('known breakage of is_standard_module on installed package') - self.assertEqual(modutils.is_standard_module('data.module', (datadir,)), True) - self.assertEqual(modutils.is_standard_module('data.module', (os.path.abspath(datadir),)), True) + + self.assertTrue(modutils.is_standard_module('data.module', (datadir,))) + 
self.assertTrue(modutils.is_standard_module('data.module', (os.path.abspath(datadir),))) def test_failing_edge_cases(self): - from xml import etree # using a subpackage/submodule path as std_path argument - self.assertEqual(modutils.is_standard_module('xml.etree', etree.__path__), False) + self.assertFalse(modutils.is_standard_module('xml.etree', etree.__path__)) # using a module + object name as modname argument - self.assertEqual(modutils.is_standard_module('sys.path'), True) + self.assertTrue(modutils.is_standard_module('sys.path')) # this is because only the first package/module is considered - self.assertEqual(modutils.is_standard_module('sys.whatever'), True) - self.assertEqual(modutils.is_standard_module('xml.whatever', etree.__path__), False) + self.assertTrue(modutils.is_standard_module('sys.whatever')) + self.assertFalse(modutils.is_standard_module('xml.whatever', etree.__path__)) class IsRelativeTest(unittest.TestCase): - def test_knownValues_is_relative_1(self): - import email - self.assertEqual(modutils.is_relative('utils', email.__path__[0]), - True) + self.assertTrue(modutils.is_relative('utils', email.__path__[0])) def test_knownValues_is_relative_2(self): - from xml.etree import ElementTree - self.assertEqual(modutils.is_relative('ElementPath', ElementTree.__file__), - True) + self.assertTrue(modutils.is_relative('ElementPath', + etree.ElementTree.__file__)) def test_knownValues_is_relative_3(self): - import astroid - self.assertEqual(modutils.is_relative('astroid', astroid.__path__[0]), - False) + self.assertFalse(modutils.is_relative('astroid', astroid.__path__[0])) class GetModuleFilesTest(unittest.TestCase): @@ -254,6 +239,16 @@ self.assertEqual(modules, {os.path.join(package, x) for x in expected}) + def test_get_all_files(self): + """test that list_all returns all Python files from given location + """ + non_package = resources.find('data/notamodule') + modules = modutils.get_module_files(non_package, [], list_all=True) + self.assertEqual( + modules, + [os.path.join(non_package, 'file.py')], + ) + def test_load_module_set_attribute(self): import xml.etree.ElementTree import xml diff -Nru astroid-1.4.9/astroid/tests/unittest_nodes.py astroid-1.5.3/astroid/tests/unittest_nodes.py --- astroid-1.4.9/astroid/tests/unittest_nodes.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_nodes.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,12 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2007, 2009-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015 Florian Bruhin +# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """tests for specific behaviour of astroid nodes """ import os @@ -25,6 +17,7 @@ import six +import astroid from astroid import bases from astroid import builder from astroid import context as contextmod @@ -53,9 +46,18 @@ self.assertEqual(build('(1, )').as_string(), '(1, )') self.assertEqual(build('1, 2, 3').as_string(), '(1, 2, 3)') + @test_utils.require_version(minver='3.0') + def test_func_signature_issue_185(self): + code = textwrap.dedent(''' + def test(a, b, c=42, *, x=42, **kwargs): + print(a, b, c, args) + ''') + node = parse(code) + self.assertEqual(node.as_string().strip(), code.strip()) + def test_as_string_for_list_containing_uninferable(self): - node = test_utils.extract_node(''' - def foo(arg): + node = builder.extract_node(''' + def foo(): bar = [arg] * 1 ''') binop = node.body[0].value @@ -64,7 +66,7 @@ self.assertEqual(binop.as_string(), '([arg]) * (1)') def test_frozenset_as_string(self): - nodes = test_utils.extract_node(''' + ast_nodes = builder.extract_node(''' frozenset((1, 2, 3)) #@ frozenset({1, 2, 3}) #@ frozenset([1, 2, 3,]) #@ @@ -72,23 +74,15 @@ frozenset(None) #@ frozenset(1) #@ ''') - nodes = [next(node.infer()) for node in nodes] + ast_nodes = [next(node.infer()) for node in ast_nodes] - self.assertEqual(nodes[0].as_string(), 'frozenset((1, 2, 3))') - self.assertEqual(nodes[1].as_string(), 'frozenset({1, 2, 3})') - self.assertEqual(nodes[2].as_string(), 'frozenset([1, 2, 3])') + self.assertEqual(ast_nodes[0].as_string(), 'frozenset((1, 2, 3))') + self.assertEqual(ast_nodes[1].as_string(), 'frozenset({1, 2, 3})') + self.assertEqual(ast_nodes[2].as_string(), 'frozenset([1, 2, 3])') - self.assertNotEqual(nodes[3].as_string(), 'frozenset(None)') - self.assertNotEqual(nodes[4].as_string(), 'frozenset(1)') + self.assertNotEqual(ast_nodes[3].as_string(), 'frozenset(None)') + self.assertNotEqual(ast_nodes[4].as_string(), 'frozenset(1)') - @test_utils.require_version(minver='3.0') - def test_func_signature_issue_185(self): - code = textwrap.dedent(''' - def test(a, b, c=42, *, x=42, **kwargs): - print(a, b, c, args) - ''') - node = parse(code) - self.assertEqual(node.as_string().strip(), code.strip()) def test_varargs_kwargs_as_string(self): ast = abuilder.string_build('raise_string(*args, **kwargs)').body[0] self.assertEqual(ast.as_string(), 'raise_string(*args, **kwargs)') @@ -184,9 +178,9 @@ try: return self.__class__.__dict__['CODE_Astroid'] except KeyError: - astroid = builder.parse(self.CODE) - self.__class__.CODE_Astroid = astroid - return astroid + module = builder.parse(self.CODE) + self.__class__.CODE_Astroid = module + return module class IfNodeTest(_NodeTest): @@ -340,13 +334,13 @@ self.assertEqual(from_.real_name('NameNode'), 'Name') imp_ = self.module['os'] self.assertEqual(imp_.real_name('os'), 'os') - self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'os.path') + self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'os.path') imp_ = self.module['NameNode'] self.assertEqual(imp_.real_name('NameNode'), 'Name') - self.assertRaises(exceptions.NotFoundError, imp_.real_name, 'Name') + self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'Name') imp_ = self.module2['YO'] self.assertEqual(imp_.real_name('YO'), 'YO') - self.assertRaises(exceptions.NotFoundError, imp_.real_name, 
'data') + self.assertRaises(exceptions.AttributeInferenceError, imp_.real_name, 'data') def test_as_string(self): ast = self.module['modutils'] @@ -368,7 +362,7 @@ method of this From node will be made by unpack_infer. inference.infer_from will try to import this module, which will fail and raise a InferenceException (by mixins.do_import_module). The infer_name - will catch this exception and yield and YES instead. + will catch this exception and yield and Uninferable instead. ''' code = ''' @@ -382,8 +376,8 @@ except PickleError: pass ''' - astroid = builder.parse(code) - handler_type = astroid.body[1].handlers[0].type + module = builder.parse(code) + handler_type = module.body[1].handlers[0].type excs = list(node_classes.unpack_infer(handler_type)) # The number of returned object can differ on Python 2 @@ -392,21 +386,21 @@ # present in the other version. self.assertIsInstance(excs[0], nodes.ClassDef) self.assertEqual(excs[0].name, 'PickleError') - self.assertIs(excs[-1], util.YES) + self.assertIs(excs[-1], util.Uninferable) def test_absolute_import(self): - astroid = resources.build_file('data/absimport.py') + module = resources.build_file('data/absimport.py') ctx = contextmod.InferenceContext() # will fail if absolute import failed ctx.lookupname = 'message' - next(astroid['message'].infer(ctx)) + next(module['message'].infer(ctx)) ctx.lookupname = 'email' - m = next(astroid['email'].infer(ctx)) - self.assertFalse(m.source_file.startswith(os.path.join('data', 'email.py'))) + m = next(module['email'].infer(ctx)) + self.assertFalse(m.file.startswith(os.path.join('data', 'email.py'))) def test_more_absolute_import(self): - astroid = resources.build_file('data/module1abs/__init__.py', 'data.module1abs') - self.assertIn('sys', astroid._locals) + module = resources.build_file('data/module1abs/__init__.py', 'data.module1abs') + self.assertIn('sys', module.locals) class CmpNodeTest(unittest.TestCase): @@ -418,6 +412,7 @@ class ConstNodeTest(unittest.TestCase): def _test(self, value): + # pylint: disable=no-member; union type in const_factory, this shouldn't happen node = nodes.const_factory(value) self.assertIsInstance(node._proxied, nodes.ClassDef) self.assertEqual(node._proxied.name, value.__class__.__name__) @@ -449,7 +444,7 @@ class NameNodeTest(unittest.TestCase): def test_assign_to_True(self): - """test that True and False assignements don't crash""" + """test that True and False assignments don't crash""" code = """ True = False def hello(False): @@ -457,7 +452,7 @@ del True """ if sys.version_info >= (3, 0): - with self.assertRaises(exceptions.AstroidBuildingException): + with self.assertRaises(exceptions.AstroidBuildingError): builder.parse(code) else: ast = builder.parse(code) @@ -469,6 +464,53 @@ self.assertEqual(del_true.name, "True") +class AnnAssignNodeTest(unittest.TestCase): + @test_utils.require_version(minver='3.6') + def test_primitive(self): + code = textwrap.dedent(""" + test: int = 5 + """) + assign = builder.extract_node(code) + self.assertIsInstance(assign, nodes.AnnAssign) + self.assertEqual(assign.target.name, "test") + self.assertEqual(assign.annotation.name, "int") + self.assertEqual(assign.value.value, 5) + self.assertEqual(assign.simple, 1) + + @test_utils.require_version(minver='3.6') + def test_primitive_without_initial_value(self): + code = textwrap.dedent(""" + test: str + """) + assign = builder.extract_node(code) + self.assertIsInstance(assign, nodes.AnnAssign) + self.assertEqual(assign.target.name, "test") + self.assertEqual(assign.annotation.name, "str") + 
self.assertEqual(assign.value, None) + + @test_utils.require_version(minver='3.6') + def test_complex(self): + code = textwrap.dedent(""" + test: Dict[List[str]] = {} + """) + assign = builder.extract_node(code) + self.assertIsInstance(assign, nodes.AnnAssign) + self.assertEqual(assign.target.name, "test") + self.assertIsInstance(assign.annotation, astroid.Subscript) + self.assertIsInstance(assign.value, astroid.Dict) + + @test_utils.require_version(minver='3.6') + def test_as_string(self): + code = textwrap.dedent(""" + print() + test: int = 5 + test2: str + test3: List[Dict[(str, str)]] = [] + """) + ast = abuilder.string_build(code) + self.assertEqual(ast.as_string().strip(), code.strip()) + + class ArgumentsNodeTC(unittest.TestCase): def test_linenumbering(self): ast = builder.parse(''' @@ -488,13 +530,14 @@ self.skipTest('FIXME http://bugs.python.org/issue10445 ' '(no line number on function args)') - def test_builtin_fromlineno_missing(self): - cls = test_utils.extract_node(''' - class Foo(Exception): #@ - pass + @test_utils.require_version(minver='3.0') + def test_kwoargs(self): + ast = builder.parse(''' + def func(*, x): + pass ''') - new = cls.getattr('__new__')[-1] - self.assertEqual(new.args.fromlineno, 0) + args = ast['func'].args + self.assertTrue(args.is_argument('x')) class UnboundMethodNodeTest(unittest.TestCase): @@ -511,7 +554,7 @@ meth = A.test ''') node = next(ast['meth'].infer()) - with self.assertRaises(exceptions.NotFoundError): + with self.assertRaises(exceptions.AttributeInferenceError): node.getattr('__missssing__') name = node.getattr('__name__')[0] self.assertIsInstance(name, nodes.Const) @@ -603,9 +646,8 @@ def test_assname(node): if node.name == 'foo': - n = nodes.AssignName() - n.name = 'bar' - return n + return nodes.AssignName('bar', node.lineno, node.col_offset, + node.parent) def test_assattr(node): if node.attrname == 'a': node.attrname = 'b' @@ -618,7 +660,8 @@ def test_genexpr(node): if node.elt.value == 1: - node.elt = nodes.Const(2) + node.elt = nodes.Const(2, node.lineno, node.col_offset, + node.parent) return node self.transformer.register_transform(nodes.From, test_from) @@ -690,17 +733,18 @@ assign_type_mixin = module.body[1].targets[0] parent_assign_type_mixin = module.body[2] - warnings.simplefilter('always') - with warnings.catch_warnings(record=True) as w: - filter_stmts_mixin.ass_type() - self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with test_utils.enable_warning(PendingDeprecationWarning): + filter_stmts_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) with warnings.catch_warnings(record=True) as w: - assign_type_mixin.ass_type() - self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with test_utils.enable_warning(PendingDeprecationWarning): + assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) with warnings.catch_warnings(record=True) as w: - parent_assign_type_mixin.ass_type() - self.assertIsInstance(w[0].message, PendingDeprecationWarning) + with test_utils.enable_warning(PendingDeprecationWarning): + parent_assign_type_mixin.ass_type() + self.assertIsInstance(w[0].message, PendingDeprecationWarning) def test_isinstance_warnings(self): msg_format = ("%r is deprecated and slated for removal in astroid " @@ -708,8 +752,8 @@ for cls in (nodes.Discard, nodes.Backquote, nodes.AssName, nodes.AssAttr, nodes.Getattr, nodes.CallFunc, nodes.From): with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('always') - 
isinstance(42, cls) + with test_utils.enable_warning(PendingDeprecationWarning): + isinstance(42, cls) self.assertIsInstance(w[0].message, PendingDeprecationWarning) actual_msg = msg_format % (cls.__class__.__name__, cls.__wrapped__.__name__) self.assertEqual(str(w[0].message), actual_msg) @@ -719,7 +763,7 @@ class Python35AsyncTest(unittest.TestCase): def test_async_await_keywords(self): - async_def, async_for, async_with, await_node = test_utils.extract_node(''' + async_def, async_for, async_with, await_node = builder.extract_node(''' async def func(): #@ async for i in range(10): #@ f = __(await i) @@ -760,5 +804,53 @@ self._test_await_async_as_string(code) +class ContextTest(unittest.TestCase): + + def test_subscript_load(self): + node = builder.extract_node('f[1]') + self.assertIs(node.ctx, astroid.Load) + + def test_subscript_del(self): + node = builder.extract_node('del f[1]') + self.assertIs(node.targets[0].ctx, astroid.Del) + + def test_subscript_store(self): + node = builder.extract_node('f[1] = 2') + subscript = node.targets[0] + self.assertIs(subscript.ctx, astroid.Store) + + def test_list_load(self): + node = builder.extract_node('[]') + self.assertIs(node.ctx, astroid.Load) + + def test_list_del(self): + node = builder.extract_node('del []') + self.assertIs(node.targets[0].ctx, astroid.Del) + + def test_list_store(self): + with self.assertRaises(exceptions.AstroidSyntaxError): + builder.extract_node('[0] = 2') + + def test_tuple_load(self): + node = builder.extract_node('(1, )') + self.assertIs(node.ctx, astroid.Load) + + def test_tuple_store(self): + with self.assertRaises(exceptions.AstroidSyntaxError): + builder.extract_node('(1, ) = 3') + + @test_utils.require_version(minver='3.5') + def test_starred_load(self): + node = builder.extract_node('a = *b') + starred = node.value + self.assertIs(starred.ctx, astroid.Load) + + @test_utils.require_version(minver='3.0') + def test_starred_store(self): + node = builder.extract_node('a, *b = 1, 2') + starred = node.targets[0].elts[1] + self.assertIs(starred.ctx, astroid.Store) + + if __name__ == '__main__': unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_object_model.py astroid-1.5.3/astroid/tests/unittest_object_model.py --- astroid-1.4.9/astroid/tests/unittest_object_model.py 1970-01-01 00:00:00.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_object_model.py 2017-04-12 14:11:58.000000000 +0000 @@ -0,0 +1,611 @@ +# Copyright (c) 2016 Claudiu Popa +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import unittest +import xml + +import six + +import astroid +from astroid import builder +from astroid import exceptions +from astroid import MANAGER +from astroid import test_utils +from astroid import objects + + +BUILTINS = MANAGER.astroid_cache[six.moves.builtins.__name__] + + +class InstanceModelTest(unittest.TestCase): + + def test_instance_special_model(self): + ast_nodes = builder.extract_node(''' + class A: + "test" + def __init__(self): + self.a = 42 + a = A() + a.__class__ #@ + a.__module__ #@ + a.__doc__ #@ + a.__dict__ #@ + ''', module_name='fake_module') + + cls = next(ast_nodes[0].infer()) + self.assertIsInstance(cls, astroid.ClassDef) + self.assertEqual(cls.name, 'A') + + module = next(ast_nodes[1].infer()) + self.assertIsInstance(module, astroid.Const) + self.assertEqual(module.value, 'fake_module') + + doc = next(ast_nodes[2].infer()) + self.assertIsInstance(doc, astroid.Const) + 
self.assertEqual(doc.value, 'test') + + dunder_dict = next(ast_nodes[3].infer()) + self.assertIsInstance(dunder_dict, astroid.Dict) + attr = next(dunder_dict.getitem(astroid.Const('a')).infer()) + self.assertIsInstance(attr, astroid.Const) + self.assertEqual(attr.value, 42) + + @unittest.expectedFailure + def test_instance_local_attributes_overrides_object_model(self): + # The instance lookup needs to be changed in order for this to work. + ast_node = builder.extract_node(''' + class A: + @property + def __dict__(self): + return [] + A().__dict__ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, astroid.List) + self.assertEqual(inferred.elts, []) + + +class BoundMethodModelTest(unittest.TestCase): + + def test_bound_method_model(self): + ast_nodes = builder.extract_node(''' + class A: + def test(self): pass + a = A() + a.test.__func__ #@ + a.test.__self__ #@ + ''') + + func = next(ast_nodes[0].infer()) + self.assertIsInstance(func, astroid.FunctionDef) + self.assertEqual(func.name, 'test') + + self_ = next(ast_nodes[1].infer()) + self.assertIsInstance(self_, astroid.Instance) + self.assertEqual(self_.name, 'A') + + +class UnboundMethodModelTest(unittest.TestCase): + + def test_unbound_method_model(self): + ast_nodes = builder.extract_node(''' + class A: + def test(self): pass + t = A.test + t.__class__ #@ + t.__func__ #@ + t.__self__ #@ + t.im_class #@ + t.im_func #@ + t.im_self #@ + ''') + + cls = next(ast_nodes[0].infer()) + self.assertIsInstance(cls, astroid.ClassDef) + if six.PY2: + unbound_name = 'instancemethod' + else: + unbound_name = 'function' + + self.assertEqual(cls.name, unbound_name) + + func = next(ast_nodes[1].infer()) + self.assertIsInstance(func, astroid.FunctionDef) + self.assertEqual(func.name, 'test') + + self_ = next(ast_nodes[2].infer()) + self.assertIsInstance(self_, astroid.Const) + self.assertIsNone(self_.value) + + self.assertEqual(cls.name, next(ast_nodes[3].infer()).name) + self.assertEqual(func, next(ast_nodes[4].infer())) + self.assertIsNone(next(ast_nodes[5].infer()).value) + + +class ClassModelTest(unittest.TestCase): + + def test_priority_to_local_defined_values(self): + ast_node = builder.extract_node(''' + class A: + __doc__ = "first" + A.__doc__ #@ + ''') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, astroid.Const) + self.assertEqual(inferred.value, "first") + + @test_utils.require_version(maxver='3.0') + def test__mro__old_style(self): + ast_node = builder.extract_node(''' + class A: + pass + A.__mro__ + ''') + with self.assertRaises(exceptions.InferenceError): + next(ast_node.infer()) + + @test_utils.require_version(maxver='3.0') + def test__subclasses__old_style(self): + ast_node = builder.extract_node(''' + class A: + pass + A.__subclasses__ + ''') + with self.assertRaises(exceptions.InferenceError): + next(ast_node.infer()) + + def test_class_model_correct_mro_subclasses_proxied(self): + ast_nodes = builder.extract_node(''' + class A(object): + pass + A.mro #@ + A.__subclasses__ #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, astroid.BoundMethod) + self.assertIsInstance(inferred._proxied, astroid.FunctionDef) + self.assertIsInstance(inferred.bound, astroid.ClassDef) + self.assertEqual(inferred.bound.name, 'type') + + @unittest.skipUnless(six.PY2, "Needs old style classes") + def test_old_style_classes_no_mro(self): + ast_node = builder.extract_node(''' + class A: + pass + A.mro #@ + ''') + with self.assertRaises(exceptions.InferenceError): + 
next(ast_node.infer()) + + def test_class_model(self): + ast_nodes = builder.extract_node(''' + class A(object): + "test" + + class B(A): pass + class C(A): pass + + A.__module__ #@ + A.__name__ #@ + A.__qualname__ #@ + A.__doc__ #@ + A.__mro__ #@ + A.mro() #@ + A.__bases__ #@ + A.__class__ #@ + A.__dict__ #@ + A.__subclasses__() #@ + ''', module_name='fake_module') + + module = next(ast_nodes[0].infer()) + self.assertIsInstance(module, astroid.Const) + self.assertEqual(module.value, 'fake_module') + + name = next(ast_nodes[1].infer()) + self.assertIsInstance(name, astroid.Const) + self.assertEqual(name.value, 'A') + + qualname = next(ast_nodes[2].infer()) + self.assertIsInstance(qualname, astroid.Const) + self.assertEqual(qualname.value, 'fake_module.A') + + doc = next(ast_nodes[3].infer()) + self.assertIsInstance(doc, astroid.Const) + self.assertEqual(doc.value, 'test') + + mro = next(ast_nodes[4].infer()) + self.assertIsInstance(mro, astroid.Tuple) + self.assertEqual([cls.name for cls in mro.elts], + ['A', 'object']) + + called_mro = next(ast_nodes[5].infer()) + self.assertEqual(called_mro.elts, mro.elts) + + bases = next(ast_nodes[6].infer()) + self.assertIsInstance(bases, astroid.Tuple) + self.assertEqual([cls.name for cls in bases.elts], + ['object']) + + cls = next(ast_nodes[7].infer()) + self.assertIsInstance(cls, astroid.ClassDef) + self.assertEqual(cls.name, 'type') + + cls_dict = next(ast_nodes[8].infer()) + self.assertIsInstance(cls_dict, astroid.Dict) + + subclasses = next(ast_nodes[9].infer()) + self.assertIsInstance(subclasses, astroid.List) + self.assertEqual([cls.name for cls in subclasses.elts], ['B', 'C']) + + +class ModuleModelTest(unittest.TestCase): + + def test_priority_to_local_defined_values(self): + ast_node = astroid.parse(''' + __file__ = "mine" + ''') + file_value = next(ast_node.igetattr('__file__')) + self.assertIsInstance(file_value, astroid.Const) + self.assertEqual(file_value.value, "mine") + + def test__path__not_a_package(self): + ast_node = builder.extract_node(''' + import sys + sys.__path__ #@ + ''') + with self.assertRaises(exceptions.InferenceError): + next(ast_node.infer()) + + def test_module_model(self): + ast_nodes = builder.extract_node(''' + import xml + xml.__path__ #@ + xml.__name__ #@ + xml.__doc__ #@ + xml.__file__ #@ + xml.__spec__ #@ + xml.__loader__ #@ + xml.__cached__ #@ + xml.__package__ #@ + xml.__dict__ #@ + ''') + + path = next(ast_nodes[0].infer()) + self.assertIsInstance(path, astroid.List) + self.assertIsInstance(path.elts[0], astroid.Const) + self.assertEqual(path.elts[0].value, xml.__path__[0]) + + name = next(ast_nodes[1].infer()) + self.assertIsInstance(name, astroid.Const) + self.assertEqual(name.value, 'xml') + + doc = next(ast_nodes[2].infer()) + self.assertIsInstance(doc, astroid.Const) + self.assertEqual(doc.value, xml.__doc__) + + file_ = next(ast_nodes[3].infer()) + self.assertIsInstance(file_, astroid.Const) + self.assertEqual(file_.value, xml.__file__.replace(".pyc", ".py")) + + for ast_node in ast_nodes[4:7]: + inferred = next(ast_node.infer()) + self.assertIs(inferred, astroid.Uninferable) + + package = next(ast_nodes[7].infer()) + self.assertIsInstance(package, astroid.Const) + self.assertEqual(package.value, 'xml') + + dict_ = next(ast_nodes[8].infer()) + self.assertIsInstance(dict_, astroid.Dict) + + +class FunctionModelTest(unittest.TestCase): + + def test_partial_descriptor_support(self): + bound, result = builder.extract_node(''' + class A(object): pass + def test(self): return 42 + f = test.__get__(A(), A) 
+ f #@ + f() #@ + ''') + bound = next(bound.infer()) + self.assertIsInstance(bound, astroid.BoundMethod) + self.assertEqual(bound._proxied._proxied.name, 'test') + result = next(result.infer()) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, 42) + + @unittest.expectedFailure + def test_descriptor_not_inferrring_self(self): + # We can't infer __get__(X, Y)() when the bounded function + # uses self, because of the tree's parent not being propagating good enough. + result = builder.extract_node(''' + class A(object): + x = 42 + def test(self): return self.x + f = test.__get__(A(), A) + f() #@ + ''') + result = next(result.infer()) + self.assertIsInstance(result, astroid.Const) + self.assertEqual(result.value, 42) + + def test_descriptors_binding_invalid(self): + ast_nodes = builder.extract_node(''' + class A: pass + def test(self): return 42 + test.__get__()() #@ + test.__get__(1)() #@ + test.__get__(2, 3, 4) #@ + ''') + for node in ast_nodes: + with self.assertRaises(exceptions.InferenceError): + next(node.infer()) + + def test_function_model(self): + ast_nodes = builder.extract_node(''' + def func(a=1, b=2): + """test""" + func.__name__ #@ + func.__doc__ #@ + func.__qualname__ #@ + func.__module__ #@ + func.__defaults__ #@ + func.__dict__ #@ + func.__globals__ #@ + func.__code__ #@ + func.__closure__ #@ + ''', module_name='fake_module') + + name = next(ast_nodes[0].infer()) + self.assertIsInstance(name, astroid.Const) + self.assertEqual(name.value, 'func') + + doc = next(ast_nodes[1].infer()) + self.assertIsInstance(doc, astroid.Const) + self.assertEqual(doc.value, 'test') + + qualname = next(ast_nodes[2].infer()) + self.assertIsInstance(qualname, astroid.Const) + self.assertEqual(qualname.value, 'fake_module.func') + + module = next(ast_nodes[3].infer()) + self.assertIsInstance(module, astroid.Const) + self.assertEqual(module.value, 'fake_module') + + defaults = next(ast_nodes[4].infer()) + self.assertIsInstance(defaults, astroid.Tuple) + self.assertEqual([default.value for default in defaults.elts], [1, 2]) + + dict_ = next(ast_nodes[5].infer()) + self.assertIsInstance(dict_, astroid.Dict) + + globals_ = next(ast_nodes[6].infer()) + self.assertIsInstance(globals_, astroid.Dict) + + for ast_node in ast_nodes[7:9]: + self.assertIs(next(ast_node.infer()), astroid.Uninferable) + + @test_utils.require_version(minver='3.0') + def test_empty_return_annotation(self): + ast_node = builder.extract_node(''' + def test(): pass + test.__annotations__ + ''') + annotations = next(ast_node.infer()) + self.assertIsInstance(annotations, astroid.Dict) + self.assertEqual(len(annotations.items), 0) + + @test_utils.require_version(minver='3.0') + def test_annotations_kwdefaults(self): + ast_node = builder.extract_node(''' + def test(a: 1, *args: 2, f:4='lala', **kwarg:3)->2: pass + test.__annotations__ #@ + test.__kwdefaults__ #@ + ''') + annotations = next(ast_node[0].infer()) + self.assertIsInstance(annotations, astroid.Dict) + self.assertIsInstance(annotations.getitem(astroid.Const('return')), astroid.Const) + self.assertEqual(annotations.getitem(astroid.Const('return')).value, 2) + self.assertIsInstance(annotations.getitem(astroid.Const('a')), astroid.Const) + self.assertEqual(annotations.getitem(astroid.Const('a')).value, 1) + self.assertEqual(annotations.getitem(astroid.Const('args')).value, 2) + self.assertEqual(annotations.getitem(astroid.Const('kwarg')).value, 3) + + self.assertEqual(annotations.getitem(astroid.Const('f')).value, 4) + + kwdefaults = 
next(ast_node[1].infer()) + self.assertIsInstance(kwdefaults, astroid.Dict) + # self.assertEqual(kwdefaults.getitem('f').value, 'lala') + + @test_utils.require_version(maxver='3.0') + def test_function_model_for_python2(self): + ast_nodes = builder.extract_node(''' + def test(a=1): + "a" + + test.func_name #@ + test.func_doc #@ + test.func_dict #@ + test.func_globals #@ + test.func_defaults #@ + test.func_code #@ + test.func_closure #@ + ''') + name = next(ast_nodes[0].infer()) + self.assertIsInstance(name, astroid.Const) + self.assertEqual(name.value, 'test') + doc = next(ast_nodes[1].infer()) + self.assertIsInstance(doc, astroid.Const) + self.assertEqual(doc.value, 'a') + pydict = next(ast_nodes[2].infer()) + self.assertIsInstance(pydict, astroid.Dict) + pyglobals = next(ast_nodes[3].infer()) + self.assertIsInstance(pyglobals, astroid.Dict) + defaults = next(ast_nodes[4].infer()) + self.assertIsInstance(defaults, astroid.Tuple) + for node in ast_nodes[5:]: + self.assertIs(next(node.infer()), astroid.Uninferable) + + +class GeneratorModelTest(unittest.TestCase): + + def test_model(self): + ast_nodes = builder.extract_node(''' + def test(): + "a" + yield + + gen = test() + gen.__name__ #@ + gen.__doc__ #@ + gen.gi_code #@ + gen.gi_frame #@ + gen.send #@ + ''') + + name = next(ast_nodes[0].infer()) + self.assertEqual(name.value, 'test') + + doc = next(ast_nodes[1].infer()) + self.assertEqual(doc.value, 'a') + + gi_code = next(ast_nodes[2].infer()) + self.assertIsInstance(gi_code, astroid.ClassDef) + self.assertEqual(gi_code.name, 'gi_code') + + gi_frame = next(ast_nodes[3].infer()) + self.assertIsInstance(gi_frame, astroid.ClassDef) + self.assertEqual(gi_frame.name, 'gi_frame') + + send = next(ast_nodes[4].infer()) + self.assertIsInstance(send, astroid.BoundMethod) + + +class ExceptionModelTest(unittest.TestCase): + + @unittest.skipIf(six.PY2, "needs Python 3") + def test_model_py3(self): + ast_nodes = builder.extract_node(''' + try: + x[42] + except ValueError as err: + err.args #@ + err.__traceback__ #@ + + err.message #@ + ''') + args = next(ast_nodes[0].infer()) + self.assertIsInstance(args, astroid.Tuple) + tb = next(ast_nodes[1].infer()) + self.assertIsInstance(tb, astroid.Instance) + self.assertEqual(tb.name, 'traceback') + + with self.assertRaises(exceptions.InferenceError): + next(ast_nodes[2].infer()) + + @unittest.skipUnless(six.PY2, "needs Python 2") + def test_model_py2(self): + ast_nodes = builder.extract_node(''' + try: + x[42] + except ValueError as err: + err.args #@ + err.message #@ + + err.__traceback__ #@ + ''') + args = next(ast_nodes[0].infer()) + self.assertIsInstance(args, astroid.Tuple) + message = next(ast_nodes[1].infer()) + self.assertIsInstance(message, astroid.Const) + + with self.assertRaises(exceptions.InferenceError): + next(ast_nodes[2].infer()) + + +class DictObjectModelTest(unittest.TestCase): + + def test__class__(self): + ast_node = builder.extract_node('{}.__class__') + inferred = next(ast_node.infer()) + self.assertIsInstance(inferred, astroid.ClassDef) + self.assertEqual(inferred.name, 'dict') + + def test_attributes_inferred_as_methods(self): + ast_nodes = builder.extract_node(''' + {}.values #@ + {}.items #@ + {}.keys #@ + ''') + for node in ast_nodes: + inferred = next(node.infer()) + self.assertIsInstance(inferred, astroid.BoundMethod) + + @unittest.skipUnless(six.PY2, "needs Python 2") + def test_concrete_objects_for_dict_methods(self): + ast_nodes = builder.extract_node(''' + {1:1, 2:3}.values() #@ + {1:1, 2:3}.keys() #@ + {1:1, 2:3}.items() #@ 
+ ''') + values = next(ast_nodes[0].infer()) + self.assertIsInstance(values, astroid.List) + self.assertEqual([value.value for value in values.elts], [1, 3]) + + keys = next(ast_nodes[1].infer()) + self.assertIsInstance(keys, astroid.List) + self.assertEqual([key.value for key in keys.elts], [1, 2]) + + items = next(ast_nodes[2].infer()) + self.assertIsInstance(items, astroid.List) + for expected, elem in zip([(1, 1), (2, 3)], items.elts): + self.assertIsInstance(elem, astroid.Tuple) + self.assertEqual(list(expected), [elt.value for elt in elem.elts]) + + @unittest.skipIf(six.PY2, "needs Python 3") + def test_wrapper_objects_for_dict_methods_python3(self): + ast_nodes = builder.extract_node(''' + {1:1, 2:3}.values() #@ + {1:1, 2:3}.keys() #@ + {1:1, 2:3}.items() #@ + ''') + values = next(ast_nodes[0].infer()) + self.assertIsInstance(values, objects.DictValues) + self.assertEqual([elt.value for elt in values.elts], [1, 3]) + keys = next(ast_nodes[1].infer()) + self.assertIsInstance(keys, objects.DictKeys) + self.assertEqual([elt.value for elt in keys.elts], [1, 2]) + items = next(ast_nodes[2].infer()) + self.assertIsInstance(items, objects.DictItems) + + +class LruCacheModelTest(unittest.TestCase): + + @unittest.skipIf(six.PY2, "needs Python 3") + def test_lru_cache(self): + ast_nodes = builder.extract_node(''' + import functools + class Foo(object): + @functools.lru_cache() + def foo(): + pass + f = Foo() + f.foo.cache_clear #@ + f.foo.__wrapped__ #@ + f.foo.cache_info() #@ + ''') + cache_clear = next(ast_nodes[0].infer()) + self.assertIsInstance(cache_clear, astroid.BoundMethod) + wrapped = next(ast_nodes[1].infer()) + self.assertIsInstance(wrapped, astroid.FunctionDef) + self.assertEqual(wrapped.name, 'foo') + cache_info = next(ast_nodes[2].infer()) + self.assertIsInstance(cache_info, astroid.Instance) + + +if __name__ == '__main__': + unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_objects.py astroid-1.5.3/astroid/tests/unittest_objects.py --- astroid-1.4.9/astroid/tests/unittest_objects.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_objects.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,530 +1,507 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . 
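# A minimal sketch of the dict object model exercised by the tests above,
# assuming astroid >= 1.5 running on Python 3, where dict view calls infer to
# the objects.DictValues/DictKeys/DictItems wrappers rather than plain lists.
# The dict literal below is illustrative only.
import astroid
from astroid import objects

call = astroid.extract_node('{1: 1, 2: 3}.values()  #@')
inferred = next(call.infer())
assert isinstance(inferred, objects.DictValues)
print([elt.value for elt in inferred.elts])   # [1, 3]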
- -import unittest - -from astroid import bases -from astroid import exceptions -from astroid import nodes -from astroid import objects -from astroid import test_utils - - -class ObjectsTest(unittest.TestCase): - - def test_frozenset(self): - node = test_utils.extract_node(""" - frozenset({1: 2, 2: 3}) #@ - """) - infered = next(node.infer()) - self.assertIsInstance(infered, objects.FrozenSet) - - self.assertEqual(infered.pytype(), "%s.frozenset" % bases.BUILTINS) - - itered = infered.itered() - self.assertEqual(len(itered), 2) - self.assertIsInstance(itered[0], nodes.Const) - self.assertEqual([const.value for const in itered], [1, 2]) - - proxied = infered._proxied - self.assertEqual(infered.qname(), "%s.frozenset" % bases.BUILTINS) - self.assertIsInstance(proxied, nodes.ClassDef) - - -class SuperTests(unittest.TestCase): - - def test_inferring_super_outside_methods(self): - ast_nodes = test_utils.extract_node(''' - class Module(object): - pass - class StaticMethod(object): - @staticmethod - def static(): - # valid, but we don't bother with it. - return super(StaticMethod, StaticMethod) #@ - # super outside methods aren't inferred - super(Module, Module) #@ - # no argument super is not recognised outside methods as well. - super() #@ - ''') - in_static = next(ast_nodes[0].value.infer()) - self.assertIsInstance(in_static, bases.Instance) - self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) - - module_level = next(ast_nodes[1].infer()) - self.assertIsInstance(module_level, bases.Instance) - self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) - - no_arguments = next(ast_nodes[2].infer()) - self.assertIsInstance(no_arguments, bases.Instance) - self.assertEqual(no_arguments.qname(), "%s.super" % bases.BUILTINS) - - def test_inferring_unbound_super_doesnt_work(self): - node = test_utils.extract_node(''' - class Test(object): - def __init__(self): - super(Test) #@ - ''') - unbounded = next(node.infer()) - self.assertIsInstance(unbounded, bases.Instance) - self.assertEqual(unbounded.qname(), "%s.super" % bases.BUILTINS) - - def test_use_default_inference_on_not_inferring_args(self): - ast_nodes = test_utils.extract_node(''' - class Test(object): - def __init__(self): - super(Lala, self) #@ - super(Test, lala) #@ - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, bases.Instance) - self.assertEqual(first.qname(), "%s.super" % bases.BUILTINS) - - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, bases.Instance) - self.assertEqual(second.qname(), "%s.super" % bases.BUILTINS) - - @test_utils.require_version(maxver='3.0') - def test_super_on_old_style_class(self): - # super doesn't work on old style class, but leave - # that as an error for pylint. We'll infer Super objects, - # but every call will result in a failure at some point. 
- node = test_utils.extract_node(''' - class OldStyle: - def __init__(self): - super(OldStyle, self) #@ - ''') - old = next(node.infer()) - self.assertIsInstance(old, objects.Super) - self.assertIsInstance(old.mro_pointer, nodes.ClassDef) - self.assertEqual(old.mro_pointer.name, 'OldStyle') - with self.assertRaises(exceptions.SuperError) as cm: - old.super_mro() - self.assertEqual(str(cm.exception), - "Unable to call super on old-style classes.") - - @test_utils.require_version(minver='3.0') - def test_no_arguments_super(self): - ast_nodes = test_utils.extract_node(''' - class First(object): pass - class Second(First): - def test(self): - super() #@ - @classmethod - def test_classmethod(cls): - super() #@ - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, objects.Super) - self.assertIsInstance(first.type, bases.Instance) - self.assertEqual(first.type.name, 'Second') - self.assertIsInstance(first.mro_pointer, nodes.ClassDef) - self.assertEqual(first.mro_pointer.name, 'Second') - - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, objects.Super) - self.assertIsInstance(second.type, nodes.ClassDef) - self.assertEqual(second.type.name, 'Second') - self.assertIsInstance(second.mro_pointer, nodes.ClassDef) - self.assertEqual(second.mro_pointer.name, 'Second') - - def test_super_simple_cases(self): - ast_nodes = test_utils.extract_node(''' - class First(object): pass - class Second(First): pass - class Third(First): - def test(self): - super(Third, self) #@ - super(Second, self) #@ - - # mro position and the type - super(Third, Third) #@ - super(Third, Second) #@ - super(Fourth, Fourth) #@ - - class Fourth(Third): - pass - ''') - - # .type is the object which provides the mro. - # .mro_pointer is the position in the mro from where - # the lookup should be done. 
- - # super(Third, self) - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, objects.Super) - self.assertIsInstance(first.type, bases.Instance) - self.assertEqual(first.type.name, 'Third') - self.assertIsInstance(first.mro_pointer, nodes.ClassDef) - self.assertEqual(first.mro_pointer.name, 'Third') - - # super(Second, self) - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, objects.Super) - self.assertIsInstance(second.type, bases.Instance) - self.assertEqual(second.type.name, 'Third') - self.assertIsInstance(first.mro_pointer, nodes.ClassDef) - self.assertEqual(second.mro_pointer.name, 'Second') - - # super(Third, Third) - third = next(ast_nodes[2].infer()) - self.assertIsInstance(third, objects.Super) - self.assertIsInstance(third.type, nodes.ClassDef) - self.assertEqual(third.type.name, 'Third') - self.assertIsInstance(third.mro_pointer, nodes.ClassDef) - self.assertEqual(third.mro_pointer.name, 'Third') - - # super(Third, second) - fourth = next(ast_nodes[3].infer()) - self.assertIsInstance(fourth, objects.Super) - self.assertIsInstance(fourth.type, nodes.ClassDef) - self.assertEqual(fourth.type.name, 'Second') - self.assertIsInstance(fourth.mro_pointer, nodes.ClassDef) - self.assertEqual(fourth.mro_pointer.name, 'Third') - - # Super(Fourth, Fourth) - fifth = next(ast_nodes[4].infer()) - self.assertIsInstance(fifth, objects.Super) - self.assertIsInstance(fifth.type, nodes.ClassDef) - self.assertEqual(fifth.type.name, 'Fourth') - self.assertIsInstance(fifth.mro_pointer, nodes.ClassDef) - self.assertEqual(fifth.mro_pointer.name, 'Fourth') - - def test_super_infer(self): - node = test_utils.extract_node(''' - class Super(object): - def __init__(self): - super(Super, self) #@ - ''') - inferred = next(node.infer()) - self.assertIsInstance(inferred, objects.Super) - reinferred = next(inferred.infer()) - self.assertIsInstance(reinferred, objects.Super) - self.assertIs(inferred, reinferred) - - def test_inferring_invalid_supers(self): - ast_nodes = test_utils.extract_node(''' - class Super(object): - def __init__(self): - # MRO pointer is not a type - super(1, self) #@ - # MRO type is not a subtype - super(Super, 1) #@ - # self is not a subtype of Bupper - super(Bupper, self) #@ - class Bupper(Super): - pass - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, objects.Super) - with self.assertRaises(exceptions.SuperError) as cm: - first.super_mro() - self.assertEqual(str(cm.exception), "The first super argument must be type.") - - for node in ast_nodes[1:]: - inferred = next(node.infer()) - self.assertIsInstance(inferred, objects.Super, node) - with self.assertRaises(exceptions.SuperArgumentTypeError) as cm: - inferred.super_mro() - self.assertEqual(str(cm.exception), - "super(type, obj): obj must be an instance " - "or subtype of type", node) - - def test_proxied(self): - node = test_utils.extract_node(''' - class Super(object): - def __init__(self): - super(Super, self) #@ - ''') - infered = next(node.infer()) - proxied = infered._proxied - self.assertEqual(proxied.qname(), "%s.super" % bases.BUILTINS) - self.assertIsInstance(proxied, nodes.ClassDef) - - def test_super_bound_model(self): - ast_nodes = test_utils.extract_node(''' - class First(object): - def method(self): - pass - @classmethod - def class_method(cls): - pass - class Super_Type_Type(First): - def method(self): - super(Super_Type_Type, Super_Type_Type).method #@ - super(Super_Type_Type, Super_Type_Type).class_method #@ - @classmethod - def class_method(cls): - 
super(Super_Type_Type, Super_Type_Type).method #@ - super(Super_Type_Type, Super_Type_Type).class_method #@ - - class Super_Type_Object(First): - def method(self): - super(Super_Type_Object, self).method #@ - super(Super_Type_Object, self).class_method #@ - ''') - # Super(type, type) is the same for both functions and classmethods. - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, nodes.FunctionDef) - self.assertEqual(first.name, 'method') - - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, bases.BoundMethod) - self.assertEqual(second.bound.name, 'First') - self.assertEqual(second.type, 'classmethod') - - third = next(ast_nodes[2].infer()) - self.assertIsInstance(third, nodes.FunctionDef) - self.assertEqual(third.name, 'method') - - fourth = next(ast_nodes[3].infer()) - self.assertIsInstance(fourth, bases.BoundMethod) - self.assertEqual(fourth.bound.name, 'First') - self.assertEqual(fourth.type, 'classmethod') - - # Super(type, obj) can lead to different attribute bindings - # depending on the type of the place where super was called. - fifth = next(ast_nodes[4].infer()) - self.assertIsInstance(fifth, bases.BoundMethod) - self.assertEqual(fifth.bound.name, 'First') - self.assertEqual(fifth.type, 'method') - - sixth = next(ast_nodes[5].infer()) - self.assertIsInstance(sixth, bases.BoundMethod) - self.assertEqual(sixth.bound.name, 'First') - self.assertEqual(sixth.type, 'classmethod') - - def test_super_getattr_single_inheritance(self): - ast_nodes = test_utils.extract_node(''' - class First(object): - def test(self): pass - class Second(First): - def test2(self): pass - class Third(Second): - test3 = 42 - def __init__(self): - super(Third, self).test2 #@ - super(Third, self).test #@ - # test3 is local, no MRO lookup is done. - super(Third, self).test3 #@ - super(Third, self) #@ - - # Unbounds. 
- super(Third, Third).test2 #@ - super(Third, Third).test #@ - - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, bases.BoundMethod) - self.assertEqual(first.bound.name, 'Second') - - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, bases.BoundMethod) - self.assertEqual(second.bound.name, 'First') - - with self.assertRaises(exceptions.InferenceError): - next(ast_nodes[2].infer()) - fourth = next(ast_nodes[3].infer()) - with self.assertRaises(exceptions.NotFoundError): - fourth.getattr('test3') - with self.assertRaises(exceptions.NotFoundError): - next(fourth.igetattr('test3')) - - first_unbound = next(ast_nodes[4].infer()) - self.assertIsInstance(first_unbound, nodes.FunctionDef) - self.assertEqual(first_unbound.name, 'test2') - self.assertEqual(first_unbound.parent.name, 'Second') - - second_unbound = next(ast_nodes[5].infer()) - self.assertIsInstance(second_unbound, nodes.FunctionDef) - self.assertEqual(second_unbound.name, 'test') - self.assertEqual(second_unbound.parent.name, 'First') - - def test_super_invalid_mro(self): - node = test_utils.extract_node(''' - class A(object): - test = 42 - class Super(A, A): - def __init__(self): - super(Super, self) #@ - ''') - inferred = next(node.infer()) - with self.assertRaises(exceptions.NotFoundError): - next(inferred.getattr('test')) - - def test_super_complex_mro(self): - ast_nodes = test_utils.extract_node(''' - class A(object): - def spam(self): return "A" - def foo(self): return "A" - @staticmethod - def static(self): pass - class B(A): - def boo(self): return "B" - def spam(self): return "B" - class C(A): - def boo(self): return "C" - class E(C, B): - def __init__(self): - super(E, self).boo #@ - super(C, self).boo #@ - super(E, self).spam #@ - super(E, self).foo #@ - super(E, self).static #@ - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, bases.BoundMethod) - self.assertEqual(first.bound.name, 'C') - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, bases.BoundMethod) - self.assertEqual(second.bound.name, 'B') - third = next(ast_nodes[2].infer()) - self.assertIsInstance(third, bases.BoundMethod) - self.assertEqual(third.bound.name, 'B') - fourth = next(ast_nodes[3].infer()) - self.assertEqual(fourth.bound.name, 'A') - static = next(ast_nodes[4].infer()) - self.assertIsInstance(static, nodes.FunctionDef) - self.assertEqual(static.parent.scope().name, 'A') - - def test_super_data_model(self): - ast_nodes = test_utils.extract_node(''' - class X(object): pass - class A(X): - def __init__(self): - super(A, self) #@ - super(A, A) #@ - super(X, A) #@ - ''') - first = next(ast_nodes[0].infer()) - thisclass = first.getattr('__thisclass__')[0] - self.assertIsInstance(thisclass, nodes.ClassDef) - self.assertEqual(thisclass.name, 'A') - selfclass = first.getattr('__self_class__')[0] - self.assertIsInstance(selfclass, nodes.ClassDef) - self.assertEqual(selfclass.name, 'A') - self_ = first.getattr('__self__')[0] - self.assertIsInstance(self_, bases.Instance) - self.assertEqual(self_.name, 'A') - cls = first.getattr('__class__')[0] - self.assertEqual(cls, first._proxied) - - second = next(ast_nodes[1].infer()) - thisclass = second.getattr('__thisclass__')[0] - self.assertEqual(thisclass.name, 'A') - self_ = second.getattr('__self__')[0] - self.assertIsInstance(self_, nodes.ClassDef) - self.assertEqual(self_.name, 'A') - - third = next(ast_nodes[2].infer()) - thisclass = third.getattr('__thisclass__')[0] - self.assertEqual(thisclass.name, 'X') - selfclass = 
third.getattr('__self_class__')[0] - self.assertEqual(selfclass.name, 'A') - - def assertEqualMro(self, klass, expected_mro): - self.assertEqual( - [member.name for member in klass.super_mro()], - expected_mro) - - def test_super_mro(self): - ast_nodes = test_utils.extract_node(''' - class A(object): pass - class B(A): pass - class C(A): pass - class E(C, B): - def __init__(self): - super(E, self) #@ - super(C, self) #@ - super(B, self) #@ - - super(B, 1) #@ - super(1, B) #@ - ''') - first = next(ast_nodes[0].infer()) - self.assertEqualMro(first, ['C', 'B', 'A', 'object']) - second = next(ast_nodes[1].infer()) - self.assertEqualMro(second, ['B', 'A', 'object']) - third = next(ast_nodes[2].infer()) - self.assertEqualMro(third, ['A', 'object']) - - fourth = next(ast_nodes[3].infer()) - with self.assertRaises(exceptions.SuperError): - fourth.super_mro() - fifth = next(ast_nodes[4].infer()) - with self.assertRaises(exceptions.SuperError): - fifth.super_mro() - - def test_super_yes_objects(self): - ast_nodes = test_utils.extract_node(''' - from collections import Missing - class A(object): - def __init__(self): - super(Missing, self) #@ - super(A, Missing) #@ - ''') - first = next(ast_nodes[0].infer()) - self.assertIsInstance(first, bases.Instance) - second = next(ast_nodes[1].infer()) - self.assertIsInstance(second, bases.Instance) - - def test_super_invalid_types(self): - node = test_utils.extract_node(''' - import collections - class A(object): - def __init__(self): - super(A, collections) #@ - ''') - inferred = next(node.infer()) - with self.assertRaises(exceptions.SuperError): - inferred.super_mro() - with self.assertRaises(exceptions.SuperArgumentTypeError): - inferred.super_mro() - - def test_super_pytype_display_type_name(self): - node = test_utils.extract_node(''' - class A(object): - def __init__(self): - super(A, self) #@ - ''') - inferred = next(node.infer()) - self.assertEqual(inferred.pytype(), "%s.super" % bases.BUILTINS) - self.assertEqual(inferred.display_type(), 'Super of') - self.assertEqual(inferred.name, 'A') - - def test_super_properties(self): - node = test_utils.extract_node(''' - class Foo(object): - @property - def dict(self): - return 42 - - class Bar(Foo): - @property - def dict(self): - return super(Bar, self).dict - - Bar().dict - ''') - inferred = next(node.infer()) - self.assertIsInstance(inferred, nodes.Const) +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +import unittest + +from astroid import bases +from astroid import builder +from astroid import exceptions +from astroid import nodes +from astroid import objects +from astroid import test_utils + +class ObjectsTest(unittest.TestCase): + + def test_frozenset(self): + node = builder.extract_node(""" + frozenset({1: 2, 2: 3}) #@ + """) + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.FrozenSet) + + self.assertEqual(inferred.pytype(), "%s.frozenset" % bases.BUILTINS) + + itered = inferred.itered() + self.assertEqual(len(itered), 2) + self.assertIsInstance(itered[0], nodes.Const) + self.assertEqual([const.value for const in itered], [1, 2]) + + proxied = inferred._proxied + self.assertEqual(inferred.qname(), "%s.frozenset" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + +class SuperTests(unittest.TestCase): + + def test_inferring_super_outside_methods(self): + ast_nodes 
= builder.extract_node(''' + class Module(object): + pass + class StaticMethod(object): + @staticmethod + def static(): + # valid, but we don't bother with it. + return super(StaticMethod, StaticMethod) #@ + # super outside methods aren't inferred + super(Module, Module) #@ + # no argument super is not recognised outside methods as well. + super() #@ + ''') + in_static = next(ast_nodes[0].value.infer()) + self.assertIsInstance(in_static, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + module_level = next(ast_nodes[1].infer()) + self.assertIsInstance(module_level, bases.Instance) + self.assertEqual(in_static.qname(), "%s.super" % bases.BUILTINS) + + no_arguments = next(ast_nodes[2].infer()) + self.assertIsInstance(no_arguments, bases.Instance) + self.assertEqual(no_arguments.qname(), "%s.super" % bases.BUILTINS) + + def test_inferring_unbound_super_doesnt_work(self): + node = builder.extract_node(''' + class Test(object): + def __init__(self): + super(Test) #@ + ''') + unbounded = next(node.infer()) + self.assertIsInstance(unbounded, bases.Instance) + self.assertEqual(unbounded.qname(), "%s.super" % bases.BUILTINS) + + def test_use_default_inference_on_not_inferring_args(self): + ast_nodes = builder.extract_node(''' + class Test(object): + def __init__(self): + super(Lala, self) #@ + super(Test, lala) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + self.assertEqual(first.qname(), "%s.super" % bases.BUILTINS) + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + self.assertEqual(second.qname(), "%s.super" % bases.BUILTINS) + + @test_utils.require_version(maxver='3.0') + def test_super_on_old_style_class(self): + # super doesn't work on old style class, but leave + # that as an error for pylint. We'll infer Super objects, + # but every call will result in a failure at some point. 
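# A short sketch of the behaviour described in the comment above, mirroring the
# test body that follows: astroid still infers an objects.Super for an
# old-style class, but calling super_mro() on it raises SuperError.
# Assumes astroid 1.5.x running under Python 2, where classic classes exist.
import astroid
from astroid import exceptions, objects

node = astroid.extract_node('''
class OldStyle:
    def __init__(self):
        super(OldStyle, self)  #@
''')
sup = next(node.infer())
assert isinstance(sup, objects.Super)
try:
    sup.super_mro()
except exceptions.SuperError as exc:
    print(exc)   # Unable to call super on old-style classes.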
+ node = builder.extract_node(''' + class OldStyle: + def __init__(self): + super(OldStyle, self) #@ + ''') + old = next(node.infer()) + self.assertIsInstance(old, objects.Super) + self.assertIsInstance(old.mro_pointer, nodes.ClassDef) + self.assertEqual(old.mro_pointer.name, 'OldStyle') + with self.assertRaises(exceptions.SuperError) as cm: + old.super_mro() + self.assertEqual(str(cm.exception), + "Unable to call super on old-style classes.") + + @test_utils.require_version(minver='3.0') + def test_no_arguments_super(self): + ast_nodes = builder.extract_node(''' + class First(object): pass + class Second(First): + def test(self): + super() #@ + @classmethod + def test_classmethod(cls): + super() #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Second') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, nodes.ClassDef) + self.assertEqual(second.type.name, 'Second') + self.assertIsInstance(second.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + def test_super_simple_cases(self): + ast_nodes = builder.extract_node(''' + class First(object): pass + class Second(First): pass + class Third(First): + def test(self): + super(Third, self) #@ + super(Second, self) #@ + + # mro position and the type + super(Third, Third) #@ + super(Third, Second) #@ + super(Fourth, Fourth) #@ + + class Fourth(Third): + pass + ''') + + # .type is the object which provides the mro. + # .mro_pointer is the position in the mro from where + # the lookup should be done. 
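# A hedged illustration of the two attributes documented in the comment above,
# using the same class layout as the snippet in the test that follows: for
# `super(Second, self)` evaluated inside Third, `.type` is the Instance that
# supplies the MRO (Third) and `.mro_pointer` is where the lookup starts (Second).
import astroid

node = astroid.extract_node('''
class First(object): pass
class Second(First): pass
class Third(First):
    def test(self):
        super(Second, self)  #@
''')
sup = next(node.infer())
print(sup.type.name, sup.mro_pointer.name)   # Third Second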
+ + # super(Third, self) + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + self.assertIsInstance(first.type, bases.Instance) + self.assertEqual(first.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(first.mro_pointer.name, 'Third') + + # super(Second, self) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, objects.Super) + self.assertIsInstance(second.type, bases.Instance) + self.assertEqual(second.type.name, 'Third') + self.assertIsInstance(first.mro_pointer, nodes.ClassDef) + self.assertEqual(second.mro_pointer.name, 'Second') + + # super(Third, Third) + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, objects.Super) + self.assertIsInstance(third.type, nodes.ClassDef) + self.assertEqual(third.type.name, 'Third') + self.assertIsInstance(third.mro_pointer, nodes.ClassDef) + self.assertEqual(third.mro_pointer.name, 'Third') + + # super(Third, second) + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, objects.Super) + self.assertIsInstance(fourth.type, nodes.ClassDef) + self.assertEqual(fourth.type.name, 'Second') + self.assertIsInstance(fourth.mro_pointer, nodes.ClassDef) + self.assertEqual(fourth.mro_pointer.name, 'Third') + + # Super(Fourth, Fourth) + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, objects.Super) + self.assertIsInstance(fifth.type, nodes.ClassDef) + self.assertEqual(fifth.type.name, 'Fourth') + self.assertIsInstance(fifth.mro_pointer, nodes.ClassDef) + self.assertEqual(fifth.mro_pointer.name, 'Fourth') + + def test_super_infer(self): + node = builder.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super) + reinferred = next(inferred.infer()) + self.assertIsInstance(reinferred, objects.Super) + self.assertIs(inferred, reinferred) + + def test_inferring_invalid_supers(self): + ast_nodes = builder.extract_node(''' + class Super(object): + def __init__(self): + # MRO pointer is not a type + super(1, self) #@ + # MRO type is not a subtype + super(Super, 1) #@ + # self is not a subtype of Bupper + super(Bupper, self) #@ + class Bupper(Super): + pass + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, objects.Super) + with self.assertRaises(exceptions.SuperError) as cm: + first.super_mro() + self.assertIsInstance(cm.exception.super_.mro_pointer, nodes.Const) + self.assertEqual(cm.exception.super_.mro_pointer.value, 1) + for node, invalid_type in zip(ast_nodes[1:], + (nodes.Const, bases.Instance)): + inferred = next(node.infer()) + self.assertIsInstance(inferred, objects.Super, node) + with self.assertRaises(exceptions.SuperError) as cm: + inferred.super_mro() + self.assertIsInstance(cm.exception.super_.type, invalid_type) + + def test_proxied(self): + node = builder.extract_node(''' + class Super(object): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + proxied = inferred._proxied + self.assertEqual(proxied.qname(), "%s.super" % bases.BUILTINS) + self.assertIsInstance(proxied, nodes.ClassDef) + + def test_super_bound_model(self): + ast_nodes = builder.extract_node(''' + class First(object): + def method(self): + pass + @classmethod + def class_method(cls): + pass + class Super_Type_Type(First): + def method(self): + super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + @classmethod + def 
class_method(cls): + super(Super_Type_Type, Super_Type_Type).method #@ + super(Super_Type_Type, Super_Type_Type).class_method #@ + + class Super_Type_Object(First): + def method(self): + super(Super_Type_Object, self).method #@ + super(Super_Type_Object, self).class_method #@ + ''') + # Super(type, type) is the same for both functions and classmethods. + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, nodes.FunctionDef) + self.assertEqual(first.name, 'method') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + self.assertEqual(second.type, 'classmethod') + + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, nodes.FunctionDef) + self.assertEqual(third.name, 'method') + + fourth = next(ast_nodes[3].infer()) + self.assertIsInstance(fourth, bases.BoundMethod) + self.assertEqual(fourth.bound.name, 'First') + self.assertEqual(fourth.type, 'classmethod') + + # Super(type, obj) can lead to different attribute bindings + # depending on the type of the place where super was called. + fifth = next(ast_nodes[4].infer()) + self.assertIsInstance(fifth, bases.BoundMethod) + self.assertEqual(fifth.bound.name, 'First') + self.assertEqual(fifth.type, 'method') + + sixth = next(ast_nodes[5].infer()) + self.assertIsInstance(sixth, bases.BoundMethod) + self.assertEqual(sixth.bound.name, 'First') + self.assertEqual(sixth.type, 'classmethod') + + def test_super_getattr_single_inheritance(self): + ast_nodes = builder.extract_node(''' + class First(object): + def test(self): pass + class Second(First): + def test2(self): pass + class Third(Second): + test3 = 42 + def __init__(self): + super(Third, self).test2 #@ + super(Third, self).test #@ + # test3 is local, no MRO lookup is done. + super(Third, self).test3 #@ + super(Third, self) #@ + + # Unbounds. 
+ super(Third, Third).test2 #@ + super(Third, Third).test #@ + + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'Second') + + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'First') + + with self.assertRaises(exceptions.InferenceError): + next(ast_nodes[2].infer()) + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.AttributeInferenceError): + fourth.getattr('test3') + with self.assertRaises(exceptions.AttributeInferenceError): + next(fourth.igetattr('test3')) + + first_unbound = next(ast_nodes[4].infer()) + self.assertIsInstance(first_unbound, nodes.FunctionDef) + self.assertEqual(first_unbound.name, 'test2') + self.assertEqual(first_unbound.parent.name, 'Second') + + second_unbound = next(ast_nodes[5].infer()) + self.assertIsInstance(second_unbound, nodes.FunctionDef) + self.assertEqual(second_unbound.name, 'test') + self.assertEqual(second_unbound.parent.name, 'First') + + def test_super_invalid_mro(self): + node = builder.extract_node(''' + class A(object): + test = 42 + class Super(A, A): + def __init__(self): + super(Super, self) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.AttributeInferenceError): + next(inferred.getattr('test')) + + def test_super_complex_mro(self): + ast_nodes = builder.extract_node(''' + class A(object): + def spam(self): return "A" + def foo(self): return "A" + @staticmethod + def static(self): pass + class B(A): + def boo(self): return "B" + def spam(self): return "B" + class C(A): + def boo(self): return "C" + class E(C, B): + def __init__(self): + super(E, self).boo #@ + super(C, self).boo #@ + super(E, self).spam #@ + super(E, self).foo #@ + super(E, self).static #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.BoundMethod) + self.assertEqual(first.bound.name, 'C') + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.BoundMethod) + self.assertEqual(second.bound.name, 'B') + third = next(ast_nodes[2].infer()) + self.assertIsInstance(third, bases.BoundMethod) + self.assertEqual(third.bound.name, 'B') + fourth = next(ast_nodes[3].infer()) + self.assertEqual(fourth.bound.name, 'A') + static = next(ast_nodes[4].infer()) + self.assertIsInstance(static, nodes.FunctionDef) + self.assertEqual(static.parent.scope().name, 'A') + + def test_super_data_model(self): + ast_nodes = builder.extract_node(''' + class X(object): pass + class A(X): + def __init__(self): + super(A, self) #@ + super(A, A) #@ + super(X, A) #@ + ''') + first = next(ast_nodes[0].infer()) + thisclass = first.getattr('__thisclass__')[0] + self.assertIsInstance(thisclass, nodes.ClassDef) + self.assertEqual(thisclass.name, 'A') + selfclass = first.getattr('__self_class__')[0] + self.assertIsInstance(selfclass, nodes.ClassDef) + self.assertEqual(selfclass.name, 'A') + self_ = first.getattr('__self__')[0] + self.assertIsInstance(self_, bases.Instance) + self.assertEqual(self_.name, 'A') + cls = first.getattr('__class__')[0] + self.assertEqual(cls, first._proxied) + + second = next(ast_nodes[1].infer()) + thisclass = second.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'A') + self_ = second.getattr('__self__')[0] + self.assertIsInstance(self_, nodes.ClassDef) + self.assertEqual(self_.name, 'A') + + third = next(ast_nodes[2].infer()) + thisclass = third.getattr('__thisclass__')[0] + self.assertEqual(thisclass.name, 'X') + 
selfclass = third.getattr('__self_class__')[0] + self.assertEqual(selfclass.name, 'A') + + def assertEqualMro(self, klass, expected_mro): + self.assertEqual( + [member.name for member in klass.super_mro()], + expected_mro) + + def test_super_mro(self): + ast_nodes = builder.extract_node(''' + class A(object): pass + class B(A): pass + class C(A): pass + class E(C, B): + def __init__(self): + super(E, self) #@ + super(C, self) #@ + super(B, self) #@ + + super(B, 1) #@ + super(1, B) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertEqualMro(first, ['C', 'B', 'A', 'object']) + second = next(ast_nodes[1].infer()) + self.assertEqualMro(second, ['B', 'A', 'object']) + third = next(ast_nodes[2].infer()) + self.assertEqualMro(third, ['A', 'object']) + + fourth = next(ast_nodes[3].infer()) + with self.assertRaises(exceptions.SuperError): + fourth.super_mro() + fifth = next(ast_nodes[4].infer()) + with self.assertRaises(exceptions.SuperError): + fifth.super_mro() + + def test_super_yes_objects(self): + ast_nodes = builder.extract_node(''' + from collections import Missing + class A(object): + def __init__(self): + super(Missing, self) #@ + super(A, Missing) #@ + ''') + first = next(ast_nodes[0].infer()) + self.assertIsInstance(first, bases.Instance) + second = next(ast_nodes[1].infer()) + self.assertIsInstance(second, bases.Instance) + + def test_super_invalid_types(self): + node = builder.extract_node(''' + import collections + class A(object): + def __init__(self): + super(A, collections) #@ + ''') + inferred = next(node.infer()) + with self.assertRaises(exceptions.SuperError): + inferred.super_mro() + with self.assertRaises(exceptions.SuperError): + inferred.super_mro() + + def test_super_properties(self): + node = builder.extract_node(''' + class Foo(object): + @property + def dict(self): + return 42 + + class Bar(Foo): + @property + def dict(self): + return super(Bar, self).dict + + Bar().dict + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) self.assertEqual(inferred.value, 42) - - -if __name__ == '__main__': - unittest.main() + + +if __name__ == '__main__': + unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_peephole.py astroid-1.5.3/astroid/tests/unittest_peephole.py --- astroid-1.4.9/astroid/tests/unittest_peephole.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_peephole.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,8 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . 
+# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Tests for the astroid AST peephole optimizer.""" diff -Nru astroid-1.4.9/astroid/tests/unittest_protocols.py astroid-1.5.3/astroid/tests/unittest_protocols.py --- astroid-1.4.9/astroid/tests/unittest_protocols.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_protocols.py 2017-04-12 05:57:16.000000000 +0000 @@ -1,26 +1,15 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import contextlib import unittest import astroid -from astroid.test_utils import extract_node, require_version +from astroid import extract_node +from astroid.test_utils import require_version from astroid import InferenceError from astroid import nodes from astroid import util @@ -78,7 +67,7 @@ for1_starred = next(assign_stmts.nodes_of_class(Starred)) assigned = next(for1_starred.assigned_stmts()) - self.assertEqual(assigned, util.YES) + self.assertEqual(assigned, util.Uninferable) def _get_starred_stmts(self, code): assign_stmt = extract_node("{} #@".format(code)) @@ -119,16 +108,16 @@ @require_version(minver='3.0') def test_assigned_stmts_starred_yes(self): # Not something iterable and known - self._helper_starred_expected("a, *b = range(3) #@", util.YES) + self._helper_starred_expected("a, *b = range(3) #@", util.Uninferable) # Not something inferrable - self._helper_starred_expected("a, *b = balou() #@", util.YES) + self._helper_starred_expected("a, *b = balou() #@", util.Uninferable) # In function, unknown. self._helper_starred_expected(""" def test(arg): - head, *tail = arg #@""", util.YES) + head, *tail = arg #@""", util.Uninferable) # These cases aren't worth supporting. self._helper_starred_expected( - "a, (*b, c), d = (1, (2, 3, 4), 5) #@", util.YES) + "a, (*b, c), d = (1, (2, 3, 4), 5) #@", util.Uninferable) @require_version(minver='3.0') def test_assign_stmts_starred_fails(self): @@ -137,7 +126,7 @@ # Too many lhs values self._helper_starred_inference_error("a, *b, c = (1, 2) #@") # This could be solved properly, but it complicates needlessly the - # code for assigned_stmts, without oferring real benefit. + # code for assigned_stmts, without offering real benefit. 
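# A small sketch of what the renamed sentinel means for the starred-assignment
# protocol tested above: when the right-hand side cannot be inferred,
# assigned_stmts() for the Starred target yields util.Uninferable (the former
# util.YES). Assumes astroid 1.5.x under Python 3; `balou` is deliberately unknown.
import astroid
from astroid import nodes, util

assign = astroid.extract_node('a, *b = balou()  #@')
starred = next(assign.nodes_of_class(nodes.Starred))
assert next(starred.assigned_stmts()) is util.Uninferable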
self._helper_starred_inference_error( "(*a, b), (c, *d) = (1, 2, 3), (4, 5, 6) #@") @@ -160,6 +149,23 @@ assigned = list(simple_mul_assnode_2.assigned_stmts()) self.assertNameNodesEqual(['c'], assigned) + @require_version(minver='3.6') + def test_assigned_stmts_annassignments(self): + annassign_stmts = extract_node(""" + a: str = "abc" #@ + b: str #@ + """) + simple_annassign_node = next(annassign_stmts[0].nodes_of_class(AssignName)) + assigned = list(simple_annassign_node.assigned_stmts()) + self.assertEqual(1, len(assigned)) + self.assertIsInstance(assigned[0], Const) + self.assertEqual(assigned[0].value, "abc") + + empty_annassign_node = next(annassign_stmts[1].nodes_of_class(AssignName)) + assigned = list(empty_annassign_node.assigned_stmts()) + self.assertEqual(1, len(assigned)) + self.assertIs(assigned[0], util.Uninferable) + def test_sequence_assigned_stmts_not_accepting_empty_node(self): def transform(node): node.root().locals['__all__'] = [node.value] diff -Nru astroid-1.4.9/astroid/tests/unittest_python3.py astroid-1.5.3/astroid/tests/unittest_python3.py --- astroid-1.4.9/astroid/tests/unittest_python3.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_python3.py 2017-04-12 14:07:17.000000000 +0000 @@ -1,28 +1,18 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2015-2016 Cara Vinson + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + from textwrap import dedent import unittest from astroid import nodes from astroid.node_classes import Assign, Expr, YieldFrom, Name, Const -from astroid.builder import AstroidBuilder +from astroid.builder import AstroidBuilder, extract_node from astroid.scoped_nodes import ClassDef, FunctionDef -from astroid.test_utils import require_version, extract_node +from astroid.test_utils import require_version class Python3TC(unittest.TestCase): @@ -97,7 +87,7 @@ @require_version('3.0') def test_metaclass_imported(self): astroid = self.builder.string_build(dedent(""" - from abc import ABCMeta + from abc import ABCMeta class Test(metaclass=ABCMeta): pass""")) klass = astroid.body[1] @@ -106,9 +96,18 @@ self.assertEqual(metaclass.name, 'ABCMeta') @require_version('3.0') + def test_metaclass_multiple_keywords(self): + astroid = self.builder.string_build("class Test(magic=None, metaclass=type): pass") + klass = astroid.body[0] + + metaclass = klass.metaclass() + self.assertIsInstance(metaclass, ClassDef) + self.assertEqual(metaclass.name, 'type') + + @require_version('3.0') def test_as_string(self): body = dedent(""" - from abc import ABCMeta + from abc import ABCMeta class Test(metaclass=ABCMeta): pass""") astroid = self.builder.string_build(body) klass = astroid.body[1] @@ -215,6 +214,23 @@ self.assertIsNone(func.returns) @require_version('3.0') + def test_kwonlyargs_annotations_supper(self): + node = self.builder.string_build(dedent(""" + def test(*, a: int, b: str, c: None, d, e): + pass + """)) + func = node['test'] + arguments = func.args + self.assertIsInstance(arguments.kwonlyargs_annotations[0], Name) + self.assertEqual(arguments.kwonlyargs_annotations[0].name, 'int') + self.assertIsInstance(arguments.kwonlyargs_annotations[1], Name) + self.assertEqual(arguments.kwonlyargs_annotations[1].name, 'str') + self.assertIsInstance(arguments.kwonlyargs_annotations[2], Const) + self.assertIsNone(arguments.kwonlyargs_annotations[2].value) + self.assertIsNone(arguments.kwonlyargs_annotations[3]) + self.assertIsNone(arguments.kwonlyargs_annotations[4]) + + @require_version('3.0') def test_annotation_as_string(self): code1 = dedent(''' def test(a, b:int=4, c=2, f:'lala'=4)->2: @@ -245,10 +261,94 @@ def test_unpacking_in_dict_getitem(self): node = extract_node('{1:2, **{2:3, 3:4}, **{5: 6}}') for key, expected in ((1, 2), (2, 3), (3, 4), (5, 6)): - value = node.getitem(key) + value = node.getitem(nodes.Const(key)) self.assertIsInstance(value, nodes.Const) self.assertEqual(value.value, expected) + @require_version('3.6') + def test_format_string(self): + code = "f'{greetings} {person}'" + node = extract_node(code) + self.assertEqual(node.as_string(), code) + + @require_version('3.6') + def test_underscores_in_numeral_literal(self): + pairs = [ + ('10_1000', 101000), + ('10_000_000', 10000000), + ('0x_FF_FF', 65535), + ] + for value, expected in pairs: + node = extract_node(value) + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Const) + self.assertEqual(inferred.value, expected) + + @require_version('3.6') + def test_async_comprehensions(self): + async_comprehensions = [ + extract_node("async def f(): return __([i async for i in aiter() if i % 2])"), + extract_node("async def f(): return __({i async for i in aiter() if i % 2})"), + extract_node("async def f(): return __((i async 
for i in aiter() if i % 2))"), + extract_node("async def f(): return __({i: i async for i in aiter() if i % 2})") + ] + non_async_comprehensions = [ + extract_node("async def f(): return __({i: i for i in iter() if i % 2})") + ] + + for comp in async_comprehensions: + self.assertTrue(comp.generators[0].is_async) + for comp in non_async_comprehensions: + self.assertFalse(comp.generators[0].is_async) + + @require_version('3.7') + def test_async_comprehensions_outside_coroutine(self): + # When async and await will become keywords, async comprehensions + # will be allowed outside of coroutines body + comprehensions = [ + "[i async for i in aiter() if condition(i)]", + "[await fun() for fun in funcs]", + "{await fun() for fun in funcs}", + "{fun: await fun() for fun in funcs}", + "[await fun() for fun in funcs if await smth]", + "{await fun() for fun in funcs if await smth}", + "{fun: await fun() for fun in funcs if await smth}", + "[await fun() async for fun in funcs]", + "{await fun() async for fun in funcs}", + "{fun: await fun() async for fun in funcs}", + "[await fun() async for fun in funcs if await smth]", + "{await fun() async for fun in funcs if await smth}", + "{fun: await fun() async for fun in funcs if await smth}", + ] + + for comp in comprehensions: + node = extract_node(comp) + self.assertTrue(node.generators[0].is_async) + + @require_version('3.6') + def test_async_comprehensions_as_string(self): + func_bodies = [ + "return [i async for i in aiter() if condition(i)]", + "return [await fun() for fun in funcs]", + "return {await fun() for fun in funcs}", + "return {fun: await fun() for fun in funcs}", + "return [await fun() for fun in funcs if await smth]", + "return {await fun() for fun in funcs if await smth}", + "return {fun: await fun() for fun in funcs if await smth}", + "return [await fun() async for fun in funcs]", + "return {await fun() async for fun in funcs}", + "return {fun: await fun() async for fun in funcs}", + "return [await fun() async for fun in funcs if await smth]", + "return {await fun() async for fun in funcs if await smth}", + "return {fun: await fun() async for fun in funcs if await smth}", + ] + for func_body in func_bodies: + code = dedent(''' + async def f(): + {}'''.format(func_body)) + func = extract_node(code) + self.assertEqual(func.as_string().strip(), code.strip()) + if __name__ == '__main__': unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_raw_building.py astroid-1.5.3/astroid/tests/unittest_raw_building.py --- astroid-1.4.9/astroid/tests/unittest_raw_building.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_raw_building.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,10 +1,16 @@ +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2014-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import inspect import os import unittest -from six.moves import builtins # pylint: disable=import-error +from six.moves import builtins -from astroid.builder import AstroidBuilder +from astroid.builder import AstroidBuilder, extract_node from astroid.raw_building import ( attach_dummy_node, build_module, build_class, build_function, build_from_import @@ -40,12 +46,14 @@ def test_build_function_args(self): args = ['myArgs1', 'myArgs2'] + # pylint: disable=no-member; not aware of postinit node = build_function('MyFunction', args) self.assertEqual('myArgs1', node.args.args[0].name) self.assertEqual('myArgs2', node.args.args[1].name) self.assertEqual(2, len(node.args.args)) def test_build_function_defaults(self): + # pylint: disable=no-member; not aware of postinit defaults = ['defaults1', 'defaults2'] node = build_function('MyFunction', None, defaults) self.assertEqual(2, len(node.args.defaults)) @@ -75,7 +83,7 @@ for name, _ in inspect.getmembers(builtins, predicate=inspect.isbuiltin): if name == 'print': continue - node = test_utils.extract_node('{0} #@'.format(name)) + node = extract_node('{0} #@'.format(name)) inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.FunctionDef, name) self.assertEqual(inferred.root().name, BUILTINS, name) diff -Nru astroid-1.4.9/astroid/tests/unittest_regrtest.py astroid-1.5.3/astroid/tests/unittest_regrtest.py --- astroid-1.4.9/astroid/tests/unittest_regrtest.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_regrtest.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,10 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2008, 2010-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2013-2014 Google, Inc. 
+# Copyright (c) 2014-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import sys import unittest import textwrap @@ -23,11 +13,11 @@ from astroid import MANAGER, Instance, nodes from astroid.bases import BUILTINS -from astroid.builder import AstroidBuilder +from astroid.builder import AstroidBuilder, extract_node from astroid import exceptions from astroid.raw_building import build_module from astroid.manager import AstroidManager -from astroid.test_utils import require_version, extract_node +from astroid.test_utils import require_version from astroid.tests import resources from astroid import transforms @@ -113,28 +103,9 @@ a = astroid['A'] self.assertTrue(a.newstyle) - - def test_pylint_config_attr(self): - try: - from pylint import lint # pylint: disable=unused-variable - except ImportError: - self.skipTest('pylint not available') - mod = MANAGER.ast_from_module_name('pylint.lint') - pylinter = mod['PyLinter'] - expect = ['OptionsManagerMixIn', 'object', 'MessagesHandlerMixIn', - 'ReportsHandlerMixIn', 'BaseTokenChecker', 'BaseChecker', - 'OptionsProviderMixIn'] - self.assertListEqual([c.name for c in pylinter.ancestors()], - expect) - self.assertTrue(list(Instance(pylinter).getattr('config'))) - inferred = list(Instance(pylinter).igetattr('config')) - self.assertEqual(len(inferred), 1) - self.assertEqual(inferred[0].root().name, 'optparse') - self.assertEqual(inferred[0].name, 'Values') - def test_numpy_crash(self): """test don't crash on numpy""" - #a crash occured somewhere in the past, and an + #a crash occurred somewhere in the past, and an # InferenceError instead of a crash was better, but now we even infer! try: import numpy # pylint: disable=unused-variable @@ -149,7 +120,7 @@ astroid = builder.string_build(data, __name__, __file__) callfunc = astroid.body[1].value.func inferred = callfunc.inferred() - self.assertEqual(len(inferred), 2) + self.assertEqual(len(inferred), 1) @require_version('3.0') def test_nameconstant(self): @@ -251,7 +222,7 @@ def test_ancestors_yes_in_bases(self): # Test for issue https://bitbucket.org/logilab/astroid/issue/84 - # This used to crash astroid with a TypeError, because an YES + # This used to crash astroid with a TypeError, because an Uninferable # node was present in the bases node = extract_node(""" def with_metaclass(meta, *bases): @@ -282,51 +253,51 @@ ''') self.assertRaises(exceptions.InferenceError, next, node.infer()) - def test_unicode_in_docstring(self): - # Crashed for astroid==1.4.1 - # Test for https://bitbucket.org/logilab/astroid/issues/273/ - - # In a regular file, "coding: utf-8" would have been used. 
- node = extract_node(u''' - from __future__ import unicode_literals - - class MyClass(object): - def method(self): - "With unicode : %s " - - instance = MyClass() - ''' % u"\u2019") - - next(node.value.infer()).as_string() - - def test_binop_generates_nodes_with_parents(self): - node = extract_node(''' - def no_op(*args): - pass - def foo(*args): - def inner(*more_args): - args + more_args #@ - return inner - ''') - inferred = next(node.infer()) - self.assertIsInstance(inferred, nodes.Tuple) - self.assertIsNotNone(inferred.parent) - self.assertIsInstance(inferred.parent, nodes.BinOp) - - def test_decorator_names_inference_error_leaking(self): - node = extract_node(''' - class Parent(object): - @property - def foo(self): - pass - - class Child(Parent): - @Parent.foo.getter - def foo(self): #@ - return super(Child, self).foo + ['oink'] - ''') - inferred = next(node.infer()) - self.assertEqual(inferred.decoratornames(), set()) + def test_unicode_in_docstring(self): + # Crashed for astroid==1.4.1 + # Test for https://bitbucket.org/logilab/astroid/issues/273/ + + # In a regular file, "coding: utf-8" would have been used. + node = extract_node(u''' + from __future__ import unicode_literals + + class MyClass(object): + def method(self): + "With unicode : %s " + + instance = MyClass() + ''' % u"\u2019") + + next(node.value.infer()).as_string() + + def test_binop_generates_nodes_with_parents(self): + node = extract_node(''' + def no_op(*args): + pass + def foo(*args): + def inner(*more_args): + args + more_args #@ + return inner + ''') + inferred = next(node.infer()) + self.assertIsInstance(inferred, nodes.Tuple) + self.assertIsNotNone(inferred.parent) + self.assertIsInstance(inferred.parent, nodes.BinOp) + + def test_decorator_names_inference_error_leaking(self): + node = extract_node(''' + class Parent(object): + @property + def foo(self): + pass + + class Child(Parent): + @Parent.foo.getter + def foo(self): #@ + return super(Child, self).foo + ['oink'] + ''') + inferred = next(node.infer()) + self.assertEqual(inferred.decoratornames(), set()) def test_ssl_protocol(self): node = extract_node(''' @@ -336,6 +307,16 @@ inferred = next(node.infer()) self.assertIsInstance(inferred, nodes.Const) + def test_recursive_property_method(self): + node = extract_node(''' + class APropert(): + @property + def property(self): + return self + APropert().property + ''') + next(node.infer()) + def test_uninferable_string_argument_of_namedtuple(self): node = extract_node(''' import collections diff -Nru astroid-1.4.9/astroid/tests/unittest_scoped_nodes.py astroid-1.5.3/astroid/tests/unittest_scoped_nodes.py --- astroid-1.4.9/astroid/tests/unittest_scoped_nodes.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_scoped_nodes.py 2017-04-17 11:23:27.000000000 +0000 @@ -1,20 +1,13 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2011, 2013-2015 Google, Inc. +# Copyright (c) 2013-2016 Claudiu Popa +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015 Philip Lorenz +# Copyright (c) 2015 Rene Zhang + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """tests for specific behaviour of astroid scoped nodes (i.e. module, class and function) """ @@ -29,9 +22,10 @@ from astroid import scoped_nodes from astroid import util from astroid.exceptions import ( - InferenceError, NotFoundError, + InferenceError, AttributeInferenceError, NoDefault, ResolveError, MroError, InconsistentMroError, DuplicateBasesError, + TooManyLevelsError, ) from astroid.bases import ( BUILTINS, Instance, @@ -75,7 +69,7 @@ os.path.abspath(resources.find('data/module.py'))) self.assertEqual(len(self.module.getattr('__dict__')), 1) self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) - self.assertRaises(NotFoundError, self.module.getattr, '__path__') + self.assertRaises(AttributeInferenceError, self.module.getattr, '__path__') self.assertEqual(len(self.pack.getattr('__path__')), 1) self.assertIsInstance(self.pack.getattr('__path__')[0], nodes.List) @@ -101,7 +95,6 @@ self.assertEqual(cnx.name, 'Connection') self.assertEqual(cnx.root().name, 'data.SSL1.Connection1') self.assertEqual(len(self.nonregr.getattr('enumerate')), 2) - # raise ResolveError self.assertRaises(InferenceError, self.nonregr.igetattr, 'YOAA') def test_wildcard_import_names(self): @@ -121,7 +114,7 @@ __all__ = 'Aaa', '_bla', 'name' ''') values = sorted(['Aaa', 'name', 'other', 'func']) - self.assertEqual(sorted(m._public_names()), values) + self.assertEqual(sorted(m.public_names()), values) m = builder.parse(''' name = 'a' _bla = 2 @@ -130,7 +123,7 @@ def func(): return 'yo' ''') - res = sorted(m._public_names()) + res = sorted(m.public_names()) self.assertEqual(res, values) m = builder.parse(''' @@ -138,14 +131,14 @@ trop = "test" __all__ = (trop, "test1", tzop, 42) ''') - res = sorted(m._public_names()) + res = sorted(m.public_names()) self.assertEqual(res, ["trop", "tzop"]) m = builder.parse(''' test = tzop = 42 __all__ = ('test', ) + ('tzop', ) ''') - res = sorted(m._public_names()) + res = sorted(m.public_names()) self.assertEqual(res, ['test', 'tzop']) def test_module_getattr(self): @@ -183,6 +176,18 @@ modname = mod.relative_to_absolute_name('', 1) self.assertEqual(modname, 'very.multi') + def test_relative_to_absolute_name_beyond_top_level(self): + mod = nodes.Module('a.b.c', '') + mod.package = True + for level in (5, 4): + with self.assertRaises(TooManyLevelsError) as cm: + mod.relative_to_absolute_name('test', level) + + expected = ("Relative import with too many levels " + "({level}) for module {name!r}".format( + level=level - 1, name=mod.name)) + self.assertEqual(expected, str(cm.exception)) + def test_import_1(self): data = '''from . import subpackage''' sys.path.insert(0, resources.find('data')) @@ -239,8 +244,8 @@ # only Module.stream as the recommended way to retrieve # its file stream. 
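# A hedged sketch of the API the deprecation test above refers to: the
# Module.file_stream attribute is deprecated and Module.stream() is the
# recommended way to get at the module source; the caller closes the stream.
# The path and module name below are illustrative only.
from astroid.builder import AstroidBuilder

module = AstroidBuilder().file_build('data/module.py', 'data.module')
stream = module.stream()
try:
    print(stream.read(64))
finally:
    stream.close()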
with warnings.catch_warnings(record=True) as cm: - warnings.simplefilter("always") - self.assertIsNot(astroid.file_stream, astroid.file_stream) + with test_utils.enable_warning(PendingDeprecationWarning): + self.assertIsNot(astroid.file_stream, astroid.file_stream) self.assertGreater(len(cm), 1) self.assertEqual(cm[0].category, PendingDeprecationWarning) @@ -263,7 +268,8 @@ self.assertEqual(func.getattr('__name__')[0].value, 'make_class') self.assertEqual(len(func.getattr('__doc__')), 1) self.assertIsInstance(func.getattr('__doc__')[0], nodes.Const) - self.assertEqual(func.getattr('__doc__')[0].value, 'check base is correctly resolved to Concrete0') + self.assertEqual(func.getattr('__doc__')[0].value, + 'check base is correctly resolved to Concrete0') self.assertEqual(len(self.module.getattr('__dict__')), 1) self.assertIsInstance(self.module.getattr('__dict__')[0], nodes.Dict) @@ -307,12 +313,11 @@ ''' tree = builder.parse(code) func = tree['nested_args'] - self.assertEqual(sorted(func._locals), ['a', 'b', 'c', 'd']) + self.assertEqual(sorted(func.locals), ['a', 'b', 'c', 'd']) self.assertEqual(func.args.format_args(), 'a, (b, c, d)') def test_four_args(self): func = self.module['four_args'] - #self.assertEqual(func.args.args, ['a', ('b', 'c', 'd')]) local = sorted(func.keys()) self.assertEqual(local, ['a', 'b', 'c', 'd']) self.assertEqual(func.type, 'function') @@ -324,6 +329,15 @@ func = self.module['four_args'] self.assertEqual(func.args.format_args(), 'a, b, c, d') + @test_utils.require_version('3.0') + def test_format_args_keyword_only_args(self): + node = builder.parse(''' + def test(a: int, *, b: dict): + pass + ''').body[-1].args + formatted = node.format_args() + self.assertEqual(formatted, 'a:int, *, b:dict') + def test_is_generator(self): self.assertTrue(self.module2['generator'].is_generator()) self.assertFalse(self.module2['not_a_generator'].is_generator()) @@ -341,7 +355,7 @@ self.assertFalse(func.is_abstract(pass_is_abstract=False)) def test_is_abstract_decorated(self): - methods = test_utils.extract_node(""" + methods = builder.extract_node(""" import abc class Klass(object): @@ -473,14 +487,14 @@ def stcmethod(cls): pass """) - node = astroid._locals['Node'][0] - self.assertEqual(node._locals['clsmethod_subclass'][0].type, + node = astroid.locals['Node'][0] + self.assertEqual(node.locals['clsmethod_subclass'][0].type, 'classmethod') - self.assertEqual(node._locals['clsmethod'][0].type, + self.assertEqual(node.locals['clsmethod'][0].type, 'classmethod') - self.assertEqual(node._locals['staticmethod_subclass'][0].type, + self.assertEqual(node.locals['staticmethod_subclass'][0].type, 'staticmethod') - self.assertEqual(node._locals['stcmethod'][0].type, + self.assertEqual(node.locals['stcmethod'][0].type, 'staticmethod') def test_decorator_builtin_descriptors(self): @@ -546,28 +560,28 @@ def long_classmethod(cls): pass """) - node = astroid._locals['SomeClass'][0] - self.assertEqual(node._locals['static'][0].type, + node = astroid.locals['SomeClass'][0] + self.assertEqual(node.locals['static'][0].type, 'staticmethod') - self.assertEqual(node._locals['classmethod'][0].type, + self.assertEqual(node.locals['classmethod'][0].type, 'classmethod') - self.assertEqual(node._locals['not_so_static'][0].type, + self.assertEqual(node.locals['not_so_static'][0].type, 'method') - self.assertEqual(node._locals['not_so_classmethod'][0].type, + self.assertEqual(node.locals['not_so_classmethod'][0].type, 'method') - self.assertEqual(node._locals['classmethod_wrapped'][0].type, + 
self.assertEqual(node.locals['classmethod_wrapped'][0].type, 'classmethod') - self.assertEqual(node._locals['staticmethod_wrapped'][0].type, + self.assertEqual(node.locals['staticmethod_wrapped'][0].type, 'staticmethod') - self.assertEqual(node._locals['long_classmethod'][0].type, + self.assertEqual(node.locals['long_classmethod'][0].type, 'classmethod') def test_igetattr(self): - func = test_utils.extract_node(''' + func = builder.extract_node(''' def test(): pass ''') - func._instance_attrs['value'] = [nodes.Const(42)] + func.instance_attrs['value'] = [nodes.Const(42)] value = func.getattr('value') self.assertEqual(len(value), 1) self.assertIsInstance(value[0], nodes.Const) @@ -578,16 +592,27 @@ @test_utils.require_version(minver='3.0') def test_return_annotation_is_not_the_last(self): - func = builder.parse(''' + func = builder.extract_node(''' def test() -> bytes: pass pass return - ''').body[0] + ''') last_child = func.last_child() self.assertIsInstance(last_child, nodes.Return) self.assertEqual(func.tolineno, 5) + @test_utils.require_version(minver='3.6') + def test_method_init_subclass(self): + klass = builder.extract_node(''' + class MyClass: + def __init_subclass__(cls): + pass + ''') + method = klass['__init_subclass__'] + self.assertEqual([n.name for n in method.args.args], ['cls']) + self.assertEqual(method.type, 'classmethod') + class ClassNodeTest(ModuleLoader, unittest.TestCase): @@ -608,7 +633,7 @@ self.assertEqual(cls.getattr('__module__')[0].value, 'data.module') self.assertEqual(len(cls.getattr('__dict__')), 1) if not cls.newstyle: - self.assertRaises(NotFoundError, cls.getattr, '__mro__') + self.assertRaises(AttributeInferenceError, cls.getattr, '__mro__') for cls in (nodes.List._proxied, nodes.Const(1)._proxied): self.assertEqual(len(cls.getattr('__bases__')), 1) self.assertEqual(len(cls.getattr('__name__')), 1) @@ -619,7 +644,7 @@ self.assertEqual(len(cls.getattr('__mro__')), 1) def test__mro__attribute(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' class A(object): pass class B(object): pass class C(A, B): pass @@ -629,7 +654,7 @@ self.assertEqual(mro.elts, node.mro()) def test__bases__attribute(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' class A(object): pass class B(object): pass class C(A, B): pass @@ -643,20 +668,20 @@ def test_cls_special_attributes_2(self): astroid = builder.parse(''' - class A: pass - class B: pass + class A(object): pass + class B(object): pass A.__bases__ += (B,) ''', __name__) self.assertEqual(len(astroid['A'].getattr('__bases__')), 2) - self.assertIsInstance(astroid['A'].getattr('__bases__')[0], nodes.Tuple) - self.assertIsInstance(astroid['A'].getattr('__bases__')[1], nodes.AssignAttr) + self.assertIsInstance(astroid['A'].getattr('__bases__')[1], nodes.Tuple) + self.assertIsInstance(astroid['A'].getattr('__bases__')[0], nodes.AssignAttr) def test_instance_special_attributes(self): for inst in (Instance(self.module['YO']), nodes.List(), nodes.Const(1)): - self.assertRaises(NotFoundError, inst.getattr, '__mro__') - self.assertRaises(NotFoundError, inst.getattr, '__bases__') - self.assertRaises(NotFoundError, inst.getattr, '__name__') + self.assertRaises(AttributeInferenceError, inst.getattr, '__mro__') + self.assertRaises(AttributeInferenceError, inst.getattr, '__bases__') + self.assertRaises(AttributeInferenceError, inst.getattr, '__name__') self.assertEqual(len(inst.getattr('__dict__')), 1) self.assertEqual(len(inst.getattr('__doc__')), 1) @@ -752,7 +777,7 @@ method_locals = 
klass2.local_attr('method') self.assertEqual(len(method_locals), 1) self.assertEqual(method_locals[0].name, 'method') - self.assertRaises(NotFoundError, klass2.local_attr, 'nonexistant') + self.assertRaises(AttributeInferenceError, klass2.local_attr, 'nonexistent') methods = {m.name for m in klass2.methods()} self.assertTrue(methods.issuperset(expected_methods)) @@ -810,7 +835,7 @@ ''' astroid = builder.parse(data, __name__) cls = astroid['WebAppObject'] - self.assertEqual(sorted(cls._locals.keys()), + self.assertEqual(sorted(cls.locals.keys()), ['appli', 'config', 'registered', 'schema']) def test_class_getattr(self): @@ -1009,9 +1034,9 @@ class D: pass """) a = astroid['A'] - b = a._locals['B'][0] + b = a.locals['B'][0] c = astroid['C'] - d = c._locals['D'][0] + d = c.locals['D'][0] self.assertEqual(a.metaclass().name, 'ABCMeta') self.assertFalse(b.newstyle) self.assertIsNone(b.metaclass()) @@ -1063,7 +1088,7 @@ self.assertEqual(meta.name, metaclass) def test_metaclass_type(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" def with_metaclass(meta, base=object): return meta("NewBase", (base, ), {}) @@ -1075,7 +1100,7 @@ [base.name for base in klass.ancestors()]) def test_no_infinite_metaclass_loop(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" class SSS(object): class JJJ(object): @@ -1097,7 +1122,7 @@ self.assertIn('JJJ', ancestors) def test_no_infinite_metaclass_loop_with_redefine(self): - nodes = test_utils.extract_node(""" + ast_nodes = builder.extract_node(""" import datetime class A(datetime.date): #@ @@ -1111,11 +1136,11 @@ datetime.date = A datetime.date = B """) - for klass in nodes: + for klass in ast_nodes: self.assertEqual(None, klass.metaclass()) def test_metaclass_generator_hack(self): - klass = test_utils.extract_node(""" + klass = builder.extract_node(""" import six class WithMeta(six.with_metaclass(type, object)): #@ @@ -1128,7 +1153,7 @@ 'type', klass.metaclass().name) def test_using_six_add_metaclass(self): - klass = test_utils.extract_node(''' + klass = builder.extract_node(''' import six import abc @@ -1142,7 +1167,7 @@ self.assertEqual(metaclass.qname(), 'abc.ABCMeta') def test_using_invalid_six_add_metaclass_call(self): - klass = test_utils.extract_node(''' + klass = builder.extract_node(''' import six @six.add_metaclass() class Invalid(object): @@ -1166,7 +1191,7 @@ instance = astroid['tgts'] # used to raise "'_Yes' object is not iterable", see # https://bitbucket.org/logilab/astroid/issue/17 - self.assertEqual(list(instance.infer()), [util.YES]) + self.assertEqual(list(instance.infer()), [util.Uninferable]) def test_slots(self): astroid = builder.parse(""" @@ -1237,6 +1262,18 @@ module['OldStyle'].slots() self.assertEqual(str(cm.exception), msg) + def test_slots_for_dict_keys(self): + module = builder.parse(''' + class Issue(object): + SlotDefaults = {'id': 0, 'id1':1} + __slots__ = SlotDefaults.keys() + ''') + cls = module['Issue'] + slots = cls.slots() + self.assertEqual(len(slots), 2) + self.assertEqual(slots[0].value, 'id') + self.assertEqual(slots[1].value, 'id1') + def test_slots_empty_list_of_slots(self): module = builder.parse(""" class Klass(object): @@ -1279,7 +1316,7 @@ @test_utils.require_version(maxver='3.0') def test_no_mro_for_old_style(self): - node = test_utils.extract_node(""" + node = builder.extract_node(""" class Old: pass""") with self.assertRaises(NotImplementedError) as cm: node.mro() @@ -1287,8 +1324,34 @@ "old-style classes.") @test_utils.require_version(maxver='3.0') + def 
test_mro_for_classes_with_old_style_in_mro(self): + node = builder.extract_node(''' + class Factory: + pass + class ClientFactory(Factory): + pass + class ReconnectingClientFactory(ClientFactory): + pass + class WebSocketAdapterFactory(object): + pass + class WebSocketClientFactory(WebSocketAdapterFactory, ClientFactory): + pass + class WampWebSocketClientFactory(WebSocketClientFactory): + pass + class RetryFactory(WampWebSocketClientFactory, ReconnectingClientFactory): + pas + ''') + self.assertEqualMro( + node, + ['RetryFactory', 'WampWebSocketClientFactory', + 'WebSocketClientFactory', 'WebSocketAdapterFactory', 'object', + 'ReconnectingClientFactory', 'ClientFactory', + 'Factory'] + ) + + @test_utils.require_version(maxver='3.0') def test_combined_newstyle_oldstyle_in_mro(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' class Old: pass class New(object): @@ -1358,18 +1421,16 @@ self.assertEqualMro(astroid['E1'], ['E1', 'C1', 'B1', 'A1', 'object']) with self.assertRaises(InconsistentMroError) as cm: astroid['F1'].mro() - self.assertEqual(str(cm.exception), - "Cannot create a consistent method resolution order " - "for bases (B1, C1, A1, object), " - "(C1, B1, A1, object)") - + A1 = astroid.getattr('A1')[0] + B1 = astroid.getattr('B1')[0] + C1 = astroid.getattr('C1')[0] + object_ = builder.MANAGER.astroid_cache[BUILTINS].getattr('object')[0] + self.assertEqual(cm.exception.mros, [[B1, C1, A1, object_], + [C1, B1, A1, object_]]) with self.assertRaises(InconsistentMroError) as cm: astroid['G1'].mro() - self.assertEqual(str(cm.exception), - "Cannot create a consistent method resolution order " - "for bases (C1, B1, A1, object), " - "(B1, C1, A1, object)") - + self.assertEqual(cm.exception.mros, [[C1, B1, A1, object_], + [B1, C1, A1, object_]]) self.assertEqualMro( astroid['PedalWheelBoat'], ["PedalWheelBoat", "EngineLess", @@ -1390,12 +1451,13 @@ with self.assertRaises(DuplicateBasesError) as cm: astroid['Duplicates'].mro() - self.assertEqual(str(cm.exception), "Duplicates found in the mro.") - self.assertTrue(issubclass(cm.exception.__class__, MroError)) - self.assertTrue(issubclass(cm.exception.__class__, ResolveError)) + Duplicates = astroid.getattr('Duplicates')[0] + self.assertEqual(cm.exception.cls, Duplicates) + self.assertIsInstance(cm.exception, MroError) + self.assertIsInstance(cm.exception, ResolveError) def test_generator_from_infer_call_result_parent(self): - func = test_utils.extract_node(""" + func = builder.extract_node(""" import contextlib @contextlib.contextmanager @@ -1407,7 +1469,7 @@ self.assertEqual(result.parent, func) def test_type_three_arguments(self): - classes = test_utils.extract_node(""" + classes = builder.extract_node(""" type('A', (object, ), {"a": 1, "b": 2, missing: 3}) #@ """) first = next(classes.infer()) @@ -1418,26 +1480,107 @@ self.assertEqual(first["a"].value, 1) self.assertIsInstance(first["b"], nodes.Const) self.assertEqual(first["b"].value, 2) - with self.assertRaises(NotFoundError): + with self.assertRaises(AttributeInferenceError): first.getattr("missing") def test_implicit_metaclass(self): - cls = test_utils.extract_node(""" + cls = builder.extract_node(""" class A(object): pass """) type_cls = scoped_nodes.builtin_lookup("type")[1][0] self.assertEqual(cls.implicit_metaclass(), type_cls) + def test_implicit_metaclass_lookup(self): + cls = builder.extract_node(''' + class A(object): + pass + ''') + instance = cls.instantiate_class() + func = cls.getattr('mro') + self.assertEqual(len(func), 1) + 
self.assertRaises(AttributeInferenceError, instance.getattr, 'mro') + + def test_metaclass_lookup_using_same_class(self): + # Check that we don't have recursive attribute access for metaclass + cls = builder.extract_node(''' + class A(object): pass + ''') + self.assertEqual(len(cls.getattr('mro')), 1) + + def test_metaclass_lookup_inferrence_errors(self): + module = builder.parse(''' + import six + + class Metaclass(type): + foo = lala + + @six.add_metaclass(Metaclass) + class B(object): pass + ''') + cls = module['B'] + self.assertEqual(util.Uninferable, next(cls.igetattr('foo'))) + + def test_metaclass_lookup(self): + module = builder.parse(''' + import six + + class Metaclass(type): + foo = 42 + @classmethod + def class_method(cls): + pass + def normal_method(cls): + pass + @property + def meta_property(cls): + return 42 + @staticmethod + def static(): + pass + + @six.add_metaclass(Metaclass) + class A(object): + pass + ''') + acls = module['A'] + normal_attr = next(acls.igetattr('foo')) + self.assertIsInstance(normal_attr, nodes.Const) + self.assertEqual(normal_attr.value, 42) + + class_method = next(acls.igetattr('class_method')) + self.assertIsInstance(class_method, BoundMethod) + self.assertEqual(class_method.bound, module['Metaclass']) + + normal_method = next(acls.igetattr('normal_method')) + self.assertIsInstance(normal_method, BoundMethod) + self.assertEqual(normal_method.bound, module['A']) + + # Attribute access for properties: + # from the metaclass is a property object + # from the class that uses the metaclass, the value + # of the property + property_meta = next(module['Metaclass'].igetattr('meta_property')) + self.assertIsInstance(property_meta, UnboundMethod) + wrapping = scoped_nodes.get_wrapping_class(property_meta) + self.assertEqual(wrapping, module['Metaclass']) + + property_class = next(acls.igetattr('meta_property')) + self.assertIsInstance(property_class, nodes.Const) + self.assertEqual(property_class.value, 42) + + static = next(acls.igetattr('static')) + self.assertIsInstance(static, scoped_nodes.FunctionDef) + @test_utils.require_version(maxver='3.0') def test_implicit_metaclass_is_none(self): - cls = test_utils.extract_node(""" + cls = builder.extract_node(""" class A: pass """) self.assertIsNone(cls.implicit_metaclass()) def test_local_attr_invalid_mro(self): - cls = test_utils.extract_node(""" + cls = builder.extract_node(""" # A has an invalid MRO, local_attr should fallback # to using .ancestors. 
class A(object, object): @@ -1486,7 +1629,7 @@ self.assertRaises(DuplicateBasesError, module['B'].mro) def test_instance_bound_method_lambdas(self): - ast_nodes = test_utils.extract_node(''' + ast_nodes = builder.extract_node(''' class Test(object): #@ lam = lambda self: self not_method = lambda xargs: xargs @@ -1503,7 +1646,7 @@ self.assertIsInstance(not_method, scoped_nodes.Lambda) def test_class_extra_decorators_frame_is_not_class(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' def ala(): def bala(): #@ func = 42 @@ -1511,7 +1654,7 @@ self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_callfunc_are_considered(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass @@ -1520,7 +1663,7 @@ self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_assignment_names_are_considered(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass @@ -1531,7 +1674,7 @@ self.assertEqual(ast_node.extra_decorators, []) def test_class_extra_decorators_only_same_name_considered(self): - ast_node = test_utils.extract_node(''' + ast_node = builder.extract_node(''' class Ala(object): def func(self): #@ pass @@ -1541,7 +1684,7 @@ self.assertEqual(ast_node.type, 'method') def test_class_extra_decorators(self): - static_method, clsmethod = test_utils.extract_node(''' + static_method, clsmethod = builder.extract_node(''' class Ala(object): def static(self): #@ pass @@ -1556,7 +1699,7 @@ self.assertEqual(static_method.type, 'staticmethod') def test_extra_decorators_only_class_level_assignments(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' def _bind(arg): return arg.bind @@ -1578,6 +1721,18 @@ parent = bind.scope() self.assertEqual(len(parent.extra_decorators), 0) + @test_utils.require_version(minver='3.0') + def test_class_keywords(self): + data = ''' + class TestKlass(object, metaclass=TestMetaKlass, + foo=42, bar='baz'): + pass + ''' + astroid = builder.parse(data, __name__) + cls = astroid['TestKlass'] + self.assertEqual(len(cls.keywords), 2) + self.assertEqual([x.arg for x in cls.keywords], ['foo', 'bar']) + if __name__ == '__main__': unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_transforms.py astroid-1.5.3/astroid/tests/unittest_transforms.py --- astroid-1.4.9/astroid/tests/unittest_transforms.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_transforms.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,245 +1,234 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . 
- -from __future__ import print_function - -import contextlib -import time -import unittest - -from astroid import builder -from astroid import nodes -from astroid import parse -from astroid import transforms - - -@contextlib.contextmanager -def add_transform(manager, node, transform, predicate=None): - manager.register_transform(node, transform, predicate) - try: - yield - finally: - manager.unregister_transform(node, transform, predicate) - - -class TestTransforms(unittest.TestCase): - - def setUp(self): - self.transformer = transforms.TransformVisitor() - - def parse_transform(self, code): - module = parse(code, apply_transforms=False) - return self.transformer.visit(module) - - def test_function_inlining_transform(self): - def transform_call(node): - # Let's do some function inlining - inferred = next(node.infer()) - return inferred - - self.transformer.register_transform(nodes.Call, - transform_call) - - module = self.parse_transform(''' - def test(): return 42 - test() #@ - ''') - - self.assertIsInstance(module.body[1], nodes.Expr) - self.assertIsInstance(module.body[1].value, nodes.Const) - self.assertEqual(module.body[1].value.value, 42) - - def test_recursive_transforms_into_astroid_fields(self): - # Test that the transformer walks properly the tree - # by going recursively into the _astroid_fields per each node. - def transform_compare(node): - # Let's check the values of the ops - _, right = node.ops[0] - # Assume they are Consts and they were transformed before - # us. - return nodes.const_factory(node.left.value < right.value) - - def transform_name(node): - # Should be Consts - return next(node.infer()) - - self.transformer.register_transform(nodes.Compare, transform_compare) - self.transformer.register_transform(nodes.Name, transform_name) - - module = self.parse_transform(''' - a = 42 - b = 24 - a < b - ''') - - self.assertIsInstance(module.body[2], nodes.Expr) - self.assertIsInstance(module.body[2].value, nodes.Const) - self.assertFalse(module.body[2].value.value) - - def test_transform_patches_locals(self): - def transform_function(node): - assign = nodes.Assign() - name = nodes.AssignName() - name.name = 'value' - assign.targets = [name] - assign.value = nodes.const_factory(42) - node.body.append(assign) - - self.transformer.register_transform(nodes.FunctionDef, - transform_function) - - module = self.parse_transform(''' - def test(): - pass - ''') - - func = module.body[0] - self.assertEqual(len(func.body), 2) - self.assertIsInstance(func.body[1], nodes.Assign) - self.assertEqual(func.body[1].as_string(), 'value = 42') - - def test_predicates(self): - def transform_call(node): - inferred = next(node.infer()) - return inferred - - def should_inline(node): - return node.func.name.startswith('inlineme') - - self.transformer.register_transform(nodes.Call, - transform_call, - should_inline) - - module = self.parse_transform(''' - def inlineme_1(): - return 24 - def dont_inline_me(): - return 42 - def inlineme_2(): - return 2 - inlineme_1() - dont_inline_me() - inlineme_2() - ''') - values = module.body[-3:] - self.assertIsInstance(values[0], nodes.Expr) - self.assertIsInstance(values[0].value, nodes.Const) - self.assertEqual(values[0].value.value, 24) - self.assertIsInstance(values[1], nodes.Expr) - self.assertIsInstance(values[1].value, nodes.Call) - self.assertIsInstance(values[2], nodes.Expr) - self.assertIsInstance(values[2].value, nodes.Const) - self.assertEqual(values[2].value.value, 2) - - def test_transforms_are_separated(self): - # Test that the transforming is done 
at a separate - # step, which means that we are not doing inference - # on a partially constructred tree anymore, which was the - # source of crashes in the past when certain inference rules - # were used in a transform. - def transform_function(node): - if node.decorators: - for decorator in node.decorators.nodes: - inferred = next(decorator.infer()) - if inferred.qname() == 'abc.abstractmethod': - return next(node.infer_call_result(node)) - - manager = builder.MANAGER - with add_transform(manager, nodes.FunctionDef, transform_function): - module = builder.parse(''' - import abc - from abc import abstractmethod - - class A(object): - @abc.abstractmethod - def ala(self): - return 24 - - @abstractmethod - def bala(self): - return 42 - ''') - - cls = module['A'] - ala = cls.body[0] - bala = cls.body[1] - self.assertIsInstance(ala, nodes.Const) - self.assertEqual(ala.value, 24) - self.assertIsInstance(bala, nodes.Const) - self.assertEqual(bala.value, 42) - - def test_transforms_are_called_for_builtin_modules(self): - # Test that transforms are called for builtin modules. - def transform_function(node): - name = nodes.AssignName() - name.name = 'value' - node.args.args = [name] - return node - - manager = builder.MANAGER - predicate = lambda node: node.root().name == 'time' - with add_transform(manager, nodes.FunctionDef, - transform_function, predicate): - builder_instance = builder.AstroidBuilder() - module = builder_instance.module_build(time) - - asctime = module['asctime'] - self.assertEqual(len(asctime.args.args), 1) - self.assertIsInstance(asctime.args.args[0], nodes.AssignName) - self.assertEqual(asctime.args.args[0].name, 'value') - - def test_builder_apply_transforms(self): - def transform_function(node): - return nodes.const_factory(42) - - manager = builder.MANAGER - with add_transform(manager, nodes.FunctionDef, transform_function): - astroid_builder = builder.AstroidBuilder(apply_transforms=False) - module = astroid_builder.string_build('''def test(): pass''') - - # The transform wasn't applied. - self.assertIsInstance(module.body[0], nodes.FunctionDef) - - def test_transform_crashes_on_is_subtype_of(self): - # Test that we don't crash when having is_subtype_of - # in a transform, as per issue #188. This happened - # before, when the transforms weren't in their own step. 
- def transform_class(cls): - if cls.is_subtype_of('django.db.models.base.Model'): - return cls - return cls - - self.transformer.register_transform(nodes.ClassDef, - transform_class) - - self.parse_transform(''' - # Change environ to automatically call putenv() if it exists - import os - putenv = os.putenv - try: - # This will fail if there's no putenv - putenv - except NameError: - pass - else: - import UserDict - ''') - - -if __name__ == '__main__': - unittest.main() +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + + +from __future__ import print_function + +import contextlib +import time +import unittest + +from astroid import builder +from astroid import nodes +from astroid import parse +from astroid import transforms + + +@contextlib.contextmanager +def add_transform(manager, node, transform, predicate=None): + manager.register_transform(node, transform, predicate) + try: + yield + finally: + manager.unregister_transform(node, transform, predicate) + + +class TestTransforms(unittest.TestCase): + + def setUp(self): + self.transformer = transforms.TransformVisitor() + + def parse_transform(self, code): + module = parse(code, apply_transforms=False) + return self.transformer.visit(module) + + def test_function_inlining_transform(self): + def transform_call(node): + # Let's do some function inlining + inferred = next(node.infer()) + return inferred + + self.transformer.register_transform(nodes.Call, + transform_call) + + module = self.parse_transform(''' + def test(): return 42 + test() #@ + ''') + + self.assertIsInstance(module.body[1], nodes.Expr) + self.assertIsInstance(module.body[1].value, nodes.Const) + self.assertEqual(module.body[1].value.value, 42) + + def test_recursive_transforms_into_astroid_fields(self): + # Test that the transformer walks properly the tree + # by going recursively into the _astroid_fields per each node. + def transform_compare(node): + # Let's check the values of the ops + _, right = node.ops[0] + # Assume they are Consts and they were transformed before + # us. 
+ return nodes.const_factory(node.left.value < right.value) + + def transform_name(node): + # Should be Consts + return next(node.infer()) + + self.transformer.register_transform(nodes.Compare, transform_compare) + self.transformer.register_transform(nodes.Name, transform_name) + + module = self.parse_transform(''' + a = 42 + b = 24 + a < b + ''') + + self.assertIsInstance(module.body[2], nodes.Expr) + self.assertIsInstance(module.body[2].value, nodes.Const) + self.assertFalse(module.body[2].value.value) + + def test_transform_patches_locals(self): + def transform_function(node): + assign = nodes.Assign() + name = nodes.AssignName() + name.name = 'value' + assign.targets = [name] + assign.value = nodes.const_factory(42) + node.body.append(assign) + + self.transformer.register_transform(nodes.FunctionDef, + transform_function) + + module = self.parse_transform(''' + def test(): + pass + ''') + + func = module.body[0] + self.assertEqual(len(func.body), 2) + self.assertIsInstance(func.body[1], nodes.Assign) + self.assertEqual(func.body[1].as_string(), 'value = 42') + + def test_predicates(self): + def transform_call(node): + inferred = next(node.infer()) + return inferred + + def should_inline(node): + return node.func.name.startswith('inlineme') + + self.transformer.register_transform(nodes.Call, + transform_call, + should_inline) + + module = self.parse_transform(''' + def inlineme_1(): + return 24 + def dont_inline_me(): + return 42 + def inlineme_2(): + return 2 + inlineme_1() + dont_inline_me() + inlineme_2() + ''') + values = module.body[-3:] + self.assertIsInstance(values[0], nodes.Expr) + self.assertIsInstance(values[0].value, nodes.Const) + self.assertEqual(values[0].value.value, 24) + self.assertIsInstance(values[1], nodes.Expr) + self.assertIsInstance(values[1].value, nodes.Call) + self.assertIsInstance(values[2], nodes.Expr) + self.assertIsInstance(values[2].value, nodes.Const) + self.assertEqual(values[2].value.value, 2) + + def test_transforms_are_separated(self): + # Test that the transforming is done at a separate + # step, which means that we are not doing inference + # on a partially constructed tree anymore, which was the + # source of crashes in the past when certain inference rules + # were used in a transform. + def transform_function(node): + if node.decorators: + for decorator in node.decorators.nodes: + inferred = next(decorator.infer()) + if inferred.qname() == 'abc.abstractmethod': + return next(node.infer_call_result(node)) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + module = builder.parse(''' + import abc + from abc import abstractmethod + + class A(object): + @abc.abstractmethod + def ala(self): + return 24 + + @abstractmethod + def bala(self): + return 42 + ''') + + cls = module['A'] + ala = cls.body[0] + bala = cls.body[1] + self.assertIsInstance(ala, nodes.Const) + self.assertEqual(ala.value, 24) + self.assertIsInstance(bala, nodes.Const) + self.assertEqual(bala.value, 42) + + def test_transforms_are_called_for_builtin_modules(self): + # Test that transforms are called for builtin modules. 
+ def transform_function(node): + name = nodes.AssignName() + name.name = 'value' + node.args.args = [name] + return node + + manager = builder.MANAGER + predicate = lambda node: node.root().name == 'time' + with add_transform(manager, nodes.FunctionDef, + transform_function, predicate): + builder_instance = builder.AstroidBuilder() + module = builder_instance.module_build(time) + + asctime = module['asctime'] + self.assertEqual(len(asctime.args.args), 1) + self.assertIsInstance(asctime.args.args[0], nodes.AssignName) + self.assertEqual(asctime.args.args[0].name, 'value') + + def test_builder_apply_transforms(self): + def transform_function(node): + return nodes.const_factory(42) + + manager = builder.MANAGER + with add_transform(manager, nodes.FunctionDef, transform_function): + astroid_builder = builder.AstroidBuilder(apply_transforms=False) + module = astroid_builder.string_build('''def test(): pass''') + + # The transform wasn't applied. + self.assertIsInstance(module.body[0], nodes.FunctionDef) + + def test_transform_crashes_on_is_subtype_of(self): + # Test that we don't crash when having is_subtype_of + # in a transform, as per issue #188. This happened + # before, when the transforms weren't in their own step. + def transform_class(cls): + if cls.is_subtype_of('django.db.models.base.Model'): + return cls + return cls + + self.transformer.register_transform(nodes.ClassDef, + transform_class) + + self.parse_transform(''' + # Change environ to automatically call putenv() if it exists + import os + putenv = os.putenv + try: + # This will fail if there's no putenv + putenv + except NameError: + pass + else: + import UserDict + ''') + + +if __name__ == '__main__': + unittest.main() diff -Nru astroid-1.4.9/astroid/tests/unittest_utils.py astroid-1.5.3/astroid/tests/unittest_utils.py --- astroid-1.4.9/astroid/tests/unittest_utils.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/tests/unittest_utils.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,27 +1,15 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . +# Copyright (c) 2014, 2016 Google, Inc. 
+# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import unittest from astroid import builder from astroid import InferenceError from astroid import nodes from astroid import node_classes -from astroid import test_utils from astroid import util as astroid_util @@ -100,7 +88,7 @@ self.assertEqual(node_classes.are_exclusive(f4, f2), True) def test_unpack_infer_uninferable_nodes(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' x = [A] * 1 f = [x, [A] * 2] f @@ -108,11 +96,11 @@ inferred = next(node.infer()) unpacked = list(node_classes.unpack_infer(inferred)) self.assertEqual(len(unpacked), 3) - self.assertTrue(all(elt is astroid_util.YES + self.assertTrue(all(elt is astroid_util.Uninferable for elt in unpacked)) def test_unpack_infer_empty_tuple(self): - node = test_utils.extract_node(''' + node = builder.extract_node(''' () ''') inferred = next(node.infer()) diff -Nru astroid-1.4.9/astroid/test_utils.py astroid-1.5.3/astroid/test_utils.py --- astroid-1.4.9/astroid/test_utils.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/test_utils.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,172 +1,18 @@ +# Copyright (c) 2013-2014 Google, Inc. +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + """Utility functions for test code that uses astroid ASTs as input.""" +import contextlib import functools import sys +import warnings from astroid import nodes -from astroid import builder -# The name of the transient function that is used to -# wrap expressions to be extracted when calling -# extract_node. -_TRANSIENT_FUNCTION = '__' - -# The comment used to select a statement to be extracted -# when calling extract_node. -_STATEMENT_SELECTOR = '#@' - -def _extract_expressions(node): - """Find expressions in a call to _TRANSIENT_FUNCTION and extract them. - - The function walks the AST recursively to search for expressions that - are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an - expression, it completely removes the function call node from the tree, - replacing it by the wrapped expression inside the parent. - - :param node: An astroid node. - :type node: astroid.bases.NodeNG - :yields: The sequence of wrapped expressions on the modified tree - expression can be found. - """ - if (isinstance(node, nodes.Call) - and isinstance(node.func, nodes.Name) - and node.func.name == _TRANSIENT_FUNCTION): - real_expr = node.args[0] - real_expr.parent = node.parent - # Search for node in all _astng_fields (the fields checked when - # get_children is called) of its parent. Some of those fields may - # be lists or tuples, in which case the elements need to be checked. - # When we find it, replace it by real_expr, so that the AST looks - # like no call to _TRANSIENT_FUNCTION ever took place. 
- for name in node.parent._astroid_fields: - child = getattr(node.parent, name) - if isinstance(child, (list, tuple)): - for idx, compound_child in enumerate(child): - if compound_child is node: - child[idx] = real_expr - elif child is node: - setattr(node.parent, name, real_expr) - yield real_expr - else: - for child in node.get_children(): - for result in _extract_expressions(child): - yield result - - -def _find_statement_by_line(node, line): - """Extracts the statement on a specific line from an AST. - - If the line number of node matches line, it will be returned; - otherwise its children are iterated and the function is called - recursively. - - :param node: An astroid node. - :type node: astroid.bases.NodeNG - :param line: The line number of the statement to extract. - :type line: int - :returns: The statement on the line, or None if no statement for the line - can be found. - :rtype: astroid.bases.NodeNG or None - """ - if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)): - # This is an inaccuracy in the AST: the nodes that can be - # decorated do not carry explicit information on which line - # the actual definition (class/def), but .fromline seems to - # be close enough. - node_line = node.fromlineno - else: - node_line = node.lineno - - if node_line == line: - return node - - for child in node.get_children(): - result = _find_statement_by_line(child, line) - if result: - return result - - return None - -def extract_node(code, module_name=''): - """Parses some Python code as a module and extracts a designated AST node. - - Statements: - To extract one or more statement nodes, append #@ to the end of the line - - Examples: - >>> def x(): - >>> def y(): - >>> return 1 #@ - - The return statement will be extracted. - - >>> class X(object): - >>> def meth(self): #@ - >>> pass - - The funcion object 'meth' will be extracted. - - Expressions: - To extract arbitrary expressions, surround them with the fake - function call __(...). After parsing, the surrounded expression - will be returned and the whole AST (accessible via the returned - node's parent attribute) will look like the function call was - never there in the first place. - - Examples: - >>> a = __(1) - - The const node will be extracted. - - >>> def x(d=__(foo.bar)): pass - - The node containing the default argument will be extracted. - - >>> def foo(a, b): - >>> return 0 < __(len(a)) < b - - The node containing the function call 'len' will be extracted. - - If no statements or expressions are selected, the last toplevel - statement will be returned. - - If the selected statement is a discard statement, (i.e. an expression - turned into a statement), the wrapped expression is returned instead. - - For convenience, singleton lists are unpacked. - - :param str code: A piece of Python code that is parsed as - a module. Will be passed through textwrap.dedent first. - :param str module_name: The name of the module. - :returns: The designated node from the parse tree, or a list of nodes. - :rtype: astroid.bases.NodeNG, or a list of nodes. - """ - def _extract(node): - if isinstance(node, nodes.Expr): - return node.value - else: - return node - - requested_lines = [] - for idx, line in enumerate(code.splitlines()): - if line.strip().endswith(_STATEMENT_SELECTOR): - requested_lines.append(idx + 1) - - tree = builder.parse(code, module_name=module_name) - extracted = [] - if requested_lines: - for line in requested_lines: - extracted.append(_find_statement_by_line(tree, line)) - - # Modifies the tree. 
- extracted.extend(_extract_expressions(tree)) - - if not extracted: - extracted.append(tree.body[-1]) - - extracted = [_extract(node) for node in extracted] - if len(extracted) == 1: - return extracted[0] - else: - return extracted +from astroid import util def require_version(minver=None, maxver=None): @@ -178,24 +24,38 @@ try: return tuple(int(v) for v in string.split('.')) except ValueError: - raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version) + util.reraise(ValueError('%s is not a correct version : should be X.Y[.Z].' % string)) def check_require_version(f): current = sys.version_info[:3] if parse(minver, "0") < current <= parse(maxver, "4"): return f - else: - str_version = '.'.join(str(v) for v in sys.version_info) - @functools.wraps(f) - def new_f(self, *args, **kwargs): - if minver is not None: - self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version)) - elif maxver is not None: - self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version)) - return new_f + + str_version = '.'.join(str(v) for v in sys.version_info) + @functools.wraps(f) + def new_f(self, *args, **kwargs): + if minver is not None: + self.skipTest('Needs Python > %s. Current version is %s.' + % (minver, str_version)) + elif maxver is not None: + self.skipTest('Needs Python <= %s. Current version is %s.' + % (maxver, str_version)) + return new_f return check_require_version def get_name_node(start_from, name, index=0): return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index] + + +@contextlib.contextmanager +def enable_warning(warning): + warnings.simplefilter('always', warning) + try: + yield + finally: + # Reset it to default value, so it will take + # into account the values from the -W flag. + warnings.simplefilter('default', warning) + \ No newline at end of file diff -Nru astroid-1.4.9/astroid/transforms.py astroid-1.5.3/astroid/transforms.py --- astroid-1.4.9/astroid/transforms.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/transforms.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,20 +1,8 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . 
+# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + import collections import warnings @@ -69,8 +57,8 @@ return [self._visit_generic(child) for child in node] elif isinstance(node, tuple): return tuple(self._visit_generic(child) for child in node) - else: - return self._visit(node) + + return self._visit(node) def register_transform(self, node_class, transform, predicate=None): """Register `transform(node)` function to be applied on the given diff -Nru astroid-1.4.9/astroid/util.py astroid-1.5.3/astroid/util.py --- astroid-1.4.9/astroid/util.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/astroid/util.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,89 +1,116 @@ -# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it -# under the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 2.1 of the License, or (at your -# option) any later version. -# -# astroid is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License -# for more details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . -# -# The code in this file was originally part of logilab-common, licensed under -# the same license. +# Copyright (c) 2015-2016 Cara Vinson +# Copyright (c) 2015-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + +import sys import warnings -from astroid import exceptions +import importlib +import lazy_object_proxy +import six -def generate_warning(message, warning): - return lambda *args: warnings.warn(message % args, warning, stacklevel=3) +def lazy_descriptor(obj): + class DescriptorProxy(lazy_object_proxy.Proxy): + def __get__(self, instance, owner=None): + return self.__class__.__get__(self, instance) + return DescriptorProxy(obj) -rename_warning = generate_warning( - "%r is deprecated and will be removed in astroid %.1f, use %r instead", - PendingDeprecationWarning) -attribute_to_method_warning = generate_warning( - "%s is deprecated and will be removed in astroid %.1f, use the " - "method '%s()' instead.", PendingDeprecationWarning) +def lazy_import(module_name): + return lazy_object_proxy.Proxy( + lambda: importlib.import_module('.' 
+ module_name, 'astroid')) -attribute_to_function_warning = generate_warning( - "%s is deprecated and will be removed in astroid %.1f, use the " - "function '%s()' instead.", PendingDeprecationWarning) -method_to_function_warning = generate_warning( - "%s() is deprecated and will be removed in astroid %.1f, use the " - "function '%s()' instead.", PendingDeprecationWarning) +def reraise(exception): + '''Reraises an exception with the traceback from the current exception + block.''' + six.reraise(type(exception), exception, sys.exc_info()[2]) -class _Yes(object): +@object.__new__ +class Uninferable(object): """Special inference object, which is returned when inference fails.""" def __repr__(self): - return 'YES' - + return 'Uninferable' __str__ = __repr__ def __getattribute__(self, name): if name == 'next': raise AttributeError('next method should not be called') if name.startswith('__') and name.endswith('__'): - return super(_Yes, self).__getattribute__(name) + return object.__getattribute__(self, name) if name == 'accept': - return super(_Yes, self).__getattribute__(name) + return object.__getattribute__(self, name) return self def __call__(self, *args, **kwargs): return self + def __bool__(self): + return False + + __nonzero__ = __bool__ + def accept(self, visitor): - func = getattr(visitor, "visit_yes") + func = getattr(visitor, "visit_uninferable") return func(self) +class BadOperationMessage(object): + """Object which describes a TypeError occurred somewhere in the inference chain -YES = _Yes() + This is not an exception, but a container object which holds the types and + the error which occurred. + """ -def safe_infer(node, context=None): - """Return the inferred value for the given node. - Return None if inference failed or if there is some ambiguity (more than - one node has been inferred). 
- """ - try: - inferit = node.infer(context=context) - value = next(inferit) - except exceptions.InferenceError: - return - try: - next(inferit) - return # None if there is ambiguity on the inferred node - except exceptions.InferenceError: - return # there is some kind of ambiguity - except StopIteration: - return value +class BadUnaryOperationMessage(BadOperationMessage): + """Object which describes operational failures on UnaryOps.""" + + def __init__(self, operand, op, error): + self.operand = operand + self.op = op + self.error = error + + def __str__(self): + operand_type = self.operand.name + msg = "bad operand type for unary {}: {}" + return msg.format(self.op, operand_type) + + +class BadBinaryOperationMessage(BadOperationMessage): + """Object which describes type errors for BinOps.""" + + def __init__(self, left_type, op, right_type): + self.left_type = left_type + self.right_type = right_type + self.op = op + + def __str__(self): + msg = "unsupported operand type(s) for {}: {!r} and {!r}" + return msg.format(self.op, self.left_type.name, self.right_type.name) + + +def _instancecheck(cls, other): + wrapped = cls.__wrapped__ + other_cls = other.__class__ + is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped) + warnings.warn("%r is deprecated and slated for removal in astroid " + "2.0, use %r instead" % (cls.__class__.__name__, + wrapped.__name__), + PendingDeprecationWarning, stacklevel=2) + return is_instance_of + + +def proxy_alias(alias_name, node_type): + """Get a Proxy from the given name to the given node type.""" + proxy = type(alias_name, (lazy_object_proxy.Proxy,), + {'__class__': object.__dict__['__class__'], + '__instancecheck__': _instancecheck}) + return proxy(lambda: node_type) + + +# Backwards-compatibility aliases +YES = Uninferable diff -Nru astroid-1.4.9/astroid.egg-info/PKG-INFO astroid-1.5.3/astroid.egg-info/PKG-INFO --- astroid-1.4.9/astroid.egg-info/PKG-INFO 2016-12-18 10:27:51.000000000 +0000 +++ astroid-1.5.3/astroid.egg-info/PKG-INFO 2017-06-03 14:04:13.000000000 +0000 @@ -1,18 +1,30 @@ Metadata-Version: 1.1 Name: astroid -Version: 1.4.9 +Version: 1.5.3 Summary: A abstract syntax tree for Python with inference support. Home-page: https://github.com/PyCQA/astroid Author: Python Code Quality Authority Author-email: code-quality@python.org License: LGPL -Description: .. image:: https://drone.io/bitbucket.org/logilab/astroid/status.png - :alt: drone.io Build Status - :target: https://drone.io/bitbucket.org/logilab/astroid - - Astroid +Description: Astroid ======= + .. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master + :target: https://travis-ci.org/PyCQA/astroid + + .. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true + :alt: AppVeyor Build Status + :target: https://ci.appveyor.com/project/PCManticore/astroid + + .. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master + :target: https://coveralls.io/github/PyCQA/astroid?branch=master + + .. image:: https://readthedocs.org/projects/astroid/badge/?version=latest + :target: http://astroid.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + + + What's this? ------------ @@ -29,15 +41,6 @@ scopes. Furthermore, astroid builds partial trees by inspecting living objects. - Main modules are: - - * `bases`, `node_classses` and `scoped_nodes` contain the classes for the - different type of nodes of the tree. 
- - * the `manager` contains a high level object to get astroid trees from - source files and living objects. It maintains a cache of previously - constructed tree for quick access. - Installation ------------ diff -Nru astroid-1.4.9/astroid.egg-info/requires.txt astroid-1.5.3/astroid.egg-info/requires.txt --- astroid-1.4.9/astroid.egg-info/requires.txt 2016-12-18 10:27:51.000000000 +0000 +++ astroid-1.5.3/astroid.egg-info/requires.txt 2017-06-03 14:04:13.000000000 +0000 @@ -1,3 +1,10 @@ -six lazy_object_proxy +six wrapt + +[:python_version<"3.3"] +backports.functools_lru_cache + +[:python_version<"3.4"] +enum34>=1.1.3 +singledispatch diff -Nru astroid-1.4.9/astroid.egg-info/SOURCES.txt astroid-1.5.3/astroid.egg-info/SOURCES.txt --- astroid-1.4.9/astroid.egg-info/SOURCES.txt 2016-12-18 10:27:51.000000000 +0000 +++ astroid-1.5.3/astroid.egg-info/SOURCES.txt 2017-06-03 14:04:13.000000000 +0000 @@ -16,6 +16,7 @@ astroid/context.py astroid/decorators.py astroid/exceptions.py +astroid/helpers.py astroid/inference.py astroid/manager.py astroid/mixins.py @@ -36,25 +37,44 @@ astroid.egg-info/requires.txt astroid.egg-info/top_level.txt astroid/brain/brain_builtin_inference.py +astroid/brain/brain_collections.py astroid/brain/brain_dateutil.py +astroid/brain/brain_fstrings.py +astroid/brain/brain_functools.py astroid/brain/brain_gi.py +astroid/brain/brain_hashlib.py +astroid/brain/brain_io.py astroid/brain/brain_mechanize.py +astroid/brain/brain_multiprocessing.py +astroid/brain/brain_namedtuple_enum.py astroid/brain/brain_nose.py astroid/brain/brain_numpy.py +astroid/brain/brain_pkg_resources.py astroid/brain/brain_pytest.py astroid/brain/brain_qt.py +astroid/brain/brain_re.py astroid/brain/brain_six.py astroid/brain/brain_ssl.py -astroid/brain/brain_stdlib.py +astroid/brain/brain_subprocess.py +astroid/brain/brain_threading.py +astroid/brain/brain_typing.py +astroid/interpreter/__init__.py +astroid/interpreter/dunder_lookup.py +astroid/interpreter/objectmodel.py +astroid/interpreter/_import/__init__.py +astroid/interpreter/_import/spec.py +astroid/interpreter/_import/util.py astroid/tests/__init__.py astroid/tests/resources.py astroid/tests/unittest_brain.py astroid/tests/unittest_builder.py +astroid/tests/unittest_helpers.py astroid/tests/unittest_inference.py astroid/tests/unittest_lookup.py astroid/tests/unittest_manager.py astroid/tests/unittest_modutils.py astroid/tests/unittest_nodes.py +astroid/tests/unittest_object_model.py astroid/tests/unittest_objects.py astroid/tests/unittest_peephole.py astroid/tests/unittest_protocols.py @@ -69,10 +89,10 @@ astroid/tests/testdata/python2/data/__init__.py astroid/tests/testdata/python2/data/absimport.py astroid/tests/testdata/python2/data/all.py -astroid/tests/testdata/python2/data/clientmodule_test.py astroid/tests/testdata/python2/data/descriptor_crash.py astroid/tests/testdata/python2/data/email.py astroid/tests/testdata/python2/data/format.py +astroid/tests/testdata/python2/data/invalid_encoding.py astroid/tests/testdata/python2/data/joined_strings.py astroid/tests/testdata/python2/data/module.py astroid/tests/testdata/python2/data/module2.py @@ -80,7 +100,7 @@ astroid/tests/testdata/python2/data/nonregr.py astroid/tests/testdata/python2/data/notall.py astroid/tests/testdata/python2/data/recursion.py -astroid/tests/testdata/python2/data/suppliermodule_test.py +astroid/tests/testdata/python2/data/tmp__init__.py astroid/tests/testdata/python2/data/SSL1/Connection1.py astroid/tests/testdata/python2/data/SSL1/__init__.py 
astroid/tests/testdata/python2/data/absimp/__init__.py @@ -88,21 +108,38 @@ astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py astroid/tests/testdata/python2/data/appl/__init__.py astroid/tests/testdata/python2/data/appl/myConnection.py +astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid/tests/testdata/python2/data/find_test/__init__.py astroid/tests/testdata/python2/data/find_test/module.py astroid/tests/testdata/python2/data/find_test/module2.py astroid/tests/testdata/python2/data/find_test/noendingnewline.py astroid/tests/testdata/python2/data/find_test/nonregr.py +astroid/tests/testdata/python2/data/foogle/fax/__init__.py +astroid/tests/testdata/python2/data/foogle/fax/a.py astroid/tests/testdata/python2/data/lmfp/__init__.py astroid/tests/testdata/python2/data/lmfp/foo.py astroid/tests/testdata/python2/data/module1abs/__init__.py astroid/tests/testdata/python2/data/module1abs/core.py +astroid/tests/testdata/python2/data/namespace_pep_420/module.py +astroid/tests/testdata/python2/data/notamodule/file.py astroid/tests/testdata/python2/data/package/__init__.py astroid/tests/testdata/python2/data/package/absimport.py astroid/tests/testdata/python2/data/package/hello.py astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py astroid/tests/testdata/python2/data/package/subpackage/__init__.py astroid/tests/testdata/python2/data/package/subpackage/module.py +astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py +astroid/tests/testdata/python2/data/path_pkg_resources_1/package/foo.py +astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py +astroid/tests/testdata/python2/data/path_pkg_resources_2/package/bar.py +astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py +astroid/tests/testdata/python2/data/path_pkg_resources_3/package/baz.py +astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py +astroid/tests/testdata/python2/data/path_pkgutil_1/package/foo.py +astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py +astroid/tests/testdata/python2/data/path_pkgutil_2/package/bar.py +astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py +astroid/tests/testdata/python2/data/path_pkgutil_3/package/baz.py astroid/tests/testdata/python2/data/unicode_package/__init__.py astroid/tests/testdata/python2/data/unicode_package/core/__init__.py astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg @@ -110,10 +147,10 @@ astroid/tests/testdata/python3/data/__init__.py astroid/tests/testdata/python3/data/absimport.py astroid/tests/testdata/python3/data/all.py -astroid/tests/testdata/python3/data/clientmodule_test.py astroid/tests/testdata/python3/data/descriptor_crash.py astroid/tests/testdata/python3/data/email.py astroid/tests/testdata/python3/data/format.py +astroid/tests/testdata/python3/data/invalid_encoding.py astroid/tests/testdata/python3/data/joined_strings.py astroid/tests/testdata/python3/data/module.py astroid/tests/testdata/python3/data/module2.py @@ -121,7 +158,7 @@ astroid/tests/testdata/python3/data/nonregr.py astroid/tests/testdata/python3/data/notall.py astroid/tests/testdata/python3/data/recursion.py -astroid/tests/testdata/python3/data/suppliermodule_test.py +astroid/tests/testdata/python3/data/tmp__init__.py astroid/tests/testdata/python3/data/SSL1/Connection1.py astroid/tests/testdata/python3/data/SSL1/__init__.py astroid/tests/testdata/python3/data/absimp/__init__.py @@ -129,20 +166,37 
@@ astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py astroid/tests/testdata/python3/data/appl/__init__.py astroid/tests/testdata/python3/data/appl/myConnection.py +astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py astroid/tests/testdata/python3/data/find_test/__init__.py astroid/tests/testdata/python3/data/find_test/module.py astroid/tests/testdata/python3/data/find_test/module2.py astroid/tests/testdata/python3/data/find_test/noendingnewline.py astroid/tests/testdata/python3/data/find_test/nonregr.py +astroid/tests/testdata/python3/data/foogle/fax/__init__.py +astroid/tests/testdata/python3/data/foogle/fax/a.py astroid/tests/testdata/python3/data/lmfp/__init__.py astroid/tests/testdata/python3/data/lmfp/foo.py astroid/tests/testdata/python3/data/module1abs/__init__.py astroid/tests/testdata/python3/data/module1abs/core.py +astroid/tests/testdata/python3/data/namespace_pep_420/module.py +astroid/tests/testdata/python3/data/notamodule/file.py astroid/tests/testdata/python3/data/package/__init__.py astroid/tests/testdata/python3/data/package/absimport.py astroid/tests/testdata/python3/data/package/hello.py astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py astroid/tests/testdata/python3/data/package/subpackage/__init__.py astroid/tests/testdata/python3/data/package/subpackage/module.py +astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py +astroid/tests/testdata/python3/data/path_pkg_resources_1/package/foo.py +astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py +astroid/tests/testdata/python3/data/path_pkg_resources_2/package/bar.py +astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py +astroid/tests/testdata/python3/data/path_pkg_resources_3/package/baz.py +astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py +astroid/tests/testdata/python3/data/path_pkgutil_1/package/foo.py +astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py +astroid/tests/testdata/python3/data/path_pkgutil_2/package/bar.py +astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py +astroid/tests/testdata/python3/data/path_pkgutil_3/package/baz.py astroid/tests/testdata/python3/data/unicode_package/__init__.py astroid/tests/testdata/python3/data/unicode_package/core/__init__.py \ No newline at end of file diff -Nru astroid-1.4.9/ChangeLog astroid-1.5.3/ChangeLog --- astroid-1.4.9/ChangeLog 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/ChangeLog 2017-06-03 13:47:48.000000000 +0000 @@ -1,25 +1,61 @@ Change log for the astroid package (used to be astng) ===================================================== -2016-12-18 -- 1.4.9 +2017-06-03 -- 1.5.3 - * Cast __path__ to a list in _module_file + * enum34 dependency is forced to be at least version 1.1.3. Fixes spurious + bug related to enum classes being falsy in boolean context, which caused + _Inconsistent Hierarchy_ `RuntimeError` in `singledispatch` module. - _path__ can be an '_frozen_importlib_external._NamespacePath' - object for a namespace module (declared by setuptools) with python 3.5+ - This should prevent a crash ocurring when working with namespace packages. + See links below for details: + - http://bugs.python.org/issue26748 + - https://bitbucket.org/ambv/singledispatch/issues/8/inconsistent-hierarchy-with-enum + - https://bitbucket.org/stoneleaf/enum34/commits/da50803651ab644e6fce66ebc85562f1117c344b - * Add support for pytest 3.0. 
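A minimal sketch of the kind of dispatch-over-enums code that the enum34 >= 1.1.3 requirement above is protecting. On Python 2 the ``enum`` and ``singledispatch`` names come from the enum34 and singledispatch backports, and with an older enum34 the ``register`` call below is the sort of thing that could trip the _Inconsistent Hierarchy_ ``RuntimeError`` mentioned in the entry; treat the reproduction as an assumption based on the linked reports, not as the exact failing test case::

    from enum import Enum                 # enum34 backport on Python 2
    from functools import singledispatch  # 'singledispatch' backport on Python 2

    class Color(Enum):
        RED = 1
        GREEN = 2

    @singledispatch
    def describe(value):
        # fallback for unregistered types
        return "not a color"

    @describe.register(Color)
    def _(value):
        # dispatching on an Enum subclass is what used to stumble over
        # enum classes being falsy with old enum34 releases
        return "a color: %s" % value.name

    print(describe(Color.RED))  # -> a color: RED
    print(describe(42))         # -> not a color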
+ * Do not raise an exception when uninferable value is unpacked in ``with`` statement. -2016-07-27 -- 1.4.8 + * Lock objects from ``threading`` module are now correctly recognised + as context managers. + +2017-04-17 -- 1.5.2 + + * Basic support for the class form of typing.NamedTuple + + * mro() can be computed for classes with old style classes in the hierarchy + + +2017-04-13 -- 1.5.0 + + * Arguments node gained a new attribute, ``kwonlyargs_annotations`` + + This new attribute holds the annotations for the keyword-only + arguments. + + * `namedtuple` inference now understands `rename` keyword argument + + * Classes can now know their definition-time arguments. + + Classes can support keyword arguments, which are passed when + a class is constructed using ``__new__``. + + * Add support for inferring typing.NamedTuple. + + * ClassDef now supports __getitem__ inference through the metaclass. + + * getitem() method accepts nodes now, instead of Python objects. + + * Add support for explicit namespace packages, created with pkg_resources. + + * Add brain tips for _io.TextIOWrapper's buffer and raw attributes. * Add `returns` into the proper order in FunctionDef._astroid_fields The order is important, since it determines the last child, which in turn determines the last line number of a scoped node. + * Add brain tips for functools.lru_cache. -2016-07-07 -- 1.4.7 + * New function, astroid.extract_node, exported out from astroid.test_utils. * Stop saving assignment locals in ExceptHandlers, when the context is a store. @@ -28,16 +64,62 @@ could result in a local save for IndexError as KeyError, resulting in potential unexpected inferences. Since we don't lose a lot, this syntax gets prohibited. -2016-06-06 -- 1.4.6 - * Fix a crash which occurred when the class of a namedtuple could not be inferred. + * Add support for implicit namespace packages (PEP 420) + + This change involves a couple of modifications. First, we're relying on a + spec finder protocol, inspired by importlib's ModuleSpec, for finding where + a file or package is, using importlib's PathFinder as well, which enable + us to discover namespace packages as well. + This discovery is the center piece of the namespace package support, + the other part being the construction of a dummy Module node whenever + a namespace package root directory is requested during astroid's import + references. + + * Introduce a special attributes model + + Through this model, astroid starts knowing special attributes of certain Python objects, + such as functions, classes, super objects and so on. This was previously possible before, + but now the lookup and the attributes themselves are separated into a new module, + objectmodel.py, which describes, in a more comprehensive way, the data model of each + object. + + * Exceptions have their own object model + + Some of exceptions's attributes, such as .args and .message, + can't be inferred correctly since they are descriptors that get + transformed into the proper objects at runtime. This can cause issues + with the static analysis, since they are inferred as different than + what's expected. Now when we're creating instances of exceptions, + we're inferring a special object that knows how to transform those + runtime attributes into the proper objects via a custom object model. + Closes issue #81 + + * dict.values, dict.keys and dict.items are properly + inferred to their corresponding type, which also + includes the proper containers for Python 3. 
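A small usage sketch of the newly exported ``astroid.extract_node`` helper, poking at the improved ``dict.values``/``dict.keys``/``dict.items`` inference described above. The exact repr printed is an assumption; the point is that the call no longer infers to something unusable::

    import astroid

    call = astroid.extract_node('''
    registry = {"a": 1, "b": 2}
    registry.values()  #@
    ''')

    inferred = next(call.infer())
    # On Python 3 this is expected to stand for a dict_values object,
    # rather than a plain list or an uninferable value.
    print(inferred)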
+ + * Fix a crash which occurred when a method had a same name as a builtin object, + decorated at the same time by that builtin object ( a property for instance) + + * The inference can handle the case where the attribute is accessed through a subclass + of a base class and the attribute is defined at the base class's level, + by taking in consideration a redefinition in the subclass. + + This should fix https://github.com/PyCQA/pylint/issues/432 + + * Calling lambda methods (defined at class level) can be understood. + + * Don't take in consideration invalid assignments, especially when __slots__ + declaration forbids them. + + Close issue #332 + * Functional form of enums support accessing values through __call__. * Brain tips for the ssl library. -2016-03-21 -- 1.4.5 - * decoratornames() does not leak InferenceError anymore. * wildcard_imported_names() got replaced by _public_names() @@ -53,9 +135,6 @@ The method is private in 1.4.X. - -2016-01-15 -- 1.4.4 - * unpack_infer raises InferenceError if it can't operate with the given sequences of nodes. @@ -73,16 +152,14 @@ In this case it doesn't make sense to return two values, but only the last one. -2015-12-24 -- 1.4.3 - - * pkg_resources brain tips are a bit more specific, - by specifiying proper returns. - - * Standard library modules are properly detected by is_standard_module. + * Add support for inference on threading.Lock - This should fix issues such as https://github.com/PyCQA/pylint/issues/725. + As a matter of fact, astroid can infer on threading.RLock, + threading.Semaphore, but can't do it on threading.Lock (because it comes + from an extension module). -2015-12-21 -- 1.4.2 + * pkg_resources brain tips are a bit more specific, + by specifying proper returns. * The slots() method conflates all the slots from the ancestors into a list of current and parent slots. @@ -91,9 +168,30 @@ they get inherited, as long as each parent defines a __slots__ entry. + * Some nodes got a new attribute, 'ctx', which tells in which context + the said node was used. + + The possible values for the contexts are `Load` ('a'), `Del` + ('del a'), `Store` ('a = 4') and the nodes that got the new + attribute are Starred, Subscript, List and Tuple. Closes issue #267. + + * relative_to_absolute_name or methods calling it will now raise + TooManyLevelsError when a relative import was trying to + access something beyond the top-level package. + + * AstroidBuildingException is now AstroidBuildingError. The first + name will exist until astroid 2.0. + + * Add two new exceptions, AstroidImportError and AstroidSyntaxError. + They are subclasses of AstroidBuildingException and are raised when + a module can't be imported from various reasons. + Also do_import_module lets the errors to bubble up without converting + them to InferenceError. This particular conversion happens only + during the inference. + * Revert to using printf-style formatting in as_string, in order to avoid a potential problem with encodings when using .format. - Closes issue #273. + Closes issue #273. Patch by notsqrt. * assigned_stmts methods have the same signature from now on. @@ -101,10 +199,20 @@ assumptions about what could be passed to other implementations, leading to various possible crashes when one or more arguments weren't given. Closes issue #277. - -2015-11-29 -- 1.4.1 + * Fix metaclass detection, when multiple keyword arguments + are used in class definition. 
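A hedged sketch of how the exception reshuffling in the entries above is meant to be consumed: ``AstroidImportError`` is assumed to subclass the renamed ``AstroidBuildingError`` (with ``AstroidBuildingException`` kept as an alias until 2.0), so callers can catch either the specific or the generic class::

    import astroid
    from astroid import exceptions

    try:
        # hypothetical module name, chosen so that the import fails
        astroid.MANAGER.ast_from_module_name("no_such_module_anywhere_xyz")
    except exceptions.AstroidImportError:
        print("import failed at the astroid level")        # expected branch
    except exceptions.AstroidBuildingError:
        print("some other failure while building the AST")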
+ * Add support for annotated variable assignments (PEP 526) + + * Starred expressions are now inferred correctly for tuple, + list, set, and dictionary literals. + + * Support for asynchronous comprehensions introduced in Python 3.6. + + Fixes #399. See PEP530 for details. + +2015-11-29 -- 1.4.1 * Add support for handling Uninferable nodes when calling as_string @@ -120,30 +228,45 @@ 2015-11-29 -- 1.4.0 * Class.getattr('__mro__') returns the actual MRO. Closes issue #128. + * The logilab-common dependency is not needed anymore as the needed code was integrated into astroid. - * Add 'assert_equals' method in nose.tools's brain plugin. - * Generated enum member stubs now support IntEnum and multiple base classes. + * astroid.builder.AstroidBuilder.string_build and + astroid.builder.AstroidBuilder.file_build are now raising + AstroidBuildingException when the parsing of the string raises + a SyntaxError. + * Add brain tips for multiprocessing.Manager and multiprocessing.managers.SyncManager. - * Add brain tips for multiprocessing post Python 3.4+, - where the module level functions are retrieved with getattr - from a context object, leading to many no-member errors - in Pylint. + * Add some fixes which enhances the Jython support. + The fix mostly includes updates to modutils, which is + modified in order to properly lookup paths from live objects, + which ends in $py.class, not pyc as for Python 2, + Closes issue #83. * The Generator objects inferred with `infer_call_result` from functions have as parent the function from which they are returned. + * Add brain tips for multiprocessing post Python 3.4+, + where the module level functions are retrieved with getattr + from a context object, leading to many no-member errors + in Pylint. + * Understand partially the 3-argument form of `type`. The only change is that astroid understands members passed in as dictionaries as the third argument. + * .slots() will return an empty list for classes with empty slots. + Previously it returned None, which is the same value for + classes without slots at all. This was changed in order + to better reflect what's actually happening. + * Improve the inference of Getattr nodes when dealing with abstract properties from the abc module. @@ -199,6 +322,83 @@ that all of them will have __getattr__ and __getattribute__ present and it is wrong to consider that those methods were actually implemented. + * Add basic support for understanding context managers. + + Currently, there's no way to understand whatever __enter__ returns in a + context manager and what it is binded using the ``as`` keyword. With these changes, + we can understand ``bar`` in ``with foo() as bar``, which will be the result of __enter__. + + * Add a new type of node, called *inference objects*. Inference objects are similar with + AST nodes, but they can be obtained only after inference, so they can't be found + inside the original AST tree. Their purpose is to handle at astroid level + some operations which can't be handled when using brain transforms. + For instance, the first object added is FrozenSet, which can be manipulated + at astroid's level (inferred, itered etc). Code such as this 'frozenset((1,2))' + will not return an Instance of frozenset, without having access to its + content, but a new objects.FrozenSet, which can be used just as a nodes.Set. + + * Add a new *inference object* called Super, which also adds support for understanding + super calls. 
astroid understands the zero-argument form of super, specific to + Python 3, where the interpreter fills itself the arguments of the call. Also, we + are understanding the 2-argument form of super, both for bounded lookups + (super(X, instance)) as well as for unbounded lookups (super(X, Y)), + having as well support for validating that the object-or-type is a subtype + of the first argument. The unbounded form of super (one argument) is not + understood, since it's useless in practice and should be removed from + Python's specification. Closes issue #89. + + * Add inference support for getattr builtin. Now getattr builtins are + properly understood. Closes issue #103. + + * Add inference support for hasattr builtin. Closes issue #102. + + * Add 'assert_equals' method in nose.tools's brain plugin. + + * Don't leak StopIteration when inferring invalid UnaryOps (+[], +None etc.). + + * Improve the inference of UnaryOperands. + + When inferring unary operands, astroid looks up the return value + of __pos__, __neg__ and __invert__ to determine the inferred value + of ``~node``, ``+node`` or ``-node``. + + * Improve the inference of six.moves, especially when using `from ... import ...` + syntax. Also, we added a new fail import hook for six.moves, which fixes the + import-error false positive from pylint. Closes issue #107. + + * Make the first steps towards detecting type errors for unary and binary + operations. + + In exceptions, one object was added for holding information about a possible + UnaryOp TypeError, object called `UnaryOperationError`. Even though the name + suggests it's an exception, it's actually not one. When inferring UnaryOps, + we use this special object to mark a possible TypeError, + object which can be interpreted by pylint in order to emit a new warning. + We are also exposing a new method for UnaryOps, called `type_errors`, + which returns a list of UnaryOperationsError. + + * A new method was added to the AST nodes, 'bool_value'. It is used to deduce + the value of a node when used in a boolean context, which is useful + for both inference, as well as for data flow analysis, where we are interested + in what branches will be followed when the program will be executed. + `bool_value` returns True, False or YES, if the node's boolean value can't + be deduced. The method is used when inferring the unary operand `not`. + Thus, `not something` will result in calling `something.bool_value` and + negating the result, if it is a boolean. + + * Add inference support for boolean operations (`and` and `not`). + + * Add inference support for the builtin `callable`. + + * astroid.inspector was moved to pylint.pyreverse, since + it is the only known client of this module. No other change + was made to the exported API. + + * astroid.utils.ASTWalker and astroid.utils.LocalsVisitor + were moved to pylint.pyreverse.utils. + + * Add inference support for the builtin `bool`. + * Add `igetattr` method to scoped_nodes.Function. * Add support for Python 3.5's MatMul operation: see PEP 465 for more @@ -208,38 +408,53 @@ builtins module. Previously trying to infer the Name(NotImplemented) returned an YES object. - * Add proper grammatical names for `infered` and `ass_type` methods, - namely `inferred` and `assign_type`. + * Add astroid.helpers, a module of various useful utilities which don't + belong yet into other components. Added *object_type*, a function + which can be used to obtain the type of almost any astroid object, + similar to how the builtin *type* works. 
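The ``astroid.helpers`` module mentioned just above can be exercised like this; a minimal sketch, assuming ``object_type`` accepts any inferred node and returns the ClassDef standing for its Python type::

    import astroid
    from astroid import helpers

    list_node = astroid.extract_node('[1, 2, 3]')
    inferred = next(list_node.infer())

    list_type = helpers.object_type(inferred)
    print(list_type.name)     # expected: 'list'
    print(list_type.qname())  # e.g. 'builtins.list' on Python 3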
- The old methods will raise PendingDeprecationWarning, being slated - for removal in astroid 2.0. + * Understand the one-argument form of the builtin *type*. - * Add new AST names in order to be similar to the ones - from the builtin ast module. + This uses the recently added *astroid.helpers.object_type* in order to + retrieve the Python type of the first argument of the call. - With this change, Getattr becomes Attributes, Backquote becomes - Repr, Class is ClassDef, Function is FunctionDef, Discard is Expr, - CallFunc is Call, From is ImportFrom, AssName is AssignName - and AssAttr is AssignAttr. The old names are maintained for backwards - compatibility and they are interchangeable, in the sense that using - Discard will use Expr under the hood and the implemented visit_discard - in checkers will be called with Expr nodes instead. The AST does not - contain the old nodes, only the interoperability between them hides this - fact. Recommandations to move to the new nodes are emitted accordingly, - the old names will be removed in astroid 2.0. + * Add helpers.is_supertype and helpers.is_subtype, two functions for + checking if an object is a super/sub type of another. + * Improve the inference of binary arithmetic operations (normal + and augmented). + + * Add support for retrieving TypeErrors for binary arithmetic operations. + + The change is similar to what was added for UnaryOps: a new method + called *type_errors* for both AugAssign and BinOp, which can be used + to retrieve type errors occurred during inference. Also, a new + exception object was added, BinaryOperationError. + + * Lambdas found at class level, which have a `self` argument, are considered + BoundMethods when accessing them from instances of their class. + + * Add support for multiplication of tuples and lists with instances + which provides an __index__ returning-int method. + + * Add support for indexing containers with instances which provides + an __index__ returning-int method. * Star unpacking in assignments returns properly a list, not the individual components. Closes issue #138. - * Lambdas found at class level, which have a `self` argument, are considered + * Add annotation support for function.as_string(). Closes issue #37. * Add support for indexing bytes on Python 3. - BoundMethods when accessing them from instances of their class. * Add support for inferring subscript on instances, which will use __getitem__. Closes issue #124. + * Add support for pkg_resources.declare_namespaces. + + * Move pyreverse specific modules and functionality back into pyreverse + (astroid.manager.Project, astroid.manager.Manager.project_from_files). + * Understand metaclasses added with six.add_metaclass decorator. Closes issue #129. * Add a new convenience API, `astroid.parse`, which can be used to retrieve @@ -252,7 +467,14 @@ in a non-package will finally result in an import-error on Pylint's side. Until now relative_only was ignored, leading to the import of `something`, if it was globally available. - + + * Add get_wrapping_class API to scoped_nodes, which can be used to + retrieve the class that wraps a node. + + * Class.getattr looks by default in the implicit and the explicit metaclasses, + which is `type` on Python 3. + + Closes issue #114. * There's a new separate step for transforms. @@ -278,7 +500,48 @@ AstroidBuilder._data_build. Closes issue #116. + + * Class._explicit_metaclass is now a public API, in the form of + Class.declared_metaclass. 
+ + Class.mro remains the de facto method for retrieving the metaclass + of a class, which will also do an evaluation of what declared_metaclass + returns. + + * Understand slices of tuples, lists, strings and instances with support + for slices. + + Closes issue #137. + + * Add proper grammatical names for `infered` and `ass_type` methods, + namely `inferred` and `assign_type`. + + The old methods will raise PendingDeprecationWarning, being slated + for removal in astroid 2.0. + * Add new AST names in order to be similar to the ones + from the builtin ast module. + + With this change, Getattr becomes Attributes, Backquote becomes + Repr, Class is ClassDef, Function is FunctionDef, Discard is Expr, + CallFunc is Call, From is ImportFrom, AssName is AssignName + and AssAttr is AssignAttr. The old names are maintained for backwards + compatibility and they are interchangeable, in the sense that using + Discard will use Expr under the hood and the implemented visit_discard + in checkers will be called with Expr nodes instead. The AST does not + contain the old nodes, only the interoperability between them hides this + fact. Recommendations to move to the new nodes are emitted accordingly, + the old names will be removed in astroid 2.0. + + * Add support for understanding class creation using `type.__new__(mcs, name, bases, attrs)`` + + Until now, inferring this kind of calls resulted in Instances, not in classes, + since astroid didn't understand that the presence of the metaclass in the call + leads to a class creating, not to an instance creation. + + * Understand the `slice` builtin. Closes issue #184. + + * Add brain tips for numpy.core, which should fix Pylint's #453. * Add a new node, DictUnpack, which is used to represent the unpacking of a dictionary into another dictionary, using PEP 448 specific syntax @@ -288,40 +551,9 @@ since it just uses None to represent this kind of operation, which seems conceptually wrong, due to the fact the AST contains non-AST nodes. Closes issue #206. - - * Add a new type of node, called *inference objects*. Inference objects are similar with - AST nodes, but they can be obtained only after inference, so they can't be found - inside the AST tree. Their purpose is to handle at astroid level - some operations which can't be handled when using brain transforms. - For instance, the first object added is FrozenSet, which can be manipulated - at astroid's level (inferred, itered etc). Code such as this 'frozenset((1,2))' - will not return an Instance of frozenset, without having access to its - content, but a new objects.FrozenSet, which can be used just as a nodes.Set. - - - * Add a new *inference object* called Super, which also adds support for understanding - super calls. astroid understands the zero-argument form of super, specific to - Python 3, where the interpreter fills itself the arguments of the call. Also, we - are understanding the 2-argument form of super, both for bounded lookups - (super(X, instance)) as well as for unbounded lookups (super(X, Y)), - having as well support for validating that the object-or-type is a subtype - of the first argument. The unbounded form of super (one argument) is not - understood, since it's useless in practice and should be removed from - Python's specification. Closes issue #89. - - * astroid.utils.ASTWalker and astroid.utils.LocalsVisitor - were moved to pylint.pyreverse.utils. -2015-08-02 -- 1.3.8 - * Backport of 40e3176, which fixes issue #84. 
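As a quick illustration of the ``astroid.parse`` convenience API and the renamed node classes referenced in the hunks above (a sketch; per the entries, the old spellings remain interchangeable aliases until astroid 2.0)::

    import astroid

    module = astroid.parse('''
    class Point(object):
        def dist(self):
            return (self.x ** 2 + self.y ** 2) ** 0.5
    ''')

    cls = module.body[0]
    meth = cls.body[0]
    print(isinstance(cls, astroid.ClassDef))      # True (formerly Class)
    print(isinstance(meth, astroid.FunctionDef))  # True (formerly Function)
    print(cls.locals.keys())                      # contains 'dist'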
-2015-07-27 -- 1.3.7 - - * Improve the inference of six.moves, especially when using `from ... import ...` - syntax. Also, we added a new fail import hook for six.moves, which fixes the - import-error false positive from pylint. Closes issue #107. - 2015-03-14 -- 1.3.6 * Class.slots raises NotImplementedError for old style classes. @@ -418,7 +650,7 @@ 2014-11-20 -- 1.3.0 - * Fix a maximum recursion error occured during the inference, + * Fix a maximum recursion error occurred during the inference, where statements with the same name weren't filtered properly. Closes pylint issue #295. @@ -519,7 +751,7 @@ most things there are for pylint/astroid only and we want to be able to fix them without requiring a new logilab.common release - * Fix names grabed using wildcard import in "absolute import mode" + * Fix names grabbed using wildcard import in "absolute import mode" (ie with absolute_import activated from the __future__ or with python 3). Fix pylint issue #58. @@ -669,7 +901,7 @@ * #74746: should return empty module when __main__ is imported (patch by google) - * #74748: getitem protocal return constant value instead of a Const node + * #74748: getitem protocol return constant value instead of a Const node (patch by google) * #77188: support lgc.decorators.classproperty @@ -686,7 +918,7 @@ * #70497: Crash on AttributeError: 'NoneType' object has no attribute '_infer_name' - * #70381: IndendationError in import causes crash + * #70381: IndentationError in import causes crash * #70565: absolute imports treated as relative (patch by Jacek Konieczny) @@ -749,7 +981,7 @@ 2010-09-10 -- 0.20.2 * fix astng building bug: we've to set module.package flag at the node - creation time otherwise we'll miss this information when infering relative + creation time otherwise we'll miss this information when inferring relative import during the build process (this should fix for instance some problems with numpy) diff -Nru astroid-1.4.9/debian/changelog astroid-1.5.3/debian/changelog --- astroid-1.4.9/debian/changelog 2017-01-05 18:48:17.000000000 +0000 +++ astroid-1.5.3/debian/changelog 2017-08-04 13:22:14.000000000 +0000 @@ -1,3 +1,28 @@ +astroid (1.5.3-0ubuntu3) artful; urgency=medium + + * d/control + - Manually add python-backports.functools-lru-cache to Build-Depends and + Depends of the Python 2 package as they are missed by dh-python thanks + to bug #834133. + + -- Daniel Watkins Thu, 03 Aug 2017 12:31:08 -0400 + +astroid (1.5.3-0ubuntu2) artful; urgency=medium + + * Upstream has added a runtime dependency on setuptools. + + -- Daniel Watkins Thu, 03 Aug 2017 10:30:42 -0400 + +astroid (1.5.3-0ubuntu1) artful; urgency=medium + + * New upstream release. + * d/control + - Manually add build-depends and depends on python-singledispatch and + python-enum34 to the Python 2 package, as they are missed by dh-python + thanks to bug #834133. 
+ + -- Michael Hudson-Doyle Wed, 12 Jul 2017 09:24:08 +1200 + astroid (1.4.9-1) unstable; urgency=medium * New upstream release diff -Nru astroid-1.4.9/debian/control astroid-1.5.3/debian/control --- astroid-1.4.9/debian/control 2017-01-05 18:48:17.000000000 +0000 +++ astroid-1.5.3/debian/control 2017-08-04 13:22:14.000000000 +0000 @@ -1,9 +1,10 @@ Source: astroid Section: python Priority: optional -Maintainer: Sandro Tosi +Maintainer: Ubuntu Developers +XSBC-Original-Maintainer: Sandro Tosi Uploaders: Debian Python Modules Team -Build-Depends: debhelper (>= 9.0.0), python, python3, dh-python, python-setuptools, python3-setuptools, python-nose, python3-nose, python-wrapt, python3-wrapt, python-lazy-object-proxy, python3-lazy-object-proxy +Build-Depends: debhelper (>= 9.0.0), python, python3, dh-python, python-setuptools, python3-setuptools, python-nose, python3-nose, python-wrapt, python3-wrapt, python-lazy-object-proxy, python3-lazy-object-proxy, python-singledispatch, python-enum34, python-backports.functools-lru-cache Standards-Version: 3.9.8 Homepage: http://www.astroid.org/ Vcs-Git: https://anonscm.debian.org/git/python-modules/packages/astroid.git @@ -11,7 +12,7 @@ Package: python-astroid Architecture: all -Depends: ${python:Depends}, ${misc:Depends}, python-six +Depends: ${python:Depends}, ${misc:Depends}, python-six, python-singledispatch, python-enum34, python-setuptools, python-backports.functools-lru-cache Description: rebuild a new abstract syntax tree from Python's AST The aim of this module is to provide a common base representation of Python source code for projects such as pychecker, pyreverse, @@ -39,7 +40,7 @@ Package: python3-astroid Architecture: all -Depends: ${python3:Depends}, ${misc:Depends}, python3-six +Depends: ${python3:Depends}, ${misc:Depends}, python3-six, python3-setuptools Description: rebuild a new abstract syntax tree from Python's AST (Python3) The aim of this module is to provide a common base representation of Python source code for projects such as pychecker, pyreverse, diff -Nru astroid-1.4.9/debian/.git-dpm astroid-1.5.3/debian/.git-dpm --- astroid-1.4.9/debian/.git-dpm 2017-01-05 18:48:17.000000000 +0000 +++ astroid-1.5.3/debian/.git-dpm 2017-07-11 21:22:45.000000000 +0000 @@ -1,11 +1,11 @@ # see git-dpm(1) from git-dpm package -809070c4cabba1876402ae620927394351313e8d -809070c4cabba1876402ae620927394351313e8d -809070c4cabba1876402ae620927394351313e8d -809070c4cabba1876402ae620927394351313e8d -astroid_1.4.9.orig.tar.gz -2529c9757b015632e7b4156bee6c5f72b601b4b6 -189315 +f9a64e44949e5c3fc4ce6944e2e33539a29deddc +f9a64e44949e5c3fc4ce6944e2e33539a29deddc +f9a64e44949e5c3fc4ce6944e2e33539a29deddc +f9a64e44949e5c3fc4ce6944e2e33539a29deddc +astroid_1.5.3.orig.tar.gz +e654225ab5bd2788e5e246b156910990bf33cde6 +240617 debianTag="debian/%e%v" patchedTag="patched/%e%v" upstreamTag="upstream/%e%u" diff -Nru astroid-1.4.9/PKG-INFO astroid-1.5.3/PKG-INFO --- astroid-1.4.9/PKG-INFO 2016-12-18 10:27:52.000000000 +0000 +++ astroid-1.5.3/PKG-INFO 2017-06-03 14:04:16.000000000 +0000 @@ -1,18 +1,30 @@ Metadata-Version: 1.1 Name: astroid -Version: 1.4.9 +Version: 1.5.3 Summary: A abstract syntax tree for Python with inference support. Home-page: https://github.com/PyCQA/astroid Author: Python Code Quality Authority Author-email: code-quality@python.org License: LGPL -Description: .. 
image:: https://drone.io/bitbucket.org/logilab/astroid/status.png - :alt: drone.io Build Status - :target: https://drone.io/bitbucket.org/logilab/astroid - - Astroid +Description: Astroid ======= + .. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master + :target: https://travis-ci.org/PyCQA/astroid + + .. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true + :alt: AppVeyor Build Status + :target: https://ci.appveyor.com/project/PCManticore/astroid + + .. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master + :target: https://coveralls.io/github/PyCQA/astroid?branch=master + + .. image:: https://readthedocs.org/projects/astroid/badge/?version=latest + :target: http://astroid.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + + + What's this? ------------ @@ -29,15 +41,6 @@ scopes. Furthermore, astroid builds partial trees by inspecting living objects. - Main modules are: - - * `bases`, `node_classses` and `scoped_nodes` contain the classes for the - different type of nodes of the tree. - - * the `manager` contains a high level object to get astroid trees from - source files and living objects. It maintains a cache of previously - constructed tree for quick access. - Installation ------------ diff -Nru astroid-1.4.9/README.rst astroid-1.5.3/README.rst --- astroid-1.4.9/README.rst 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/README.rst 2017-03-11 13:04:27.000000000 +0000 @@ -1,10 +1,22 @@ -.. image:: https://drone.io/bitbucket.org/logilab/astroid/status.png - :alt: drone.io Build Status - :target: https://drone.io/bitbucket.org/logilab/astroid - Astroid ======= +.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master + :target: https://travis-ci.org/PyCQA/astroid + +.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true + :alt: AppVeyor Build Status + :target: https://ci.appveyor.com/project/PCManticore/astroid + +.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master + :target: https://coveralls.io/github/PyCQA/astroid?branch=master + +.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest + :target: http://astroid.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + + + What's this? ------------ @@ -21,15 +33,6 @@ scopes. Furthermore, astroid builds partial trees by inspecting living objects. -Main modules are: - -* `bases`, `node_classses` and `scoped_nodes` contain the classes for the - different type of nodes of the tree. - -* the `manager` contains a high level object to get astroid trees from - source files and living objects. It maintains a cache of previously - constructed tree for quick access. - Installation ------------ diff -Nru astroid-1.4.9/setup.cfg astroid-1.5.3/setup.cfg --- astroid-1.4.9/setup.cfg 2016-12-18 10:27:52.000000000 +0000 +++ astroid-1.5.3/setup.cfg 2017-06-03 14:04:16.000000000 +0000 @@ -4,5 +4,4 @@ [egg_info] tag_build = tag_date = 0 -tag_svn_revision = 0 diff -Nru astroid-1.4.9/setup.py astroid-1.5.3/setup.py --- astroid-1.4.9/setup.py 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/setup.py 2017-03-11 13:04:28.000000000 +0000 @@ -1,22 +1,12 @@ #!/usr/bin/env python +# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) +# Copyright (c) 2014 Google, Inc. 
+# Copyright (c) 2014-2016 Claudiu Popa + +# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html +# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER + # pylint: disable=W0404,W0622,W0704,W0613 -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of astroid. -# -# astroid is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) any -# later version. -# -# astroid is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with astroid. If not, see . """Setup script for astroid.""" import os from setuptools import setup, find_packages @@ -63,6 +53,7 @@ url = web, include_package_data = True, install_requires = install_requires, + extras_require=extras_require, packages = find_packages(), cmdclass={'install_lib': AstroidInstallLib, 'easy_install': AstroidEasyInstallLib} diff -Nru astroid-1.4.9/tox.ini astroid-1.5.3/tox.ini --- astroid-1.4.9/tox.ini 2016-12-18 10:16:32.000000000 +0000 +++ astroid-1.5.3/tox.ini 2017-06-01 22:07:25.000000000 +0000 @@ -1,20 +1,56 @@ -[tox] -envlist = py27, py33, py34, py35, pypy, jython, pylint -skip_missing_interpreters = true - -[testenv:pylint] -commands = pylint -rn --rcfile={toxinidir}/pylintrc {envsitepackagesdir}/astroid - -[testenv] -deps = - py27,py33,pypy,jython: enum34 - lazy-object-proxy - nose - py27,py33,py34,py35: numpy - pytest - python-dateutil - py27,py33,pypy,jython: singledispatch - six - wrapt - pylint: git+https://github.com/pycqa/pylint@1.5.0 -commands = python -m unittest discover -s {envsitepackagesdir}/astroid/tests -p "unittest*.py" +[tox] +envlist = py27, py33, py34, py35, py36, pypy, pylint +skip_missing_interpreters = true + +[testenv:pylint] +commands = pylint -rn --rcfile={toxinidir}/pylintrc {envsitepackagesdir}/astroid + +[testenv] +deps = + py27,pypy: backports.functools_lru_cache + py27,py33,pypy: enum34 + lazy-object-proxy + nose + py27,py34,py35,py36: numpy + pytest + python-dateutil + py27,py33,pypy: singledispatch + six + wrapt + pylint: git+https://github.com/pycqa/pylint@master + coverage + +setenv = + COVERAGE_FILE = {toxinidir}/.coverage.{envname} + +commands = + python -Wi {envsitepackagesdir}/coverage run -m unittest {posargs: discover -s {envsitepackagesdir}/astroid/tests -p "unittest*.py"} + ; Transform absolute path to relative path + ; for compatibility with coveralls.io and fix 'source not available' error. 
+ ; Suggestions for a cleaner way to do this are welcome + python -c "import os;cov_strip_abspath = open(os.environ['COVERAGE_FILE'], 'r').read().replace('.tox' + os.sep + os.path.relpath('{envsitepackagesdir}', '{toxworkdir}') + os.sep, '');open(os.environ['COVERAGE_FILE'], 'w').write(cov_strip_abspath)" + +[testenv:coveralls] +setenv = + COVERAGE_FILE = {toxinidir}/.coverage +passenv = + * +deps = + coverage + coveralls +skip_install = true +commands = + python {envsitepackagesdir}/coverage combine + python {envsitepackagesdir}/coverage report --rcfile={toxinidir}/.coveragerc -m + - coveralls --rcfile={toxinidir}/.coveragerc +changedir = {toxinidir} + +[testenv:coverage-erase] +setenv = + COVERAGE_FILE = {toxinidir}/.coverage +deps = + coverage +skip_install = true +commands = + python {envsitepackagesdir}/coverage erase +changedir = {toxinidir}
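The ``extras_require`` hook added to setup.py, together with the interpreter-specific dependencies visible in tox.ini and debian/control above, suggests a layout along these lines; a sketch only, with hypothetical environment markers and a placeholder project name rather than the exact astroid 1.5.3 metadata::

    from setuptools import setup, find_packages

    extras_require = {
        # backports are only pulled in where the stdlib lacks the feature
        ':python_version<"3.4"': ['enum34>=1.1.3', 'singledispatch'],
        ':python_version<"3.3"': ['backports.functools_lru_cache'],
    }

    setup(
        name='astroid-packaging-sketch',  # placeholder, not the real project name
        version='0.0.0',
        packages=find_packages(),
        install_requires=['lazy-object-proxy', 'six', 'wrapt'],
        extras_require=extras_require,
    )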