_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q31100
infer_slice
train
def infer_slice(node, context=None):
    """Understand `slice` calls.

    :param nodes.Call node: a ``slice(...)`` call with 1 to 3 arguments
    :param context: inference context, forwarded to ``helpers.safe_infer``
    :rtype nodes.Slice: a synthetic Slice node built from the call arguments
    :raises UseInferenceDefault: if the argument count is wrong or any
        argument does not infer to a ``None``/``int`` Const
    """
    args = node.args
    if not 0 < len(args) <= 3:
        raise UseInferenceDefault
    infer_func = partial(helpers.safe_infer, context=context)
    args = [infer_func(arg) for arg in args]
    for arg in args:
        if not arg or arg is util.Uninferable:
            raise UseInferenceDefault
        if not isinstance(arg, nodes.Const):
            raise UseInferenceDefault
        if not isinstance(arg.value, (type(None), int)):
            raise UseInferenceDefault
    if len(args) < 3:
        # Make sure we have 3 arguments (missing ones default to None,
        # matching slice()'s own semantics).
        args.extend([None] * (3 - len(args)))
    slice_node = nodes.Slice(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
    )
    slice_node.postinit(*args)
    return slice_node
python
{ "resource": "" }
q31101
infer_isinstance
train
def infer_isinstance(callnode, context=None):
    """Infer isinstance calls

    :param nodes.Call callnode: an isinstance call
    :param InferenceContext: context for call
        (currently unused but is a common interface for inference)
    :rtype nodes.Const: Boolean Const value of isinstance call
    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode)
    if call.keyword_arguments:
        # isinstance doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
    if len(call.positional_arguments) != 2:
        raise UseInferenceDefault(
            "Expected two arguments, got {count}".format(
                count=len(call.positional_arguments)
            )
        )
    # The left hand argument is the obj to be checked
    obj_node, class_or_tuple_node = call.positional_arguments
    # The right hand argument is the class(es) that the given
    # obj is to be check is an instance of
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError:
        raise UseInferenceDefault
    try:
        isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc))
    except MroError as exc:
        raise UseInferenceDefault from exc
    if isinstance_bool is util.Uninferable:
        # Could not decide membership; fall back to default inference.
        raise UseInferenceDefault
    return nodes.Const(isinstance_bool)
python
{ "resource": "" }
q31102
infer_len
train
def infer_len(node, context=None):
    """Infer length calls

    :param nodes.Call node: len call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const node with the inferred length, if possible
    :raises UseInferenceDefault: if the call has keyword arguments, does not
        have exactly one positional argument, or the length is uninferable
    """
    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
    if len(call.positional_arguments) != 1:
        raise UseInferenceDefault(
            "TypeError: len() must take exactly one argument "
            "({len}) given".format(len=len(call.positional_arguments))
        )
    # Exactly one positional argument at this point.
    [argument_node] = call.positional_arguments
    try:
        return nodes.Const(helpers.object_len(argument_node, context=context))
    except (AstroidTypeError, InferenceError) as exc:
        raise UseInferenceDefault(str(exc)) from exc
python
{ "resource": "" }
q31103
infer_dict_fromkeys
train
def infer_dict_fromkeys(node, context=None):
    """Infer dict.fromkeys

    :param nodes.Call node: dict.fromkeys() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Dict:
        a Dictionary containing the values that astroid was able to infer.
        In case the inference failed for any reason, an empty dictionary
        will be inferred instead.
    :raises UseInferenceDefault: for keyword arguments or a wrong number of
        positional arguments
    """

    def _build_dict_with_elements(elements):
        # Helper: wrap the (key, value) pairs in a synthetic Dict node
        # anchored at the call site.
        new_node = nodes.Dict(
            col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
        )
        new_node.postinit(elements)
        return new_node

    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        # BUG FIX: the message previously said "int()" (copy-paste from the
        # int inference helper); this is a dict.fromkeys() call.
        raise UseInferenceDefault(
            "TypeError: dict.fromkeys() must take no keyword arguments"
        )
    if len(call.positional_arguments) not in {1, 2}:
        raise UseInferenceDefault(
            "TypeError: Needs between 1 and 2 positional arguments"
        )

    default = nodes.Const(None)
    values = call.positional_arguments[0]
    try:
        inferred_values = next(values.infer(context=context))
    except InferenceError:
        return _build_dict_with_elements([])
    if inferred_values is util.Uninferable:
        return _build_dict_with_elements([])

    # Limit to a couple of potential values, as this can become pretty complicated
    accepted_iterable_elements = (nodes.Const,)
    if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
        elements = inferred_values.elts
        for element in elements:
            if not isinstance(element, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])
        elements_with_value = [(element, default) for element in elements]
        return _build_dict_with_elements(elements_with_value)
    elif isinstance(inferred_values, nodes.Const) and isinstance(
        inferred_values.value, (str, bytes)
    ):
        # Each character/byte of the string becomes a key.
        elements = [
            (nodes.Const(element), default) for element in inferred_values.value
        ]
        return _build_dict_with_elements(elements)
    elif isinstance(inferred_values, nodes.Dict):
        keys = inferred_values.itered()
        for key in keys:
            if not isinstance(key, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])
        elements_with_value = [(element, default) for element in keys]
        return _build_dict_with_elements(elements_with_value)

    # Fallback to an empty dictionary
    return _build_dict_with_elements([])
python
{ "resource": "" }
q31104
AsStringVisitor._stmt_list
train
def _stmt_list(self, stmts, indent=True): """return a list of nodes to string""" stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr) if indent: return self.indent + stmts.replace("\n", "\n" + self.indent) return stmts
python
{ "resource": "" }
q31105
AsStringVisitor._precedence_parens
train
def _precedence_parens(self, node, child, is_left=True): """Wrap child in parens only if required to keep same semantics""" if self._should_wrap(node, child, is_left): return "(%s)" % child.accept(self) return child.accept(self)
python
{ "resource": "" }
q31106
AsStringVisitor.visit_assert
train
def visit_assert(self, node):
    """return an astroid.Assert node as string"""
    test_str = node.test.accept(self)
    if not node.fail:
        return "assert %s" % test_str
    return "assert %s, %s" % (test_str, node.fail.accept(self))
python
{ "resource": "" }
q31107
AsStringVisitor.visit_assign
train
def visit_assign(self, node):
    """return an astroid.Assign node as string"""
    targets = [target.accept(self) for target in node.targets]
    return "%s = %s" % (" = ".join(targets), node.value.accept(self))
python
{ "resource": "" }
q31108
AsStringVisitor.visit_augassign
train
def visit_augassign(self, node):
    """return an astroid.AugAssign node as string"""
    parts = (node.target.accept(self), node.op, node.value.accept(self))
    return "%s %s %s" % parts
python
{ "resource": "" }
q31109
AsStringVisitor.visit_annassign
train
def visit_annassign(self, node):
    """Return an astroid.AnnAssign node as string

    DOC FIX: the docstring previously said "AugAssign"; this visitor
    renders annotated assignments (``x: int = 1`` / ``x: int``).
    """
    target = node.target.accept(self)
    annotation = node.annotation.accept(self)
    if node.value is None:
        # Bare annotation without an assigned value.
        return "%s: %s" % (target, annotation)
    return "%s: %s = %s" % (target, annotation, node.value.accept(self))
python
{ "resource": "" }
q31110
AsStringVisitor.visit_binop
train
def visit_binop(self, node):
    """return an astroid.BinOp node as string"""
    left = self._precedence_parens(node, node.left)
    right = self._precedence_parens(node, node.right, is_left=False)
    # Power binds tightly and is conventionally written without spaces.
    template = "%s%s%s" if node.op == "**" else "%s %s %s"
    return template % (left, node.op, right)
python
{ "resource": "" }
q31111
AsStringVisitor.visit_boolop
train
def visit_boolop(self, node):
    """return an astroid.BoolOp node as string"""
    separator = " %s " % node.op
    return separator.join(
        "%s" % self._precedence_parens(node, value) for value in node.values
    )
python
{ "resource": "" }
q31112
AsStringVisitor.visit_call
train
def visit_call(self, node):
    """return an astroid.Call node as string"""
    expr_str = self._precedence_parens(node, node.func)
    rendered = [positional.accept(self) for positional in node.args]
    if node.keywords:
        rendered.extend(keyword.accept(self) for keyword in node.keywords)
    return "%s(%s)" % (expr_str, ", ".join(rendered))
python
{ "resource": "" }
q31113
AsStringVisitor.visit_classdef
train
def visit_classdef(self, node):
    """return an astroid.ClassDef node as string"""
    decorate = node.decorators.accept(self) if node.decorators else ""
    bases = ", ".join(n.accept(self) for n in node.bases)
    metaclass = node.metaclass()
    if metaclass and not node.has_metaclass_hack():
        # Render the metaclass as a keyword argument next to any bases.
        if bases:
            bases = "(%s, metaclass=%s)" % (bases, metaclass.name)
        else:
            bases = "(metaclass=%s)" % metaclass.name
    else:
        # Parenthesize the bases only when there are any.
        bases = "(%s)" % bases if bases else ""
    docs = self._docs_dedent(node.doc) if node.doc else ""
    return "\n\n%sclass %s%s:%s\n%s\n" % (
        decorate,
        node.name,
        bases,
        docs,
        self._stmt_list(node.body),
    )
python
{ "resource": "" }
q31114
AsStringVisitor.visit_compare
train
def visit_compare(self, node):
    """return an astroid.Compare node as string"""
    comparisons = []
    for op, expr in node.ops:
        comparisons.append(
            "%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
        )
    left = self._precedence_parens(node, node.left)
    return "%s %s" % (left, " ".join(comparisons))
python
{ "resource": "" }
q31115
AsStringVisitor.visit_delete
train
def visit_delete(self, node):  # XXX check if correct
    """return an astroid.Delete node as string"""
    targets = [target.accept(self) for target in node.targets]
    return "del %s" % ", ".join(targets)
python
{ "resource": "" }
q31116
AsStringVisitor.visit_decorators
train
def visit_decorators(self, node):
    """return an astroid.Decorators node as string"""
    rendered = (decorator.accept(self) for decorator in node.nodes)
    return "@%s\n" % "\n@".join(rendered)
python
{ "resource": "" }
q31117
AsStringVisitor.visit_dictcomp
train
def visit_dictcomp(self, node):
    """return an astroid.DictComp node as string"""
    key = node.key.accept(self)
    value = node.value.accept(self)
    generators = " ".join(gen.accept(self) for gen in node.generators)
    return "{%s: %s %s}" % (key, value, generators)
python
{ "resource": "" }
q31118
AsStringVisitor.visit_exec
train
def visit_exec(self, node):
    """return an astroid.Exec node as string"""
    expr = node.expr.accept(self)
    if node.locals:
        return "exec %s in %s, %s" % (
            expr,
            node.locals.accept(self),
            node.globals.accept(self),
        )
    if node.globals:
        return "exec %s in %s" % (expr, node.globals.accept(self))
    return "exec %s" % expr
python
{ "resource": "" }
q31119
AsStringVisitor.visit_extslice
train
def visit_extslice(self, node):
    """return an astroid.ExtSlice node as string"""
    dims = [dim.accept(self) for dim in node.dims]
    return ", ".join(dims)
python
{ "resource": "" }
q31120
AsStringVisitor.visit_for
train
def visit_for(self, node):
    """return an astroid.For node as string"""
    result = "for %s in %s:\n%s" % (
        node.target.accept(self),
        node.iter.accept(self),
        self._stmt_list(node.body),
    )
    if not node.orelse:
        return result
    return "%s\nelse:\n%s" % (result, self._stmt_list(node.orelse))
python
{ "resource": "" }
q31121
AsStringVisitor.visit_importfrom
train
def visit_importfrom(self, node):
    """return an astroid.ImportFrom node as string"""
    # One leading dot per relative-import level.
    prefix = "." * (node.level or 0)
    return "from %s import %s" % (prefix + node.modname, _import_string(node.names))
python
{ "resource": "" }
q31122
AsStringVisitor.visit_functiondef
train
def visit_functiondef(self, node):
    """return an astroid.Function node as string"""
    decorate = node.decorators.accept(self) if node.decorators else ""
    docs = self._docs_dedent(node.doc) if node.doc else ""
    trailer = ":"
    if node.returns:
        # NOTE: rendered without spaces around "->", matching this
        # visitor's existing output format.
        return_annotation = "->" + node.returns.as_string()
        trailer = return_annotation + ":"
    def_format = "\n%sdef %s(%s)%s%s\n%s"
    return def_format % (
        decorate,
        node.name,
        node.args.accept(self),
        trailer,
        docs,
        self._stmt_list(node.body),
    )
python
{ "resource": "" }
q31123
AsStringVisitor.visit_generatorexp
train
def visit_generatorexp(self, node):
    """return an astroid.GeneratorExp node as string"""
    element = node.elt.accept(self)
    generators = " ".join(gen.accept(self) for gen in node.generators)
    return "(%s %s)" % (element, generators)
python
{ "resource": "" }
q31124
AsStringVisitor.visit_attribute
train
def visit_attribute(self, node):
    """return an astroid.Getattr node as string"""
    owner = self._precedence_parens(node, node.expr)
    return "%s.%s" % (owner, node.attrname)
python
{ "resource": "" }
q31125
AsStringVisitor.visit_if
train
def visit_if(self, node):
    """return an astroid.If node as string"""
    parts = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
    if node.has_elif_block():
        # Collapse `else: if ...:` back into `elif ...:`.
        parts.append("el%s" % self._stmt_list(node.orelse, indent=False))
    elif node.orelse:
        parts.append("else:\n%s" % self._stmt_list(node.orelse))
    return "\n".join(parts)
python
{ "resource": "" }
q31126
AsStringVisitor.visit_ifexp
train
def visit_ifexp(self, node):
    """return an astroid.IfExp node as string"""
    body = self._precedence_parens(node, node.body, is_left=True)
    test = self._precedence_parens(node, node.test, is_left=True)
    orelse = self._precedence_parens(node, node.orelse, is_left=False)
    return "%s if %s else %s" % (body, test, orelse)
python
{ "resource": "" }
q31127
AsStringVisitor.visit_keyword
train
def visit_keyword(self, node):
    """return an astroid.Keyword node as string"""
    value_str = node.value.accept(self)
    if node.arg is None:
        # A bare ** double-star unpacking.
        return "**%s" % value_str
    return "%s=%s" % (node.arg, value_str)
python
{ "resource": "" }
q31128
AsStringVisitor.visit_lambda
train
def visit_lambda(self, node):
    """return an astroid.Lambda node as string"""
    args = node.args.accept(self)
    body = node.body.accept(self)
    if not args:
        return "lambda: %s" % body
    return "lambda %s: %s" % (args, body)
python
{ "resource": "" }
q31129
AsStringVisitor.visit_list
train
def visit_list(self, node):
    """return an astroid.List node as string"""
    elements = [element.accept(self) for element in node.elts]
    return "[%s]" % ", ".join(elements)
python
{ "resource": "" }
q31130
AsStringVisitor.visit_listcomp
train
def visit_listcomp(self, node):
    """return an astroid.ListComp node as string"""
    element = node.elt.accept(self)
    generators = " ".join(gen.accept(self) for gen in node.generators)
    return "[%s %s]" % (element, generators)
python
{ "resource": "" }
q31131
AsStringVisitor.visit_module
train
def visit_module(self, node):
    """return an astroid.Module node as string"""
    docs = '"""%s"""\n\n' % node.doc if node.doc else ""
    body = "\n".join(stmt.accept(self) for stmt in node.body)
    return docs + body + "\n\n"
python
{ "resource": "" }
q31132
AsStringVisitor.visit_print
train
def visit_print(self, node):
    """return an astroid.Print node as string"""
    rendered = ", ".join(value.accept(self) for value in node.values)
    if not node.nl:
        # Trailing comma suppresses the newline in Python 2 print.
        rendered = "%s," % rendered
    if not node.dest:
        return "print %s" % rendered
    return "print >> %s, %s" % (node.dest.accept(self), rendered)
python
{ "resource": "" }
q31133
AsStringVisitor.visit_return
train
def visit_return(self, node):
    """return an astroid.Return node as string"""
    if node.is_tuple_return() and len(node.value.elts) > 1:
        # Render multi-value returns without tuple parentheses.
        rendered = ", ".join(child.accept(self) for child in node.value.elts)
        return "return %s" % rendered
    if node.value:
        return "return %s" % node.value.accept(self)
    return "return"
python
{ "resource": "" }
q31134
AsStringVisitor.visit_set
train
def visit_set(self, node):
    """return an astroid.Set node as string"""
    elements = [element.accept(self) for element in node.elts]
    return "{%s}" % ", ".join(elements)
python
{ "resource": "" }
q31135
AsStringVisitor.visit_setcomp
train
def visit_setcomp(self, node):
    """return an astroid.SetComp node as string"""
    element = node.elt.accept(self)
    generators = " ".join(gen.accept(self) for gen in node.generators)
    return "{%s %s}" % (element, generators)
python
{ "resource": "" }
q31136
AsStringVisitor.visit_slice
train
def visit_slice(self, node):
    """return an astroid.Slice node as string"""
    lower = node.lower.accept(self) if node.lower else ""
    upper = node.upper.accept(self) if node.upper else ""
    step = node.step.accept(self) if node.step else ""
    if not step:
        return "%s:%s" % (lower, upper)
    return "%s:%s:%s" % (lower, upper, step)
python
{ "resource": "" }
q31137
AsStringVisitor.visit_subscript
train
def visit_subscript(self, node):
    """return an astroid.Subscript node as string"""
    index = node.slice
    if index.__class__.__name__.lower() == "index":
        # Unwrap Index nodes down to the real value node.
        index = index.value
    idxstr = index.accept(self)
    if index.__class__.__name__.lower() == "tuple" and index.elts:
        # Remove parenthesis in tuple and extended slice.
        # a[(::1, 1:)] is not valid syntax.
        idxstr = idxstr[1:-1]
    return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
python
{ "resource": "" }
q31138
AsStringVisitor.visit_tryexcept
train
def visit_tryexcept(self, node):
    """return an astroid.TryExcept node as string"""
    parts = ["try:\n%s" % self._stmt_list(node.body)]
    parts.extend(handler.accept(self) for handler in node.handlers)
    if node.orelse:
        parts.append("else:\n%s" % self._stmt_list(node.orelse))
    return "\n".join(parts)
python
{ "resource": "" }
q31139
AsStringVisitor.visit_tryfinally
train
def visit_tryfinally(self, node):
    """return an astroid.TryFinally node as string"""
    body = self._stmt_list(node.body)
    final = self._stmt_list(node.finalbody)
    return "try:\n%s\nfinally:\n%s" % (body, final)
python
{ "resource": "" }
q31140
AsStringVisitor.visit_tuple
train
def visit_tuple(self, node):
    """return an astroid.Tuple node as string"""
    if len(node.elts) == 1:
        # A one-element tuple needs the trailing comma.
        return "(%s, )" % node.elts[0].accept(self)
    rendered = [child.accept(self) for child in node.elts]
    return "(%s)" % ", ".join(rendered)
python
{ "resource": "" }
q31141
AsStringVisitor.visit_unaryop
train
def visit_unaryop(self, node):
    """return an astroid.UnaryOp node as string"""
    # `not` needs a separating space; -, +, ~ attach directly.
    operator = "not " if node.op == "not" else node.op
    return "%s%s" % (operator, self._precedence_parens(node, node.operand))
python
{ "resource": "" }
q31142
AsStringVisitor.visit_while
train
def visit_while(self, node):
    """return an astroid.While node as string"""
    result = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
    if not node.orelse:
        return result
    return "%s\nelse:\n%s" % (result, self._stmt_list(node.orelse))
python
{ "resource": "" }
q31143
AsStringVisitor.visit_with
train
def visit_with(self, node):  # 'with' without 'as' is possible
    """return an astroid.With node as string"""
    rendered_items = []
    for expr, vars in node.items:
        item = "%s" % expr.accept(self)
        if vars:
            item += " as %s" % (vars.accept(self))
        rendered_items.append(item)
    return "with %s:\n%s" % (", ".join(rendered_items), self._stmt_list(node.body))
python
{ "resource": "" }
q31144
AsStringVisitor3.visit_yieldfrom
train
def visit_yieldfrom(self, node):
    """ Return an astroid.YieldFrom node as string. """
    suffix = (" " + node.value.accept(self)) if node.value else ""
    expr = "yield from" + suffix
    if node.parent.is_statement:
        return expr
    # Expression position: parenthesize.
    return "(%s)" % (expr,)
python
{ "resource": "" }
q31145
bind_context_to_node
train
def bind_context_to_node(context, node):
    """Give a context a boundnode
    to retrieve the correct function name or attribute value
    with from further inference.

    Do not use an existing context since the boundnode could then
    be incorrectly propagated higher up in the call stack.

    :param context: Context to use
    :type context: Optional(context)

    :param node: Node to do name lookups from
    :type node NodeNG:

    :returns: A new context
    :rtype: InferenceContext
    """
    # copy_context handles a None context by creating a fresh one.
    context = copy_context(context)
    context.boundnode = node
    return context
python
{ "resource": "" }
q31146
InferenceContext.push
train
def push(self, node):
    """Push node into inference path

    :return: True if node is already in context path else False
    :rtype: bool

    Allows one to see if the given node has already been looked
    at for this inference context"""
    key = (node, self.lookupname)
    if key in self.path:
        return True
    self.path.add(key)
    return False
python
{ "resource": "" }
q31147
InferenceContext.clone
train
def clone(self): """Clone inference path For example, each side of a binary operation (BinOp) starts with the same context but diverge as each side is inferred so the InferenceContext will need be cloned""" # XXX copy lookupname/callcontext ? clone = InferenceContext(self.path, inferred=self.inferred) clone.callcontext = self.callcontext clone.boundnode = self.boundnode clone.extra_context = self.extra_context return clone
python
{ "resource": "" }
q31148
InferenceContext.cache_generator
train
def cache_generator(self, key, generator):
    """Cache result of generator into dictionary

    Used to cache inference results"""
    collected = []
    for item in generator:
        collected.append(item)
        yield item
    # Only after exhaustion is the full result memoized.
    self.inferred[key] = tuple(collected)
python
{ "resource": "" }
q31149
NestedViewSetMixin.get_queryset
train
def get_queryset(self):
    """
    Filter the `QuerySet` based on its parents as defined in the
    `serializer_class.parent_lookup_kwargs`.
    """
    queryset = super(NestedViewSetMixin, self).get_queryset()
    if hasattr(self.serializer_class, 'parent_lookup_kwargs'):
        orm_filters = {}
        # Map each URL kwarg to its ORM field lookup.
        # NOTE(review): raises KeyError if a parent lookup kwarg is not
        # present in self.kwargs — presumably guaranteed by the URL router.
        for query_param, field_name in self.serializer_class.parent_lookup_kwargs.items():
            orm_filters[field_name] = self.kwargs[query_param]
        return queryset.filter(**orm_filters)
    return queryset
python
{ "resource": "" }
q31150
ReteMatcher.changes
train
def changes(self, adding=None, deleting=None):
    """Pass the given changes to the root_node."""
    # Deletions are applied before additions, preserving call order.
    if deleting is not None:
        for fact in deleting:
            self.root_node.remove(fact)
    if adding is not None:
        for fact in adding:
            self.root_node.add(fact)
    added = list()
    removed = list()
    for csn in self._get_conflict_set_nodes():
        c_added, c_removed = csn.get_activations()
        added.extend(c_added)
        removed.extend(c_removed)
    return (added, removed)
python
{ "resource": "" }
q31151
ReteMatcher.build_alpha_part
train
def build_alpha_part(ruleset, root_node):
    """
    Given a set of already adapted rules, build the alpha part of
    the RETE network starting at `root_node`.

    Returns a dict mapping each fact pattern to its terminal alpha node,
    so the beta part can be wired onto it.  Shared check prefixes are
    ranked by frequency so common tests are merged near the root.
    """
    # Adds a dummy rule with InitialFact as LHS for always generate
    # the alpha part matching InitialFact(). This is needed for the
    # CE using InitialFact
    ruleset = ruleset.copy()
    ruleset.add(Rule(InitialFact()))

    # Generate a dictionary with rules and the set of facts of the
    # rule.
    rule_facts = {rule: extract_facts(rule) for rule in ruleset}

    # For each fact build a list of checker function capable of
    # check for each part in the fact.
    fact_checks = {fact: set(generate_checks(fact))
                   for fact in chain.from_iterable(rule_facts.values())}

    # Make a ranking of the most used checks
    check_rank = Counter(chain.from_iterable(fact_checks.values()))

    def weighted_check_sort(check):
        """Sort check by its type and number of times seen."""
        # TypeChecks always sort first, FactCaptures always last,
        # FeatureChecks in between by popularity.
        if isinstance(check, TypeCheck):
            return (float('inf'), hash(check))
        elif isinstance(check, FactCapture):
            return (float('-inf'), hash(check))
        elif isinstance(check, FeatureCheck):
            return (check_rank[check], hash(check))
        else:
            raise TypeError("Unknown check type.")  # pragma: no cover

    def weighted_rule_sort(rule):
        """Sort rules by the average weight of its checks."""
        total = 0
        for fact in rule_facts[rule]:
            for check in fact_checks[fact]:
                total += check_rank[check]
        return total / len(rule_facts[rule])

    sorted_rules = sorted(ruleset, key=weighted_rule_sort, reverse=True)

    fact_terminal_nodes = dict()

    # For rule in rank order and for each rule fact also in rank
    # order, build the alpha brank looking for an existing node
    # first.
    for rule in sorted_rules:
        for fact in rule_facts[rule]:
            current_node = root_node
            fact_sorted_checks = sorted(
                fact_checks[fact], key=weighted_check_sort, reverse=True)
            for check in fact_sorted_checks:
                # Look for a child node with the given check in the
                # current parent node.
                for child in current_node.children:
                    if child.node.matcher is check:
                        # Reuse the existing branch for this check.
                        current_node = child.node
                        break
                else:
                    # Create a new node and append as child
                    new_node = FeatureTesterNode(check)
                    current_node.add_child(new_node, new_node.activate)
                    current_node = new_node
            fact_terminal_nodes[fact] = current_node

    # Return this dictionary containing the last alpha node for each
    # fact.
    return fact_terminal_nodes
python
{ "resource": "" }
q31152
ReteMatcher.build_beta_part
train
def build_beta_part(ruleset, alpha_terminals):
    """
    Given a set of already adapted rules, and a dictionary of patterns
    and alpha_nodes, wire up the beta part of the RETE network.

    Rules whose top-level CE is an OR are expanded: each branch is
    wired as an independent sub-network feeding the same rule.
    """
    for rule in ruleset:
        if isinstance(rule[0], OR):
            for subrule in rule[0]:
                wire_rule(rule, alpha_terminals, lhs=subrule)
        else:
            wire_rule(rule, alpha_terminals, lhs=rule)
python
{ "resource": "" }
q31153
ReteMatcher.print_network
train
def print_network(self):  # pragma: no cover
    """
    Generate a graphviz compatible graph.

    Walks the network from the root node emitting one DOT node per
    RETE node and one labelled edge per (parent, callback) pair, with
    the `edges` set preventing duplicate emission on shared subtrees.
    """
    edges = set()

    def gen_edges(node):
        nonlocal edges

        # DOT node id derived from the object identity.
        name = str(id(node))

        yield '{name} [label="{cls_name}"];'.format(
            name=name, cls_name=str(node))

        for child in node.children:
            if (node, child.callback) not in edges:
                yield ('{parent} -> {child} '
                       '[label="{child_label}"];').format(
                           parent=name,
                           child=str(id(child.node)),
                           child_label=child.callback.__name__)
                edges.add((node, child.callback))
                yield from gen_edges(child.node)

    return "digraph {\n %s \n}" % ("\n".join(
        gen_edges(self.root_node)))
python
{ "resource": "" }
q31154
Node.reset
train
def reset(self):
    """Reset itself and recursively all its children."""
    watchers.MATCHER.debug("Node <%s> reset", self)
    # _reset clears this node's own state; subclasses define it.
    self._reset()
    for child in self.children:
        child.node.reset()
python
{ "resource": "" }
q31155
OneInputNode.activate
train
def activate(self, token):
    """Make a copy of the received token and call `self._activate`.

    Copying isolates this node's mutations from sibling branches.
    """
    if watchers.worth('MATCHER', 'DEBUG'):  # pragma: no cover
        watchers.MATCHER.debug(
            "Node <%s> activated with token %r", self, token)
    return self._activate(token.copy())
python
{ "resource": "" }
q31156
TwoInputNode.activate_left
train
def activate_left(self, token):
    """Make a copy of the received token and call `_activate_left`."""
    watchers.MATCHER.debug(
        "Node <%s> activated left with token %r", self, token)
    # Copy so downstream mutation does not leak back to the sender.
    return self._activate_left(token.copy())
python
{ "resource": "" }
q31157
TwoInputNode.activate_right
train
def activate_right(self, token):
    """Make a copy of the received token and call `_activate_right`."""
    watchers.MATCHER.debug(
        "Node <%s> activated right with token %r", self, token)
    # Copy so downstream mutation does not leak back to the sender.
    return self._activate_right(token.copy())
python
{ "resource": "" }
q31158
KnowledgeEngine.duplicate
train
def duplicate(self, template_fact, **modifiers):
    """Create a new fact from an existing one.

    :param template_fact: fact to copy
    :param modifiers: field overrides applied on top of the copy
    :return: whatever ``declare`` returns for the new fact
    """
    newfact = template_fact.copy()
    newfact.update(dict(self._get_real_modifiers(**modifiers)))
    return self.declare(newfact)
python
{ "resource": "" }
q31159
KnowledgeEngine.get_deffacts
train
def get_deffacts(self):
    """Return the existing deffacts sorted by the internal order"""
    # `order` preserves declaration order across the class hierarchy.
    return sorted(self._get_by_type(DefFacts), key=lambda d: d.order)
python
{ "resource": "" }
q31160
KnowledgeEngine.retract
train
def retract(self, idx_or_declared_fact):
    """
    Retracts a specific fact, using its index

    .. note::
        This updates the agenda
    """
    self.facts.retract(idx_or_declared_fact)

    # While run() is active the agenda is refreshed by the main loop,
    # so only update it here when the engine is idle.
    if not self.running:
        added, removed = self.get_activations()
        self.strategy.update_agenda(self.agenda, added, removed)
python
{ "resource": "" }
q31161
KnowledgeEngine.run
train
def run(self, steps=float('inf')):
    """
    Execute agenda activations

    :param steps: maximum number of rule firings; defaults to running
        until the agenda is empty or ``halt`` clears ``self.running``.
    """
    self.running = True
    activation = None
    execution = 0
    while steps > 0 and self.running:

        # Refresh the agenda with activations produced since last cycle.
        added, removed = self.get_activations()
        self.strategy.update_agenda(self.agenda, added, removed)

        if watchers.worth('AGENDA', 'DEBUG'):  # pragma: no cover
            for idx, act in enumerate(self.agenda.activations):
                watchers.AGENDA.debug(
                    "%d: %r %r",
                    idx,
                    act.rule.__name__,
                    ", ".join(str(f) for f in act.facts))

        activation = self.agenda.get_next()

        if activation is None:
            # Nothing left to fire.
            break
        else:
            steps -= 1
            execution += 1

            watchers.RULES.info(
                "FIRE %s %s: %s",
                execution,
                activation.rule.__name__,
                ", ".join(str(f) for f in activation.facts))

            # Fire the rule, passing only the user-visible context
            # entries (internal keys start with '__').
            activation.rule(
                self,
                **{k: v for k, v in activation.context.items()
                   if not k.startswith('__')})

    self.running = False
python
{ "resource": "" }
q31162
KnowledgeEngine.__declare
train
def __declare(self, *facts):
    """
    Internal declaration method. Used for ``declare`` and ``deffacts``

    :raises TypeError: if a fact carries conditional elements
    :raises KeyError: if a fact uses double-underscore (nested) keys
    :return: the last fact object actually inserted
    """
    if any(f.has_field_constraints() for f in facts):
        raise TypeError(
            "Declared facts cannot contain conditional elements")
    elif any(f.has_nested_accessor() for f in facts):
        raise KeyError(
            "Cannot declare facts containing double underscores as keys.")
    else:
        last_inserted = None
        for fact in facts:
            last_inserted = self.facts.declare(fact)

        # The run() loop refreshes the agenda itself; only do it here
        # when the engine is idle.
        if not self.running:
            added, removed = self.get_activations()
            self.strategy.update_agenda(self.agenda, added, removed)

        return last_inserted
python
{ "resource": "" }
q31163
KnowledgeEngine.declare
train
def declare(self, *facts):
    """
    Declare from inside a fact, equivalent to ``assert`` in clips.

    .. note::

        This updates the agenda.
    """
    # An empty working memory means reset() has not been called yet;
    # warn but proceed anyway.
    if not self.facts:
        watchers.ENGINE.warning("Declaring fact before reset()")
    return self.__declare(*facts)
python
{ "resource": "" }
q31164
REGEX
train
def REGEX(pattern, flags=0):
    """Regular expression matching.

    :param pattern: regex applied to the candidate value
    :param flags: standard ``re`` flags
    :return: a P predicate wrapping the match test

    NOTE(review): uses ``re.match``, which anchors only at the start of
    the string (not a full match) — confirm this is the intended
    semantics for the predicate.
    """
    return P(lambda x: re.match(pattern, x, flags=flags))
python
{ "resource": "" }
q31165
ILIKE
train
def ILIKE(pattern):
    """Unix shell-style wildcards. Case-insensitive

    Both the value and the pattern are lowercased before the
    fnmatch comparison.
    """
    return P(lambda x: fnmatch.fnmatch(x.lower(), pattern.lower()))
python
{ "resource": "" }
q31166
worth
train
def worth(what, level_name):
    """Returns `True` if the watcher `what` would log under `level_name`.

    :param what: name of a module-level watcher logger (looked up in
        this module's globals)
    :param level_name: a logging level name such as ``'DEBUG'``
    """
    # NOTSET (0) watchers are considered disabled.
    return (logging.NOTSET
            < globals()[what].level
            <= getattr(logging, level_name))
python
{ "resource": "" }
q31167
watch
train
def watch(*what, level=logging.DEBUG):
    """
    Enable watchers.

    Defaults to enable all watchers, accepts a list names of watchers
    to enable.

    :param what: watcher names (module-level logger globals); empty
        means every name in ``ALL``
    :param level: logging level to set on each watcher
    """
    if not what:
        what = ALL

    for watcher_name in what:
        watcher = globals()[watcher_name]
        watcher.setLevel(level)
python
{ "resource": "" }
q31168
extract_facts
train
def extract_facts(rule):
    """Given a rule, return a set containing all rule LHS facts.

    Recursively walks nested conditional elements; TEST CEs carry no
    facts and are skipped.
    """
    def _extract_facts(ce):
        if isinstance(ce, Fact):
            yield ce
        elif isinstance(ce, TEST):
            pass
        else:
            # Any other CE is a container of sub-CEs.
            for e in ce:
                yield from _extract_facts(e)

    return set(_extract_facts(rule))
python
{ "resource": "" }
q31169
generate_checks
train
def generate_checks(fact):
    """Given a fact, generate a list of Check objects for checking it.

    Always yields a TypeCheck first, then one check per field, and
    finally a FactCapture binding the matched fact into the context
    (under an auto-generated name unless the fact used ``__bind__``).
    """
    yield TypeCheck(type(fact))

    fact_captured = False
    for key, value in fact.items():
        if (isinstance(key, str)
                and key.startswith('__')
                and key.endswith('__')):
            # Special fact feature
            if key == '__bind__':
                yield FactCapture(value)
                fact_captured = True
            else:  # pragma: no cover
                yield FeatureCheck(key, value)
        else:
            yield FeatureCheck(key, value)

    # Assign the matching fact to the context
    if not fact_captured:
        yield FactCapture("__pattern_%s__" % id(fact))
python
{ "resource": "" }
q31170
BusNode.add
train
def add(self, fact):
    """Create a VALID token and send it to all children.

    :param fact: fact being asserted into working memory.
    """
    token = Token.valid(fact)
    MATCHER.debug("<BusNode> added %r", token)
    for child in self.children:
        child.callback(token)
python
{ "resource": "" }
q31171
BusNode.remove
train
def remove(self, fact):
    """Create an INVALID token and send it to all children.

    :param fact: fact being retracted from working memory.
    """
    token = Token.invalid(fact)
    # BUG FIX: the debug message previously said "added" (copy-paste
    # from BusNode.add); this path retracts a fact.
    MATCHER.debug("<BusNode> removed %r", token)
    for child in self.children:
        child.callback(token)
python
{ "resource": "" }
q31172
OrdinaryMatchNode.__activation
train
def __activation(self, token, branch_memory, matching_memory, is_left=True):
    """
    Node activation internal function.

    This is a generalization of both activation functions.

    The given token is added or removed from `branch_memory`
    depending of its tag. For any other data in `matching_memory` the
    match function will be called and if a match occurs a new token
    will be produced and sent to all children.
    """
    if token.is_valid():
        branch_memory.append(token.to_info())
    else:
        # Removing an entry that was never stored is not an error.
        with suppress(ValueError):
            branch_memory.remove(token.to_info())

    for other_data, other_context in matching_memory:
        other_context = dict(other_context)
        # Orient the contexts so the matcher always sees (left, right).
        if is_left:
            left_context = token.context
            right_context = other_context
        else:
            left_context = other_context
            right_context = token.context

        match = self.matcher(left_context, right_context)

        if match:
            MATCH.info("%s (%s | %s) = True",
                       self.__class__.__name__,
                       left_context,
                       right_context)
            # Merge both contexts, keeping only string keys.
            newcontext = {k: v
                          for k, v in token.context.items()
                          if isinstance(k, str)}
            for k, v in other_context.items():
                if not isinstance(k, tuple):
                    # Negated value are not needed any further
                    newcontext[k] = v

            newtoken = Token(token.tag,
                             token.data | other_data,
                             newcontext)
            for child in self.children:
                child.callback(newtoken)
        else:
            MATCH.debug("%s (%s | %s) = False",
                        self.__class__.__name__,
                        left_context,
                        right_context)
python
{ "resource": "" }
q31173
OrdinaryMatchNode._activate_left
train
def _activate_left(self, token):
    """Handle a token arriving on the left input."""
    own_memory, other_memory = self.left_memory, self.right_memory
    self.__activation(token, own_memory, other_memory, is_left=True)
python
{ "resource": "" }
q31174
OrdinaryMatchNode._activate_right
train
def _activate_right(self, token):
    """Handle a token arriving on the right input."""
    own_memory, other_memory = self.right_memory, self.left_memory
    self.__activation(token, own_memory, other_memory, is_left=False)
python
{ "resource": "" }
q31175
ConflictSetNode._activate
train
def _activate(self, token):
    """Activate this node for the given token.

    Maintains this terminal node's memory of matched token infos and
    the pending `added`/`removed` activation sets later consumed by
    `get_activations`. An assertion that cancels a pending removal
    (or a retraction that cancels a pending addition) is collapsed
    instead of being queued twice.
    """
    info = token.to_info()
    # Only plain string keys from the context become activation context.
    activation = Activation(
        self.rule,
        frozenset(info.data),
        {k: v for k, v in info.context if isinstance(k, str)})
    if token.is_valid():
        if info not in self.memory:
            self.memory.add(info)
            if activation in self.removed:
                # Re-assertion cancels the pending removal.
                self.removed.remove(activation)
            else:
                self.added.add(activation)
    else:
        try:
            self.memory.remove(info)
        except ValueError:
            # NOTE(review): set.remove raises KeyError, not ValueError;
            # if `self.memory` is a plain set this handler never fires —
            # confirm the actual type of self.memory.
            pass
        else:
            if activation in self.added:
                # Retraction cancels the pending addition.
                self.added.remove(activation)
            else:
                self.removed.add(activation)
python
{ "resource": "" }
q31176
ConflictSetNode.get_activations
train
def get_activations(self):
    """Return the pending (added, removed) activation sets and reset both."""
    added, removed = self.added, self.removed
    self.added = set()
    self.removed = set()
    return added, removed
python
{ "resource": "" }
q31177
NotNode._activate_left
train
def _activate_left(self, token):
    """
    Activate from the left.

    In case of a valid token this activations test the right memory
    with the given token and looks for the number of matches. The
    token and the number of occurences are stored in the left memory.

    If the number of matches is zero the token activates all children.
    """
    if not self.right_memory:
        # No blockers at all: mirror the token in/out of left memory
        # and pass it straight through.
        if token.is_valid():
            self.left_memory[token.to_info()] = 0
        else:
            del self.left_memory[token.to_info()]
        for child in self.children:
            child.callback(token)
    elif token.is_valid():
        # Count every right-side match; the token only propagates when
        # nothing on the right blocks it.
        count = 0
        for _, right_context in self.right_memory:
            if self.matcher(token.context, dict(right_context)):
                count += 1
        if count == 0:
            for child in self.children:
                child.callback(token)
        # Remember the blocker count for later right activations.
        self.left_memory[token.to_info()] = count
    else:
        # Invalid token: one right-side match is enough to know it was
        # blocked, so stop counting at the first hit.
        count = 0
        for _, right_context in self.right_memory:
            if self.matcher(token.context, dict(right_context)):
                count += 1
                break
        if count == 0:
            for child in self.children:
                child.callback(token)
        # The token is gone either way; drop its memory entry.
        del self.left_memory[token.to_info()]
python
{ "resource": "" }
q31178
NotNode._activate_right
train
def _activate_right(self, token):
    """
    Activate from the right.

    Go over the left memory and find matching data, when found update
    the counter (substracting if the given token is invalid and adding
    otherwise). Depending on the result of this operation a new token
    is generated and passing to all children.
    """
    # Keep the right memory in sync and pick the counter delta.
    if token.is_valid():
        self.right_memory.append(token.to_info())
        inc = 1
    else:
        inc = -1
        self.right_memory.remove(token.to_info())
    for left in self.left_memory:
        if self.matcher(dict(left.context), token.context):
            self.left_memory[left] += inc
            newcount = self.left_memory[left]
            # Only the 0 <-> 1 transitions change whether the NOT
            # condition holds for this left token.
            if (newcount == 0 and inc == -1) or \
                    (newcount == 1 and inc == 1):
                if inc == -1:
                    # Last blocker removed: the left token holds again.
                    newtoken = left.to_valid_token()
                else:
                    # First blocker appeared: invalidate the left token.
                    newtoken = left.to_invalid_token()
                for child in self.children:
                    child.callback(newtoken)
python
{ "resource": "" }
q31179
FactList.retract
train
def retract(self, idx_or_fact):
    """Retract a previously asserted fact.

    :param idx_or_fact: either the integer index of the fact in the
        factlist, or the fact object itself (whose ``__factid__``
        attribute supplies its index).
    :return: (int) the retracted fact's index
    :raises IndexError: if no fact is stored under that index
    """
    if isinstance(idx_or_fact, int):
        idx = idx_or_fact
    else:
        idx = idx_or_fact.__factid__
    if idx not in self:
        raise IndexError('Fact not found.')
    fact = self[idx]
    # Drop one reference; forget the counter once it reaches zero.
    fact_id = self._get_fact_id(fact)
    self.reference_counter[fact_id] -= 1
    if not self.reference_counter[fact_id]:
        self.reference_counter.pop(fact_id)
    watchers.FACTS.info(" <== %s: %r", fact, fact)
    # Record the removal for the next `changes()` cycle, then delete.
    self.removed.append(fact)
    del self[idx]
    return idx
python
{ "resource": "" }
q31180
FactList.changes
train
def changes(self):
    """Return the (added, removed) facts since the last call, then reset."""
    added, removed = self.added, self.removed
    self.added = list()
    self.removed = list()
    return added, removed
python
{ "resource": "" }
q31181
Rule.new_conditions
train
def new_conditions(self, *args):
    """Clone this rule with the given conditions.

    The clone keeps the original salience and, if this rule already
    wraps a function, wraps the same function.
    """
    clone = self.__class__(*args, salience=self.salience)
    if self._wrapped:
        clone = clone(self._wrapped)
    return clone
python
{ "resource": "" }
q31182
Fact.as_dict
train
def as_dict(self):
    """Return this fact's non-special data as a plain dictionary."""
    return {key: unfreeze(value)
            for key, value in self.items()
            if not self.is_special(key)}
python
{ "resource": "" }
q31183
Fact.copy
train
def copy(self):
    """Return a new fact carrying the same positional and keyword values."""
    items = list(self.items())
    # Integer keys are positional arguments, restored in index order.
    positional = [value
                  for _, value in sorted((k, v) for k, v in items
                                         if isinstance(k, int))]
    # Everything else (minus special keys) is passed as keywords.
    keyword = {k: v for k, v in items
               if not isinstance(k, int) and not self.is_special(k)}
    return self.__class__(*positional, **keyword)
python
{ "resource": "" }
q31184
AnyChild.add_child
train
def add_child(self, node, callback):
    """Append a ChildNode for (node, callback) unless already present.

    NOTE(review): the membership test compares `node` against the
    stored ChildNode wrappers, not against their `.node` fields —
    confirm ChildNode's equality semantics make this deduplicate.
    """
    if node in self.children:
        return
    self.children.append(ChildNode(node, callback))
python
{ "resource": "" }
q31185
Token.valid
train
def valid(cls, data, context=None):
    """Shortcut constructor for a Token tagged VALID."""
    tag = cls.TagType.VALID
    return cls(tag, data, context)
python
{ "resource": "" }
q31186
Token.invalid
train
def invalid(cls, data, context=None):
    """Shortcut constructor for a Token tagged INVALID."""
    tag = cls.TagType.INVALID
    return cls(tag, data, context)
python
{ "resource": "" }
q31187
Token.copy
train
def copy(self):
    """Return a new Token of the same class and tag.

    The mutable `data` and `context` parts are shallow-copied so the
    clone can be changed without touching this token.
    """
    return self.__class__(self.tag,
                          self.data.copy(),
                          self.context.copy())
python
{ "resource": "" }
q31188
parse
train
def parse(datetime_str, timezone=None, isofirst=True, dayfirst=True, yearfirst=True): """ Parses a datetime string and returns a `Delorean` object. :param datetime_str: The string to be interpreted into a `Delorean` object. :param timezone: Pass this parameter and the returned Delorean object will be normalized to this timezone. Any offsets passed as part of datetime_str will be ignored. :param isofirst: try to parse string as date in ISO format before everything else. :param dayfirst: Whether to interpret the first value in an ambiguous 3-integer date (ex. 01/05/09) as the day (True) or month (False). If yearfirst is set to True, this distinguishes between YDM and YMD. :param yearfirst: Whether to interpret the first value in an ambiguous 3-integer date (ex. 01/05/09) as the year. If True, the first number is taken to be the year, otherwise the last number is taken to be the year. .. testsetup:: from delorean import Delorean from delorean import parse .. doctest:: >>> parse('2015-01-01 00:01:02') Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='UTC') If a fixed offset is provided in the datetime_str, it will be parsed and the returned `Delorean` object will store a `pytz.FixedOffest` as it's timezone. .. doctest:: >>> parse('2015-01-01 00:01:02 -0800') Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone=pytz.FixedOffset(-480)) If the timezone argument is supplied, the returned Delorean object will be in the timezone supplied. Any offsets in the datetime_str will be ignored. .. doctest:: >>> parse('2015-01-01 00:01:02 -0500', timezone='US/Pacific') Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='US/Pacific') If an unambiguous timezone is detected in the datetime string, a Delorean object with that datetime and timezone will be returned. .. 
doctest:: >>> parse('2015-01-01 00:01:02 PST') Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='America/Los_Angeles') However if the provided timezone is ambiguous, parse will ignore the timezone and return a `Delorean` object in UTC time. >>> parse('2015-01-01 00:01:02 EST') Delorean(datetime=datetime.datetime(2015, 1, 1, 0, 1, 2), timezone='UTC') """ # parse string to datetime object dt = None if isofirst: try: dt = isocapture(datetime_str) except Exception: pass if dt is None: dt = capture(datetime_str, dayfirst=dayfirst, yearfirst=yearfirst) if timezone: dt = dt.replace(tzinfo=None) do = Delorean(datetime=dt, timezone=timezone) elif dt.tzinfo is None: # assuming datetime object passed in is UTC do = Delorean(datetime=dt, timezone='UTC') elif isinstance(dt.tzinfo, tzoffset): utcoffset = dt.tzinfo.utcoffset(None) total_seconds = ( (utcoffset.microseconds + (utcoffset.seconds + utcoffset.days * 24 * 3600) * 10**6) / 10**6) tz = pytz.FixedOffset(total_seconds / 60) dt = dt.replace(tzinfo=None) do = Delorean(dt, timezone=tz) elif isinstance(dt.tzinfo, tzlocal): tz = get_localzone() dt = dt.replace(tzinfo=None) do = Delorean(dt, timezone=tz) else: dt = pytz.utc.normalize(dt) # making dt naive so we can pass it to Delorean dt = dt.replace(tzinfo=None) # if parse string has tzinfo we return a normalized UTC # delorean object that represents the time. do = Delorean(datetime=dt, timezone='UTC') return do
python
{ "resource": "" }
q31189
range_daily
train
def range_daily(start=None, stop=None, timezone='UTC', count=None):
    """Generate Delorean objects at DAILY frequency between start and stop."""
    return stops(freq=DAILY, start=start, stop=stop,
                 timezone=timezone, count=count)
python
{ "resource": "" }
q31190
range_hourly
train
def range_hourly(start=None, stop=None, timezone='UTC', count=None):
    """Generate Delorean objects at HOURLY frequency between start and stop."""
    return stops(freq=HOURLY, start=start, stop=stop,
                 timezone=timezone, count=count)
python
{ "resource": "" }
q31191
range_monthly
train
def range_monthly(start=None, stop=None, timezone='UTC', count=None):
    """Generate Delorean objects at MONTHLY frequency between start and stop."""
    return stops(freq=MONTHLY, start=start, stop=stop,
                 timezone=timezone, count=count)
python
{ "resource": "" }
q31192
range_yearly
train
def range_yearly(start=None, stop=None, timezone='UTC', count=None):
    """Generate Delorean objects at YEARLY frequency between start and stop."""
    return stops(freq=YEARLY, start=start, stop=stop,
                 timezone=timezone, count=count)
python
{ "resource": "" }
q31193
stops
train
def stops(freq, interval=1, count=None, wkst=None, bysetpos=None,
          bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
          byweekno=None, byweekday=None, byhour=None, byminute=None,
          bysecond=None, timezone='UTC', start=None, stop=None):
    """Yield Delorean objects for every occurrence of an rrule schedule.

    All rrule keyword arguments are forwarded unchanged; `start` and
    `stop`, when supplied, must be naive datetimes and are interpreted
    in `timezone`.

    :raises DeloreanInvalidDatetime: if start or stop carries tzinfo.
    """
    # Guard: any aware datetime passed in is rejected up front.
    start_ok = start is None or is_datetime_naive(start)
    stop_ok = stop is None or is_datetime_naive(stop)
    if not (start_ok and stop_ok):
        raise DeloreanInvalidDatetime('Provide a naive datetime object')
    # dateutil defaults dtstart to datetime.now(); build a proper
    # timezone-relative "now" instead.
    if start is None:
        start = datetime_timezone(timezone)
    schedule = rrule(freq, interval=interval, count=count, wkst=wkst,
                     bysetpos=bysetpos, bymonth=bymonth,
                     bymonthday=bymonthday, byyearday=byyearday,
                     byeaster=byeaster, byweekno=byweekno,
                     byweekday=byweekday, byhour=byhour,
                     byminute=byminute, bysecond=bysecond,
                     until=stop, dtstart=start)
    for occurrence in schedule:
        # Delorean expects a naive datetime, so strip any tzinfo first.
        naive = occurrence.replace(tzinfo=None)
        yield Delorean(datetime=naive, timezone=timezone)
python
{ "resource": "" }
q31194
_move_datetime
train
def _move_datetime(dt, direction, delta): """ Move datetime given delta by given direction """ if direction == 'next': dt = dt + delta elif direction == 'last': dt = dt - delta else: pass # raise some delorean error here return dt
python
{ "resource": "" }
q31195
move_datetime_month
train
def move_datetime_month(dt, direction, num_shifts):
    """Shift dt by num_shifts months in the given direction."""
    return _move_datetime(dt, direction, relativedelta(months=+num_shifts))
python
{ "resource": "" }
q31196
move_datetime_week
train
def move_datetime_week(dt, direction, num_shifts):
    """Shift dt by num_shifts weeks in the given direction."""
    return _move_datetime(dt, direction, relativedelta(weeks=+num_shifts))
python
{ "resource": "" }
q31197
move_datetime_year
train
def move_datetime_year(dt, direction, num_shifts):
    """Shift dt by num_shifts years in the given direction."""
    return _move_datetime(dt, direction, relativedelta(years=+num_shifts))
python
{ "resource": "" }
q31198
datetime_timezone
train
def datetime_timezone(tz):
    """Return the current time as a datetime localized to tz.

    Takes naive UTC "now", attaches the UTC zone, then converts to
    the requested timezone.
    """
    now_utc = localize(datetime.utcnow(), 'UTC')
    return normalize(now_utc, tz)
python
{ "resource": "" }
q31199
localize
train
def localize(dt, tz):
    """Attach timezone tz (a name or a tzinfo) to the naive datetime dt."""
    zone = tz if isinstance(tz, tzinfo) else pytz.timezone(tz)
    return zone.localize(dt)
python
{ "resource": "" }