| sentence1 | sentence2 | label |
|---|---|---|
def matchesTripleExprRef(cntxt: Context, T: RDFGraph, expr: ShExJ.tripleExprLabel, _: DebugContext) -> bool:
"""
expr is a tripleExprRef and satisfies(value, tripleExprWithId(tripleExprRef), G, m).
The tripleExprWithId function is defined in Triple Expression Reference Requirement below.
"""
expr = cntxt.tripleExprFor(expr)
if expr is None:
cntxt.fail_reason = "{expr}: Reference not found"
return False
return matchesExpr(cntxt, T, expr)
|
expr is a tripleExprRef and satisfies(value, tripleExprWithId(tripleExprRef), G, m).
The tripleExprWithId function is defined in Triple Expression Reference Requirement below.
|
entailment
|
def get(self, key, get_cas=False):
"""
Get a key from server.
:param key: Key's name
:type key: six.string_types
:param get_cas: If true, return (value, cas), where cas is the new CAS value.
:type get_cas: boolean
:return: The data stored under the key, or None if the key is not cached.
:rtype: object
"""
for server in self.servers:
value, cas = server.get(key)
if value is not None:
if get_cas:
return value, cas
else:
return value
if get_cas:
return None, None
|
Get a key from server.
:param key: Key's name
:type key: six.string_types
:param get_cas: If true, return (value, cas), where cas is the new CAS value.
:type get_cas: boolean
:return: The data stored under the key, or None if the key is not cached.
:rtype: object
|
entailment
|
def gets(self, key):
"""
Get a key from server, returning the value and its CAS key.
This method is for API compatibility with other implementations.
:param key: Key's name
:type key: six.string_types
:return: Returns (value, cas), or (None, None) if the value is not in cache.
:rtype: object
"""
for server in self.servers:
value, cas = server.get(key)
if value is not None:
return value, cas
return None, None
|
Get a key from server, returning the value and its CAS key.
This method is for API compatibility with other implementations.
:param key: Key's name
:type key: six.string_types
:return: Returns (value, cas), or (None, None) if the value is not in cache.
:rtype: object
|
entailment
|
def get_multi(self, keys, get_cas=False):
"""
Get multiple keys from server.
:param keys: A list of keys to get from server.
:type keys: list
:param get_cas: If get_cas is true, each value is (data, cas), with each result's CAS value.
:type get_cas: boolean
:return: A dict with all requested keys.
:rtype: dict
"""
d = {}
if keys:
for server in self.servers:
results = server.get_multi(keys)
if not get_cas:
# Remove CAS data
for key, (value, cas) in results.items():
results[key] = value
d.update(results)
keys = [_ for _ in keys if _ not in d]
if not keys:
break
return d
|
Get multiple keys from server.
:param keys: A list of keys to get from server.
:type keys: list
:param get_cas: If get_cas is true, each value is (data, cas), with each result's CAS value.
:type get_cas: boolean
:return: A dict with all requested keys.
:rtype: dict
|
entailment
|
def set(self, key, value, time=0, compress_level=-1):
"""
Set a value for a key on server.
:param key: Key's name
:type key: str
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
returns = []
for server in self.servers:
returns.append(server.set(key, value, time, compress_level=compress_level))
return any(returns)
|
Set a value for a key on server.
:param key: Key's name
:type key: str
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
|
entailment
|
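A minimal usage sketch of the pool-level set/get API above; the ``Client`` class name and its constructor argument are assumptions for the sketch, not shown in this excerpt.

```python
# Hypothetical usage sketch for the pooled client above (class name assumed).
client = Client(['127.0.0.1:11211'])

# set() writes to every server in the pool and reports success if any accepted it.
client.set('greeting', 'hello', time=60)

# get() queries each server in turn and returns the first hit, or None on a miss.
assert client.get('greeting') == 'hello'

# With get_cas=True the same call also returns the CAS token for the value.
value, cas = client.get('greeting', get_cas=True)
```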
def set_multi(self, mappings, time=0, compress_level=-1):
"""
Set multiple keys with their values on server.
:param mappings: A dict with keys/values
:type mappings: dict
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
returns = []
if mappings:
for server in self.servers:
returns.append(server.set_multi(mappings, time, compress_level=compress_level))
return all(returns)
|
Set multiple keys with their values on server.
:param mappings: A dict with keys/values
:type mappings: dict
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
|
entailment
|
def delete(self, key, cas=0):
"""
Delete a key/value from server. If key does not exist, it returns True.
:param key: Key's name to be deleted
:param cas: CAS of the key
:return: True in case of success and False in case of failure.
"""
returns = []
for server in self.servers:
returns.append(server.delete(key, cas))
return any(returns)
|
Delete a key/value from server. If key does not exist, it returns True.
:param key: Key's name to be deleted
:param cas: CAS of the key
:return: True in case of success and False in case of failure.
|
entailment
|
def incr(self, key, value):
"""
Increment a key; if it exists, return its actual value, otherwise return 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be incremented
:type value: int
:return: Actual value of the key on server
:rtype: int
"""
returns = []
for server in self.servers:
returns.append(server.incr(key, value))
return returns[0]
|
Increment a key; if it exists, return its actual value, otherwise return 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be incremented
:type value: int
:return: Actual value of the key on server
:rtype: int
|
entailment
|
def decr(self, key, value):
"""
Decrement a key; if it exists, return its actual value, otherwise return 0.
Minimum value of decrement return is 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be decremented
:type value: int
:return: Actual value of the key on server
:rtype: int
"""
returns = []
for server in self.servers:
returns.append(server.decr(key, value))
return returns[0]
|
Decrement a key; if it exists, return its actual value, otherwise return 0.
Minimum value of decrement return is 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be decremented
:type value: int
:return: Actual value of the key on server
:rtype: int
|
entailment
|
def set_result(self, rval: bool) -> None:
""" Set the result of the evaluation. If the result is true, prune all of the children that didn't cut it
:param rval: Result of evaluation
"""
self.result = rval
if self.result:
self.nodes = [pn for pn in self.nodes if pn.result]
|
Set the result of the evaluation. If the result is true, prune all of the children that did not succeed
:param rval: Result of evaluation
|
entailment
|
def reference_of(selector: shapeLabel, cntxt: Union[Context, ShExJ.Schema] ) -> Optional[ShExJ.shapeExpr]:
""" Return the shape expression in the schema referenced by selector, if any
:param cntxt: Context node or ShEx Schema
:param selector: identifier of element to select within the schema
:return:
"""
schema = cntxt.schema if isinstance(cntxt, Context) else cntxt
if selector is START:
return schema.start
for expr in schema.shapes:
if not isinstance(expr, ShExJ.ShapeExternal) and expr.id == selector:
return expr
return schema.start if schema.start is not None and schema.start.id == selector else None
|
Return the shape expression in the schema referenced by selector, if any
:param cntxt: Context node or ShEx Schema
:param selector: identifier of element to select within the schema
:return:
|
entailment
|
def triple_reference_of(label: ShExJ.tripleExprLabel, cntxt: Context) -> Optional[ShExJ.tripleExpr]:
""" Search for the label in a Schema """
te: Optional[ShExJ.tripleExpr] = None
if cntxt.schema.start is not None:
te = triple_in_shape(cntxt.schema.start, label, cntxt)
if te is None:
for shapeExpr in cntxt.schema.shapes:
te = triple_in_shape(shapeExpr, label, cntxt)
if te:
break
return te
|
Search for the label in a Schema
|
entailment
|
def triple_in_shape(expr: ShExJ.shapeExpr, label: ShExJ.tripleExprLabel, cntxt: Context) \
-> Optional[ShExJ.tripleExpr]:
""" Search for the label in a shape expression """
te = None
if isinstance(expr, (ShExJ.ShapeOr, ShExJ.ShapeAnd)):
for expr2 in expr.shapeExprs:
te = triple_in_shape(expr2, label, cntxt)
if te is not None:
break
elif isinstance(expr, ShExJ.ShapeNot):
te = triple_in_shape(expr.shapeExpr, label, cntxt)
elif isinstance(expr, ShExJ.shapeExprLabel):
se = reference_of(expr, cntxt)
if se is not None:
te = triple_in_shape(se, label, cntxt)
return te
|
Search for the label in a shape expression
|
entailment
|
def predicates_in_expression(expression: ShExJ.shapeExpr, cntxt: Context) -> List[IRIREF]:
""" Return the set of predicates that "appears in a TripleConstraint in an expression
See: `5.5.2 Semantics <http://shex.io/shex-semantics/#triple-expressions-semantics>`_ for details
:param expression: Expression to scan for predicates
:param cntxt: Context of evaluation
:return: List of predicates
"""
return list(directed_predicates_in_expression(expression, cntxt).keys())
|
Return the set of predicates that "appear in a TripleConstraint" in an expression
See: `5.5.2 Semantics <http://shex.io/shex-semantics/#triple-expressions-semantics>`_ for details
:param expression: Expression to scan for predicates
:param cntxt: Context of evaluation
:return: List of predicates
|
entailment
|
def directed_predicates_in_expression(expression: ShExJ.shapeExpr, cntxt: Context) -> Dict[IRIREF, PredDirection]:
""" Directed predicates in expression -- return all predicates in shapeExpr along with which direction(s) they
evaluate
:param expression: Expression to scan
:param cntxt:
:return:
"""
dir_predicates: Dict[IRIREF, PredDirection] = {}
def predicate_finder(predicates: Dict[IRIREF, PredDirection], tc: ShExJ.TripleConstraint, _: Context) -> None:
if isinstance(tc, ShExJ.TripleConstraint):
predicates.setdefault(tc.predicate, PredDirection()).dir(tc.inverse is None or not tc.inverse)
def triple_expr_finder(predicates: Dict[IRIREF, PredDirection], expr: ShExJ.shapeExpr, cntxt_: Context) -> None:
if isinstance(expr, ShExJ.Shape) and expr.expression is not None:
cntxt_.visit_triple_expressions(expr.expression, predicate_finder, predicates)
# TODO: follow_inner_shapes as True probably goes too far, but we definitely need to cross shape/triplecons
cntxt.visit_shapes(expression, triple_expr_finder, dir_predicates, follow_inner_shapes=False)
return dir_predicates
|
Directed predicates in expression -- return all predicates in shapeExpr along with which direction(s) they
evaluate
:param expression: Expression to scan
:param cntxt:
:return:
|
entailment
|
def algorithm_u(ns, m):
"""
taken from `Stack Overflow <https://codereview.stackexchange.com/questions/1526/finding-all-k-subset-partitions>`_
"""
def visit(nv, av):
ps = [[] for _ in range(m)]
for jv in range(nv):
ps[av[jv + 1]].append(ns[jv])
return ps
def f(mu, nu, sigma, n, a):
if mu == 2:
yield visit(n, a)
else:
for v in f(mu - 1, nu - 1, (mu + sigma) % 2, n, a):
yield v
if nu == mu + 1:
a[mu] = mu - 1
yield visit(n, a)
while a[nu] > 0:
a[nu] = a[nu] - 1
yield visit(n, a)
elif nu > mu + 1:
if (mu + sigma) % 2 == 1:
a[nu - 1] = mu - 1
else:
a[mu] = mu - 1
if (a[nu] + sigma) % 2 == 1:
for v in b(mu, nu - 1, 0, n, a):
yield v
else:
for v in f(mu, nu - 1, 0, n, a):
yield v
while a[nu] > 0:
a[nu] = a[nu] - 1
if (a[nu] + sigma) % 2 == 1:
for v in b(mu, nu - 1, 0, n, a):
yield v
else:
for v in f(mu, nu - 1, 0, n, a):
yield v
def b(mu, nu, sigma, n, a):
if nu == mu + 1:
while a[nu] < mu - 1:
yield visit(n, a)
a[nu] = a[nu] + 1
yield visit(n, a)
a[mu] = 0
elif nu > mu + 1:
if (a[nu] + sigma) % 2 == 1:
for v in f(mu, nu - 1, 0, n, a):
yield v
else:
for v in b(mu, nu - 1, 0, n, a):
yield v
while a[nu] < mu - 1:
a[nu] = a[nu] + 1
if (a[nu] + sigma) % 2 == 1:
for v in f(mu, nu - 1, 0, n, a):
yield v
else:
for v in b(mu, nu - 1, 0, n, a):
yield v
if (mu + sigma) % 2 == 1:
a[nu - 1] = 0
else:
a[mu] = 0
if mu == 2:
yield visit(n, a)
else:
for v in b(mu - 1, nu - 1, (mu + sigma) % 2, n, a):
yield v
ng = len(ns)
ag = [0] * (ng + 1)
for j in range(1, m + 1):
ag[ng - m + j] = j - 1
return f(m, ng, 0, ng, ag) if m > 1 else [[ns]]
|
taken from `Stack Overflow <https://codereview.stackexchange.com/questions/1526/finding-all-k-subset-partitions>`_
|
entailment
|
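Because the generator above is terse, a small worked example may help; it assumes algorithm_u is importable from the module above.

```python
# Enumerate all partitions of four elements into two non-empty blocks.
parts = list(algorithm_u([1, 2, 3, 4], 2))

# There are S(4, 2) = 7 such partitions (Stirling number of the second kind);
# the first one produced is [[1, 2, 3], [4]].
assert len(parts) == 7

# Every partition covers all four elements with disjoint blocks.
for blocks in parts:
    assert sorted(sum(blocks, [])) == [1, 2, 3, 4]
```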
def integer_partition(size: int, nparts: int) -> Iterator[List[List[int]]]:
""" Partition a list of integers into a list of partitions """
for part in algorithm_u(range(size), nparts):
yield part
|
Generate all partitions of the integers 0..size-1 into nparts parts
|
entailment
|
def partition_t(T: RDFGraph, nparts: int) -> Iterator[Tuple[RDFGraph, ...]]:
"""
Partition T into all possible partitions of T of size nparts
:param T: Set of RDF triples to be partitioned
:param nparts: number of partitions (e.g. 2 means return all possible 2-set partitions)
:return: Iterator that returns partitions
We don't actually partition the triples directly -- instead, we partition a set of integers that
reference elements in the (ordered) set and return those
"""
def partition_map(partition: List[List[int]]) -> Tuple[RDFGraph, ...]:
rval: List[RDFGraph] = []
for part in partition:
if len(part) == 1 and part[0] >= t_list_len:
rval.append(RDFGraph())
else:
rval.append(RDFGraph([t_list[e] for e in part if e < t_list_len]))
return tuple(rval)
t_list = sorted(list(T)) # Sorted not strictly necessary, but aids testing
t_list_len = len(t_list)
return map(lambda partition: partition_map(partition), filtered_integer_partition(t_list_len, nparts))
|
Partition T into all possible partitions of T of size nparts
:param T: Set of RDF triples to be partitioned
:param nparts: number of partitions (e.g. 2 means return all possible 2-set partitions)
:return: Iterator that returns partitions
We don't actually partition the triples directly -- instead, we partition a set of integers that
reference elements in the (ordered) set and return those
|
entailment
|
def partition_2(T: RDFGraph) -> Iterator[Tuple[RDFGraph, RDFGraph]]:
"""
Partition T into all possible combinations of two subsets
:param T: RDF Graph to partition
:return:
"""
for p in partition_t(T, 2):
yield p
|
Partition T into all possible combinations of two subsets
:param T: RDF Graph to partition
:return:
|
entailment
|
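A short sketch of how the partition helpers are used, assuming RDFGraph is the set-of-triples wrapper referenced throughout this module and accepts any iterable of (s, p, o) triples:

```python
from rdflib import URIRef

EX = "http://example.org/"
# Assumed constructor: RDFGraph built from an iterable of triples.
T = RDFGraph([
    (URIRef(EX + "s"), URIRef(EX + "p1"), URIRef(EX + "o1")),
    (URIRef(EX + "s"), URIRef(EX + "p2"), URIRef(EX + "o2")),
])

# partition_2(T) enumerates every ordered two-way split of the triples in T;
# each side of the pair is itself an RDFGraph.
for left, right in partition_2(T):
    print(len(left), len(right))
```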
def is_strict_numeric(n: Node) -> bool:
""" numeric denotes typed literals with datatypes xsd:integer, xsd:decimal, xsd:float, and xsd:double. """
return is_typed_literal(n) and cast(Literal, n).datatype in [XSD.integer, XSD.decimal, XSD.float, XSD.double]
|
numeric denotes typed literals with datatypes xsd:integer, xsd:decimal, xsd:float, and xsd:double.
|
entailment
|
def is_simple_literal(n: Node) -> bool:
""" simple literal denotes a plain literal with no language tag. """
return is_typed_literal(n) and cast(Literal, n).datatype is None and cast(Literal, n).language is None
|
simple literal denotes a plain literal with no language tag.
|
entailment
|
def semActsSatisfied(acts: Optional[List[ShExJ.SemAct]], cntxt: Context) -> bool:
""" `5.7.1 Semantic Actions Semantics <http://shex.io/shex-semantics/#semantic-actions-semantics>`_
The evaluation semActsSatisfied on a list of SemActs returns success or failure. The evaluation of an individual
SemAct is implementation-dependent.
"""
return True
|
`5.7.1 Semantic Actions Semantics <http://shex.io/shex-semantics/#semantic-actions-semantics>`_
The evaluation semActsSatisfied on a list of SemActs returns success or failure. The evaluation of an individual
SemAct is implementation-dependent.
|
entailment
|
def print(self, txt: str, hold: bool=False) -> None:
""" Conditionally print txt
:param txt: text to print
:param hold: If true, hang on to the text until another print comes through; if no other print intervenes, both prints are dropped
:return:
"""
if hold:
self.held_prints[self.trace_depth] = txt
elif self.held_prints[self.trace_depth]:
if self.max_print_depth > self.trace_depth:
print(self.held_prints[self.trace_depth])
print(txt)
self.max_print_depth = self.trace_depth
del self.held_prints[self.trace_depth]
else:
print(txt)
self.max_print_depth = self.trace_depth
|
Conditionally print txt
:param txt: text to print
:param hold: If true, hang on to the text until another print comes through; if no other print intervenes, both prints are dropped
:return:
|
entailment
|
def reset(self) -> None:
"""
Reset the context preceding an evaluation
"""
self.evaluating = set()
self.assumptions = {}
self.known_results = {}
self.current_node = None
self.evaluate_stack = []
self.bnode_map = {}
|
Reset the context preceding an evaluation
|
entailment
|
def _gen_schema_xref(self, expr: Optional[Union[ShExJ.shapeExprLabel, ShExJ.shapeExpr]]) -> None:
"""
Generate the schema_id_map
:param expr: root shape expression
"""
if expr is not None and not isinstance_(expr, ShExJ.shapeExprLabel) and 'id' in expr and expr.id is not None:
abs_id = self._resolve_relative_uri(expr.id)
if abs_id not in self.schema_id_map:
self.schema_id_map[abs_id] = expr
if isinstance(expr, (ShExJ.ShapeOr, ShExJ.ShapeAnd)):
for expr2 in expr.shapeExprs:
self._gen_schema_xref(expr2)
elif isinstance(expr, ShExJ.ShapeNot):
self._gen_schema_xref(expr.shapeExpr)
elif isinstance(expr, ShExJ.Shape):
if expr.expression is not None:
self._gen_te_xref(expr.expression)
|
Generate the schema_id_map
:param expr: root shape expression
|
entailment
|
def _gen_te_xref(self, expr: Union[ShExJ.tripleExpr, ShExJ.tripleExprLabel]) -> None:
"""
Generate the triple expression map (te_id_map)
:param expr: root triple expression
"""
if expr is not None and not isinstance_(expr, ShExJ.tripleExprLabel) and 'id' in expr and expr.id is not None:
if expr.id in self.te_id_map:
return
else:
self.te_id_map[self._resolve_relative_uri(expr.id)] = expr
if isinstance(expr, (ShExJ.OneOf, ShExJ.EachOf)):
for expr2 in expr.expressions:
self._gen_te_xref(expr2)
elif isinstance(expr, ShExJ.TripleConstraint):
if expr.valueExpr is not None:
self._gen_schema_xref(expr.valueExpr)
|
Generate the triple expression map (te_id_map)
:param expr: root triple expression
|
entailment
|
def tripleExprFor(self, id_: ShExJ.tripleExprLabel) -> ShExJ.tripleExpr:
""" Return the triple expression that corresponds to id """
return self.te_id_map.get(id_)
|
Return the triple expression that corresponds to id
|
entailment
|
def shapeExprFor(self, id_: Union[ShExJ.shapeExprLabel, START]) -> Optional[ShExJ.shapeExpr]:
""" Return the shape expression that corresponds to id """
rval = self.schema.start if id_ is START else self.schema_id_map.get(str(id_))
return rval
|
Return the shape expression that corresponds to id
|
entailment
|
def visit_shapes(self, expr: ShExJ.shapeExpr, f: Callable[[Any, ShExJ.shapeExpr, "Context"], None], arg_cntxt: Any,
visit_center: _VisitorCenter = None, follow_inner_shapes: bool=True) -> None:
"""
Visit expr and all of its "descendant" shapes.
:param expr: root shape expression
:param f: visitor function
:param arg_cntxt: accompanying context for the visitor function
:param visit_center: Recursive visit context. (Not normally supplied on an external call)
:param follow_inner_shapes: Follow nested shapes or just visit on outer level
"""
if visit_center is None:
visit_center = _VisitorCenter(f, arg_cntxt)
has_id = getattr(expr, 'id', None) is not None
if not has_id or not (visit_center.already_seen_shape(expr.id)
or visit_center.actively_visiting_shape(expr.id)):
# Visit the root expression
if has_id:
visit_center.start_visiting_shape(expr.id)
f(arg_cntxt, expr, self)
# Traverse the expression and visit its components
if isinstance(expr, (ShExJ.ShapeOr, ShExJ.ShapeAnd)):
for expr2 in expr.shapeExprs:
self.visit_shapes(expr2, f, arg_cntxt, visit_center, follow_inner_shapes=follow_inner_shapes)
elif isinstance(expr, ShExJ.ShapeNot):
self.visit_shapes(expr.shapeExpr, f, arg_cntxt, visit_center, follow_inner_shapes=follow_inner_shapes)
elif isinstance(expr, ShExJ.Shape):
if expr.expression is not None and follow_inner_shapes:
self.visit_triple_expressions(expr.expression,
lambda ac, te, cntxt: self._visit_shape_te(te, visit_center),
arg_cntxt,
visit_center)
elif isinstance_(expr, ShExJ.shapeExprLabel):
if not visit_center.actively_visiting_shape(str(expr)) and follow_inner_shapes:
visit_center.start_visiting_shape(str(expr))
self.visit_shapes(self.shapeExprFor(expr), f, arg_cntxt, visit_center)
visit_center.done_visiting_shape(str(expr))
if has_id:
visit_center.done_visiting_shape(expr.id)
|
Visit expr and all of its "descendant" shapes.
:param expr: root shape expression
:param f: visitor function
:param arg_cntxt: accompanying context for the visitor function
:param visit_center: Recursive visit context. (Not normally supplied on an external call)
:param follow_inner_shapes: Follow nested shapes or just visit on outer level
|
entailment
|
def _visit_shape_te(self, te: ShExJ.tripleExpr, visit_center: _VisitorCenter) -> None:
"""
Visit a triple expression that was reached through a shape. This, in turn, is used to visit additional shapes
that are referenced by a TripleConstraint
:param te: Triple expression reached through a Shape.expression
:param visit_center: context used in shape visitor
"""
if isinstance(te, ShExJ.TripleConstraint) and te.valueExpr is not None:
visit_center.f(visit_center.arg_cntxt, te.valueExpr, self)
|
Visit a triple expression that was reached through a shape. This, in turn, is used to visit additional shapes
that are referenced by a TripleConstraint
:param te: Triple expression reached through a Shape.expression
:param visit_center: context used in shape visitor
|
entailment
|
def _visit_te_shape(self, shape: ShExJ.shapeExpr, visit_center: _VisitorCenter) -> None:
"""
Visit a shape expression that was reached through a triple expression. This, in turn, is used to visit
additional triple expressions that are referenced by the Shape
:param shape: Shape reached through triple expression traverse
:param visit_center: context used in shape visitor
"""
if isinstance(shape, ShExJ.Shape) and shape.expression is not None:
visit_center.f(visit_center.arg_cntxt, shape.expression, self)
|
Visit a shape expression that was reached through a triple expression. This, in turn, is used to visit
additional triple expressions that are referenced by the Shape
:param shape: Shape reached through triple expression traverse
:param visit_center: context used in shape visitor
|
entailment
|
def start_evaluating(self, n: Node, s: ShExJ.shapeExpr) -> Optional[bool]:
"""Indicate that we are beginning to evaluate n according to shape expression s.
If we are already in the process of evaluating (n,s), as indicated by self.evaluating, we return our current
guess as to the result.
:param n: Node to be evaluated
:param s: expression for node evaluation
:return: Assumed evaluation result. If None, evaluation must be performed
"""
if not s.id:
s.id = str(BNode()) # Random permanent id
key = (n, s.id)
# We only evaluate a node once
if key in self.known_results:
return self.known_results[key]
if key not in self.evaluating:
self.evaluating.add(key)
return None
elif key not in self.assumptions:
self.assumptions[key] = True
return self.assumptions[key]
|
Indicate that we are beginning to evaluate n according to shape expression s.
If we are already in the process of evaluating (n,s), as indicated by self.evaluating, we return our current
guess as to the result.
:param n: Node to be evaluated
:param s: expression for node evaluation
:return: Assumed evaluation result. If None, evaluation must be performed
|
entailment
|
def done_evaluating(self, n: Node, s: ShExJ.shapeExpr, result: bool) -> Tuple[bool, bool]:
"""
Indicate that we have completed an actual evaluation of (n,s). This is only called when start_evaluating
has returned None as the assumed result
:param n: Node that was evaluated
:param s: expression for node evaluation
:param result: result of evaluation
:return: Tuple - first element is whether we are done, second is whether evaluation was consistent
"""
key = (n, s.id)
# If we didn't have to assume anything or our assumption was correct, we're done
if key not in self.assumptions or self.assumptions[key] == result:
if key in self.assumptions:
del self.assumptions[key] # good housekeeping, not strictly necessary
self.evaluating.remove(key)
self.known_results[key] = result
return True, True
# If we assumed true and got a false, try assuming false
elif self.assumptions[key]:
self.evaluating.remove(key) # restart the evaluation from the top
self.assumptions[key] = False
return False, True
else:
self.fail_reason = f"{s.id}: Inconsistent recursive shape reference"
return True, False
|
Indicate that we have completed an actual evaluation of (n,s). This is only called when start_evaluating
has returned None as the assumed result
:param n: Node that was evaluated
:param s: expression for node evaluation
:param result: result of evaluation
:return: Tuple - first element is whether we are done, second is whether evaluation was consistent
|
entailment
|
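The two methods above implement an assume-and-retry handshake for recursive shape references. A sketch of the intended calling pattern, with a hypothetical evaluate() wrapper (not part of this excerpt), might look like this:

```python
# Hypothetical wrapper showing how start_evaluating / done_evaluating cooperate.
def evaluate(cntxt, n, se):
    while True:
        assumed = cntxt.start_evaluating(n, se)
        if assumed is not None:
            # Either a cached result or the current optimistic assumption for an
            # evaluation of (n, se) already in progress higher up the stack.
            return assumed
        result = satisfies(cntxt, n, se)          # the real (possibly recursive) test
        done, consistent = cntxt.done_evaluating(n, se, result)
        if done:
            return result if consistent else False
        # Otherwise the optimistic assumption was wrong: done_evaluating() flipped
        # it, so loop and re-evaluate under the new assumption.
```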
def type_last(self, obj: JsonObj) -> JsonObj:
""" Move the type identifiers to the end of the object for print purposes """
def _tl_list(v: List) -> List:
return [self.type_last(e) if isinstance(e, JsonObj)
else _tl_list(e) if isinstance(e, list) else e for e in v if e is not None]
rval = JsonObj()
for k in as_dict(obj).keys():
v = obj[k]
if v is not None and k not in ('type', '_context'):
rval[k] = _tl_list(v) if isinstance(v, list) else self.type_last(v) if isinstance(v, JsonObj) else v
if 'type' in obj and obj.type:
rval.type = obj.type
return rval
|
Move the type identifiers to the end of the object for print purposes
|
entailment
|
def satisfies(cntxt: Context, n: Node, se: ShExJ.shapeExpr) -> bool:
""" `5.3 Shape Expressions <http://shex.io/shex-semantics/#node-constraint-semantics>`_
satisfies: The expression satisfies(n, se, G, m) indicates that a node n and graph G satisfy a shape
expression se with shapeMap m.
satisfies(n, se, G, m) is true if and only if:
* Se is a NodeConstraint and satisfies2(n, se) as described below in Node Constraints.
Note that testing if a node satisfies a node constraint does not require a graph or shapeMap.
* Se is a Shape and satisfies(n, se) as defined below in Shapes and Triple Expressions.
* Se is a ShapeOr and there is some shape expression se2 in shapeExprs such that
satisfies(n, se2, G, m).
* Se is a ShapeAnd and for every shape expression se2 in shapeExprs, satisfies(n, se2, G, m).
* Se is a ShapeNot and for the shape expression se2 at shapeExpr, notSatisfies(n, se2, G, m).
* Se is a ShapeExternal and implementation-specific mechanisms not defined in this specification
indicate success.
* Se is a shapeExprRef and there exists in the schema a shape expression se2 with that id and
satisfies(n, se2, G, m).
.. note:: Where is the documentation on recursion? All I can find is
`5.9.4 Recursion Example <http://shex.io/shex-semantics/#example-recursion>`_
"""
if isinstance(se, ShExJ.NodeConstraint):
rval = satisfiesNodeConstraint(cntxt, n, se)
elif isinstance(se, ShExJ.Shape):
rval = satisfiesShape(cntxt, n, se)
elif isinstance(se, ShExJ.ShapeOr):
rval = satisifesShapeOr(cntxt, n, se)
elif isinstance(se, ShExJ.ShapeAnd):
rval = satisfiesShapeAnd(cntxt, n, se)
elif isinstance(se, ShExJ.ShapeNot):
rval = satisfiesShapeNot(cntxt, n, se)
elif isinstance(se, ShExJ.ShapeExternal):
rval = satisfiesExternal(cntxt, n, se)
elif isinstance_(se, ShExJ.shapeExprLabel):
rval = satisfiesShapeExprRef(cntxt, n, se)
else:
raise NotImplementedError(f"Unrecognized shapeExpr: {type(se)}")
return rval
|
`5.3 Shape Expressions <http://shex.io/shex-semantics/#node-constraint-semantics>`_
satisfies: The expression satisfies(n, se, G, m) indicates that a node n and graph G satisfy a shape
expression se with shapeMap m.
satisfies(n, se, G, m) is true if and only if:
* Se is a NodeConstraint and satisfies2(n, se) as described below in Node Constraints.
Note that testing if a node satisfies a node constraint does not require a graph or shapeMap.
* Se is a Shape and satisfies(n, se) as defined below in Shapes and Triple Expressions.
* Se is a ShapeOr and there is some shape expression se2 in shapeExprs such that
satisfies(n, se2, G, m).
* Se is a ShapeAnd and for every shape expression se2 in shapeExprs, satisfies(n, se2, G, m).
* Se is a ShapeNot and for the shape expression se2 at shapeExpr, notSatisfies(n, se2, G, m).
* Se is a ShapeExternal and implementation-specific mechanisms not defined in this specification
indicate success.
* Se is a shapeExprRef and there exists in the schema a shape expression se2 with that id and
satisfies(n, se2, G, m).
.. note:: Where is the documentation on recursion? All I can find is
`5.9.4 Recursion Example <http://shex.io/shex-semantics/#example-recursion>`_
|
entailment
|
def satisifesShapeOr(cntxt: Context, n: Node, se: ShExJ.ShapeOr, _: DebugContext) -> bool:
""" Se is a ShapeOr and there is some shape expression se2 in shapeExprs such that satisfies(n, se2, G, m). """
return any(satisfies(cntxt, n, se2) for se2 in se.shapeExprs)
|
Se is a ShapeOr and there is some shape expression se2 in shapeExprs such that satisfies(n, se2, G, m).
|
entailment
|
def satisfiesShapeAnd(cntxt: Context, n: Node, se: ShExJ.ShapeAnd, _: DebugContext) -> bool:
""" Se is a ShapeAnd and for every shape expression se2 in shapeExprs, satisfies(n, se2, G, m) """
return all(satisfies(cntxt, n, se2) for se2 in se.shapeExprs)
|
Se is a ShapeAnd and for every shape expression se2 in shapeExprs, satisfies(n, se2, G, m)
|
entailment
|
def satisfiesShapeNot(cntxt: Context, n: Node, se: ShExJ.ShapeNot, _: DebugContext) -> bool:
""" Se is a ShapeNot and for the shape expression se2 at shapeExpr, notSatisfies(n, se2, G, m) """
return not satisfies(cntxt, n, se.shapeExpr)
|
Se is a ShapeNot and for the shape expression se2 at shapeExpr, notSatisfies(n, se2, G, m)
|
entailment
|
def satisfiesExternal(cntxt: Context, n: Node, se: ShExJ.ShapeExternal, c: DebugContext) -> bool:
""" Se is a ShapeExternal and implementation-specific mechansims not defined in this specification indicate
success.
"""
if c.debug:
print(f"id: {se.id}")
extern_shape = cntxt.external_shape_for(se.id)
if extern_shape:
return satisfies(cntxt, n, extern_shape)
cntxt.fail_reason = f"{se.id}: Shape is not in Schema"
return False
|
Se is a ShapeExternal and implementation-specific mechanisms not defined in this specification indicate
success.
|
entailment
|
def satisfiesShapeExprRef(cntxt: Context, n: Node, se: ShExJ.shapeExprLabel, c: DebugContext) -> bool:
""" Se is a shapeExprRef and there exists in the schema a shape expression se2 with that id
and satisfies(n, se2, G, m).
"""
if c.debug:
print(f"id: {se}")
for shape in cntxt.schema.shapes:
if shape.id == se:
return satisfies(cntxt, n, shape)
cntxt.fail_reason = f"{se}: Shape is not in Schema"
return False
|
Se is a shapeExprRef and there exists in the schema a shape expression se2 with that id
and satisfies(n, se2, G, m).
|
entailment
|
def cat_proximity():
"""Cat proximity graph from http://xkcd.com/231/"""
chart = SimpleLineChart(int(settings.width * 1.5), settings.height)
chart.set_legend(['INTELLIGENCE', 'INSANITY OF STATEMENTS'])
# intelligence
data_index = chart.add_data([100. / y for y in range(1, 15)])
# insanity of statements
chart.add_data([100. - 100 / y for y in range(1, 15)])
# line colours
chart.set_colours(['208020', '202080'])
# "Near" and "Far" labels, they are placed automatically at either ends.
near_far_axis_index = chart.set_axis_labels(Axis.BOTTOM, ['FAR', 'NEAR'])
# "Human Proximity to cat" label. Aligned to the center.
index = chart.set_axis_labels(Axis.BOTTOM, ['HUMAN PROXIMITY TO CAT'])
chart.set_axis_style(index, '202020', font_size=10, alignment=0)
chart.set_axis_positions(index, [50])
chart.download('label-cat-proximity.png')
|
Cat proximity graph from http://xkcd.com/231/
|
entailment
|
def normalize_uri(u: URI) -> URIRef:
""" Return a URIRef for a str or URIRef """
return u if isinstance(u, URIRef) else URIRef(str(u))
|
Return a URIRef for a str or URIRef
|
entailment
|
def normalize_uriparm(p: URIPARM) -> List[URIRef]:
""" Return an optional list of URIRefs for p"""
return normalize_urilist(p) if isinstance(p, List) else \
normalize_urilist([p]) if isinstance(p, (str, URIRef)) else p
|
Return an optional list of URIRefs for p
|
entailment
|
def normalize_startparm(p: STARTPARM) -> List[Union[type(START), START_TYPE, URIRef]]:
""" Return the startspec for p """
if not isinstance(p, list):
p = [p]
return [normalize_uri(e) if isinstance(e, str) and e is not START else e for e in p]
|
Return the startspec for p
|
entailment
|
def genargs(prog: Optional[str] = None) -> ArgumentParser:
"""
Create a command line parser
:return: parser
"""
parser = ArgumentParser(prog)
parser.add_argument("rdf", help="Input RDF file or SPARQL endpoint if slurper or sparql options")
parser.add_argument("shex", help="ShEx specification")
parser.add_argument("-f", "--format", help="Input RDF Format", default="turtle")
parser.add_argument("-s", "--start", help="Start shape. If absent use ShEx start node.")
parser.add_argument("-ut", "--usetype", help="Start shape is rdf:type of focus", action="store_true")
parser.add_argument("-sp", "--startpredicate", help="Start shape is object of this predicate")
parser.add_argument("-fn", "--focus", help="RDF focus node")
parser.add_argument("-A", "--allsubjects", help="Evaluate all non-bnode subjects in the graph", action="store_true")
parser.add_argument("-d", "--debug", action="store_true", help="Add debug output")
parser.add_argument("-ss", "--slurper", action="store_true", help="Use SPARQL slurper graph")
parser.add_argument("-cf", "--flattener", action="store_true", help="Use RDF Collections flattener graph")
parser.add_argument("-sq", "--sparql", help="SPARQL query to generate focus nodes")
parser.add_argument("-se", "--stoponerror", help="Stop on an error", action="store_true")
parser.add_argument("--stopafter", help="Stop after N nodes", type=int)
parser.add_argument("-ps", "--printsparql", help="Print SPARQL queries as they are executed", action="store_true")
parser.add_argument("-pr", "--printsparqlresults", help="Print SPARQL query and results", action="store_true")
parser.add_argument("-gn", "--graphname", help="Specific SPARQL graph to query - use '' for any named graph")
parser.add_argument("-pb", "--persistbnodes", help="Treat BNodes as persistent in SPARQL endpoint",
action="store_true")
return parser
|
Create a command line parser
:return: parser
|
entailment
|
def rdf(self, rdf: Optional[Union[str, Graph]]) -> None:
""" Set the RDF DataSet to be evaulated. If ``rdf`` is a string, the presence of a return is the
indicator that it is text instead of a location.
:param rdf: File name, URL, representation of rdflib Graph
"""
if isinstance(rdf, Graph):
self.g = rdf
else:
self.g = Graph()
if isinstance(rdf, str):
if '\n' in rdf or '\r' in rdf:
self.g.parse(data=rdf, format=self.rdf_format)
elif ':' in rdf:
self.g.parse(location=rdf, format=self.rdf_format)
else:
self.g.parse(source=rdf, format=self.rdf_format)
|
Set the RDF DataSet to be evaluated. If ``rdf`` is a string, the presence of a newline is the
indicator that it is text instead of a location.
:param rdf: File name, URL, representation of rdflib Graph
|
entailment
|
def schema(self, shex: Optional[Union[str, ShExJ.Schema]]) -> None:
""" Set the schema to be used. Schema can either be a ShExC or ShExJ string or a pre-parsed schema.
:param shex: Schema
"""
self.pfx = None
if shex is not None:
if isinstance(shex, ShExJ.Schema):
self._schema = shex
else:
shext = shex.strip()
loader = SchemaLoader()
if ('\n' in shex or '\r' in shex) or shext[0] in '#<_: ':
self._schema = loader.loads(shex)
else:
self._schema = loader.load(shex) if isinstance(shex, str) else shex
if self._schema is None:
raise ValueError("Unable to parse shex file")
self.pfx = PrefixLibrary(loader.schema_text)
|
Set the schema to be used. Schema can either be a ShExC or ShExJ string or a pre-parsed schema.
:param shex: Schema
|
entailment
|
def focus(self, focus: Optional[URIPARM]) -> None:
""" Set the focus node(s). If no focus node is specified, the evaluation will occur for all non-BNode
graph subjects. Otherwise it can be a string, a URIRef or a list of string/URIRef combinations
:param focus: None if focus should be all URIRefs in the graph otherwise a URI or list of URI's
"""
self._focus = normalize_uriparm(focus) if focus else None
|
Set the focus node(s). If no focus node is specified, the evaluation will occur for all non-BNode
graph subjects. Otherwise it can be a string, a URIRef or a list of string/URIRef combinations
:param focus: None if focus should be all URIRefs in the graph otherwise a URI or list of URI's
|
entailment
|
def arcsOut(G: Graph, n: Node) -> RDFGraph:
""" arcsOut(G, n) is the set of triples in a graph G with subject n. """
return RDFGraph(G.triples((n, None, None)))
|
arcsOut(G, n) is the set of triples in a graph G with subject n.
|
entailment
|
def predicatesOut(G: Graph, n: Node) -> Set[TriplePredicate]:
""" predicatesOut(G, n) is the set of predicates in arcsOut(G, n). """
return {p for p, _ in G.predicate_objects(n)}
|
predicatesOut(G, n) is the set of predicates in arcsOut(G, n).
|
entailment
|
def arcsIn(G: Graph, n: Node) -> RDFGraph:
""" arcsIn(G, n) is the set of triples in a graph G with object n. """
return RDFGraph(G.triples((None, None, n)))
|
arcsIn(G, n) is the set of triples in a graph G with object n.
|
entailment
|
def predicatesIn(G: Graph, n: Node) -> Set[TriplePredicate]:
""" predicatesIn(G, n) is the set of predicates in arcsIn(G, n). """
return {p for _, p in G.subject_predicates(n)}
|
predicatesIn(G, n) is the set of predicates in arcsIn(G, n).
|
entailment
|
def neigh(G: Graph, n: Node) -> RDFGraph:
""" neigh(G, n) is the neighbourhood of the node n in the graph G.
neigh(G, n) = arcsOut(G, n) ∪ arcsIn(G, n)
"""
return arcsOut(G, n) | arcsIn(G, n)
|
neigh(G, n) is the neighbourhood of the node n in the graph G.
neigh(G, n) = arcsOut(G, n) ∪ arcsIn(G, n)
|
entailment
|
def predicates(G: Graph, n: Node) -> Set[TriplePredicate]:
""" redicates(G, n) is the set of predicates in neigh(G, n).
predicates(G, n) = predicatesOut(G, n) ∪ predicatesIn(G, n)
"""
return predicatesOut(G, n) | predicatesIn(G, n)
|
predicates(G, n) is the set of predicates in neigh(G, n).
predicates(G, n) = predicatesOut(G, n) ∪ predicatesIn(G, n)
|
entailment
|
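To make the neighbourhood definitions above concrete, a tiny example (assuming RDFGraph behaves like a set of triples, as elsewhere in this module):

```python
from rdflib import Graph, URIRef

EX = "http://example.org/"
g = Graph()
g.add((URIRef(EX + "a"), URIRef(EX + "knows"), URIRef(EX + "b")))
g.add((URIRef(EX + "b"), URIRef(EX + "likes"), URIRef(EX + "a")))

a = URIRef(EX + "a")
# neigh(g, a) is the union of the one out-arc and the one in-arc of ex:a.
assert len(neigh(g, a)) == 2
# predicates(g, a) collects the predicates from both directions.
assert predicates(g, a) == {URIRef(EX + "knows"), URIRef(EX + "likes")}
```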
def generate_base(path: str) -> str:
""" Convert path, which can be a URL or a file path into a base URI
:param path: file location or url
:return: file location or url sans actual name
"""
if ':' in path:
parts = urlparse(path)
parts_dict = parts._asdict()
parts_dict['path'] = os.path.split(parts.path)[0] if '/' in parts.path else ''
return urlunparse(ParseResult(**parts_dict)) + '/'
else:
return (os.path.split(path)[0] if '/' in path else '') + '/'
|
Convert path, which can be a URL or a file path, into a base URI
:param path: file location or url
:return: file location or url sans actual name
|
entailment
|
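For concreteness, here is how generate_base behaves on a URL, a relative file path, and a bare file name (values follow from the code above):

```python
# Base of a URL: everything up to and including the last '/' of the path.
assert generate_base("http://example.org/schemas/obs.shex") == "http://example.org/schemas/"

# Base of a relative file path.
assert generate_base("schemas/obs.shex") == "schemas/"

# A bare file name has no directory component, so only the trailing slash remains.
assert generate_base("obs.shex") == "/"
```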
def ultimate_power():
"""
Data from http://indexed.blogspot.com/2007/08/real-ultimate-power.html
"""
chart = VennChart(settings.width, settings.height)
chart.add_data([100, 100, 100, 20, 20, 20, 10])
chart.set_title('Ninjas or God')
chart.set_legend(['unseen agents', 'super powerful', 'secret plans'])
chart.download('venn-ultimate-power.png')
|
Data from http://indexed.blogspot.com/2007/08/real-ultimate-power.html
|
entailment
|
def split_host_port(cls, server):
"""
Return (host, port) from server.
Port defaults to 11211.
>>> split_host_port('127.0.0.1:11211')
('127.0.0.1', 11211)
>>> split_host_port('127.0.0.1')
('127.0.0.1', 11211)
"""
host, port = splitport(server)
if port is None:
port = 11211
port = int(port)
if re.search(':.*$', host):
host = re.sub(':.*$', '', host)
return host, port
|
Return (host, port) from server.
Port defaults to 11211.
>>> split_host_port('127.0.0.1:11211')
('127.0.0.1', 11211)
>>> split_host_port('127.0.0.1')
('127.0.0.1', 11211)
|
entailment
|
def _read_socket(self, size):
"""
Reads data from socket.
:param size: Size in bytes to be read.
:return: Data from socket
"""
value = b''
while len(value) < size:
data = self.connection.recv(size - len(value))
if not data:
break
value += data
# If we got less data than we requested, the server disconnected.
if len(value) < size:
raise socket.error()
return value
|
Reads data from socket.
:param size: Size in bytes to be read.
:return: Data from socket
|
entailment
|
def _get_response(self):
"""
Get memcached response from socket.
:return: A tuple with binary values from memcached.
:rtype: tuple
"""
try:
self._open_connection()
if self.connection is None:
# The connection wasn't opened, which means we're deferring a reconnection attempt.
# Raise a socket.error, so we'll return the same server_disconnected message as we
# do below.
raise socket.error('Delaying reconnection attempt')
header = self._read_socket(self.HEADER_SIZE)
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas) = struct.unpack(self.HEADER_STRUCT, header)
assert magic == self.MAGIC['response']
extra_content = None
if bodylen:
extra_content = self._read_socket(bodylen)
return (magic, opcode, keylen, extlen, datatype, status, bodylen,
opaque, cas, extra_content)
except socket.error as e:
self._connection_error(e)
# (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas, extra_content)
message = str(e)
return (self.MAGIC['response'], -1, 0, 0, 0, self.STATUS['server_disconnected'], 0, 0, 0, message)
|
Get memcached response from socket.
:return: A tuple with binary values from memcached.
:rtype: tuple
|
entailment
|
def authenticate(self, username, password):
"""
Authenticate user on server.
:param username: Username used for authentication.
:type username: six.string_types
:param password: Password used for authentication.
:type password: six.string_types
:return: True if successful.
:raises: InvalidCredentials, AuthenticationNotSupported, MemcachedException
:rtype: bool
"""
self._username = username
self._password = password
# Reopen the connection with the new credentials.
self.disconnect()
self._open_connection()
return self.authenticated
|
Authenticate user on server.
:param username: Username used for authentication.
:type username: six.string_types
:param password: Password used for authentication.
:type password: six.string_types
:return: True if successful.
:raises: InvalidCredentials, AuthenticationNotSupported, MemcachedException
:rtype: bool
|
entailment
|
def serialize(self, value, compress_level=-1):
"""
Serializes a value based on its type.
:param value: Something to be serialized
:type value: six.string_types, int, long, object
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: Serialized type
:rtype: str
"""
flags = 0
if isinstance(value, binary_type):
flags |= self.FLAGS['binary']
elif isinstance(value, text_type):
value = value.encode('utf8')
elif isinstance(value, int) and isinstance(value, bool) is False:
flags |= self.FLAGS['integer']
value = str(value)
elif isinstance(value, long) and isinstance(value, bool) is False:
flags |= self.FLAGS['long']
value = str(value)
else:
flags |= self.FLAGS['object']
buf = BytesIO()
pickler = self.pickler(buf, self.pickle_protocol)
pickler.dump(value)
value = buf.getvalue()
if compress_level != 0 and len(value) > self.COMPRESSION_THRESHOLD:
if compress_level is not None and compress_level > 0:
# Use the specified compression level.
compressed_value = self.compression.compress(value, compress_level)
else:
# Use the default compression level.
compressed_value = self.compression.compress(value)
# Use the compressed value only if it is actually smaller.
if compressed_value and len(compressed_value) < len(value):
value = compressed_value
flags |= self.FLAGS['compressed']
return flags, value
|
Serializes a value based on its type.
:param value: Something to be serialized
:type value: six.string_types, int, long, object
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: Serialized type
:rtype: str
|
entailment
|
def deserialize(self, value, flags):
"""
Deserialize a value based on its flags, or just return it if it is not serialized.
:param value: Serialized or not value.
:type value: six.string_types, int
:param flags: Value flags
:type flags: int
:return: Deserialized value
:rtype: six.string_types|int
"""
FLAGS = self.FLAGS
if flags & FLAGS['compressed']: # pragma: no branch
value = self.compression.decompress(value)
if flags & FLAGS['binary']:
return value
if flags & FLAGS['integer']:
return int(value)
elif flags & FLAGS['long']:
return long(value)
elif flags & FLAGS['object']:
buf = BytesIO(value)
unpickler = self.unpickler(buf)
return unpickler.load()
if six.PY3:
return value.decode('utf8')
# In Python 2, mimic the behavior of the json library: return a str
# unless the value contains unicode characters.
# in Python 2, if value is a binary (e.g struct.pack("<Q") then decode will fail
try:
value.decode('ascii')
except UnicodeDecodeError:
try:
return value.decode('utf8')
except UnicodeDecodeError:
return value
else:
return value
|
Deserialize a value based on its flags, or just return it if it is not serialized.
:param value: Serialized or not value.
:type value: six.string_types, int
:param flags: Value flags
:type flags: int
:return: Deserialized value
:rtype: six.string_types|int
|
entailment
|
def get(self, key):
"""
Get a key and its CAS value from server. If the value isn't cached, return
(None, None).
:param key: Key's name
:type key: six.string_types
:return: Returns (value, cas).
:rtype: object
"""
logger.debug('Getting key %s', key)
data = struct.pack(self.HEADER_STRUCT +
self.COMMANDS['get']['struct'] % (len(key)),
self.MAGIC['request'],
self.COMMANDS['get']['command'],
len(key), 0, 0, 0, len(key), 0, 0, str_to_bytes(key))
self._send(data)
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
logger.debug('Value Length: %d. Body length: %d. Data type: %d',
extlen, bodylen, datatype)
if status != self.STATUS['success']:
if status == self.STATUS['key_not_found']:
logger.debug('Key not found. Message: %s', extra_content)
return None, None
if status == self.STATUS['server_disconnected']:
return None, None
raise MemcachedException('Code: %d Message: %s' % (status, extra_content), status)
flags, value = struct.unpack('!L%ds' % (bodylen - 4, ), extra_content)
return self.deserialize(value, flags), cas
|
Get a key and its CAS value from server. If the value isn't cached, return
(None, None).
:param key: Key's name
:type key: six.string_types
:return: Returns (value, cas).
:rtype: object
|
entailment
|
def noop(self):
"""
Send a NOOP command
:return: Returns the status.
:rtype: int
"""
logger.debug('Sending NOOP')
data = struct.pack(self.HEADER_STRUCT +
self.COMMANDS['noop']['struct'],
self.MAGIC['request'],
self.COMMANDS['noop']['command'],
0, 0, 0, 0, 0, 0, 0)
self._send(data)
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
logger.debug('Value Length: %d. Body length: %d. Data type: %d',
extlen, bodylen, datatype)
if status != self.STATUS['success']:
logger.debug('NOOP failed (status is %d). Message: %s' % (status, extra_content))
return int(status)
|
Send a NOOP command
:return: Returns the status.
:rtype: int
|
entailment
|
def get_multi(self, keys):
"""
Get multiple keys from server.
Since keys are converted to b'' under six.PY3, the keys need to be decoded back
into strings, e.g. key='test' is sent as b'test' and then decoded back to 'test'.
This encode/decode does not work when a key is already a six.binary_type, so this
function remembers which keys were originally sent as str and only decodes those
keys back to strings.
:param keys: A list of keys to get from server.
:type keys: list
:return: A dict with all requested keys.
:rtype: dict
"""
# pipeline N-1 getkq requests, followed by a regular getk to uncork the
# server
o_keys = keys
keys, last = keys[:-1], str_to_bytes(keys[-1])
if six.PY2:
msg = ''
else:
msg = b''
msg = msg.join([
struct.pack(self.HEADER_STRUCT +
self.COMMANDS['getkq']['struct'] % (len(key)),
self.MAGIC['request'],
self.COMMANDS['getkq']['command'],
len(key), 0, 0, 0, len(key), 0, 0, str_to_bytes(key))
for key in keys])
msg += struct.pack(self.HEADER_STRUCT +
self.COMMANDS['getk']['struct'] % (len(last)),
self.MAGIC['request'],
self.COMMANDS['getk']['command'],
len(last), 0, 0, 0, len(last), 0, 0, last)
self._send(msg)
d = {}
opcode = -1
while opcode != self.COMMANDS['getk']['command']:
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status == self.STATUS['success']:
flags, key, value = struct.unpack('!L%ds%ds' %
(keylen, bodylen - keylen - 4),
extra_content)
if six.PY2:
d[key] = self.deserialize(value, flags), cas
else:
try:
decoded_key = key.decode()
except UnicodeDecodeError:
d[key] = self.deserialize(value, flags), cas
else:
if decoded_key in o_keys:
d[decoded_key] = self.deserialize(value, flags), cas
else:
d[key] = self.deserialize(value, flags), cas
elif status == self.STATUS['server_disconnected']:
break
elif status != self.STATUS['key_not_found']:
raise MemcachedException('Code: %d Message: %s' % (status, extra_content), status)
return d
|
Get multiple keys from server.
Since keys are converted to b'' under six.PY3, the keys need to be decoded back
into strings, e.g. key='test' is sent as b'test' and then decoded back to 'test'.
This encode/decode does not work when a key is already a six.binary_type, so this
function remembers which keys were originally sent as str and only decodes those
keys back to strings.
:param keys: A list of keys to get from server.
:type keys: list
:return: A dict with all requested keys.
:rtype: dict
|
entailment
|
def _set_add_replace(self, command, key, value, time, cas=0, compress_level=-1):
"""
Function to set/add/replace commands.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param cas: The CAS value that must be matched for this operation to complete, or 0 for no CAS.
:type cas: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
time = time if time >= 0 else self.MAXIMUM_EXPIRE_TIME
logger.debug('Setting/adding/replacing key %s.', key)
flags, value = self.serialize(value, compress_level=compress_level)
logger.debug('Value bytes %s.', len(value))
if isinstance(value, text_type):
value = value.encode('utf8')
self._send(struct.pack(self.HEADER_STRUCT +
self.COMMANDS[command]['struct'] % (len(key), len(value)),
self.MAGIC['request'],
self.COMMANDS[command]['command'],
len(key), 8, 0, 0, len(key) + len(value) + 8, 0, cas, flags,
time, str_to_bytes(key), value))
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status != self.STATUS['success']:
if status == self.STATUS['key_exists']:
return False
elif status == self.STATUS['key_not_found']:
return False
elif status == self.STATUS['server_disconnected']:
return False
raise MemcachedException('Code: %d Message: %s' % (status, extra_content), status)
return True
|
Function to set/add/replace commands.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param cas: The CAS value that must be matched for this operation to complete, or 0 for no CAS.
:type cas: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
|
entailment
|
def set(self, key, value, time, compress_level=-1):
"""
Set a value for a key on server.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
"""
return self._set_add_replace('set', key, value, time, compress_level=compress_level)
|
Set a value for a key on server.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True in case of success and False in case of failure
:rtype: bool
|
entailment
|
def cas(self, key, value, cas, time, compress_level=-1):
"""
Set a key's value on the server only if the supplied CAS value matches; if cas is None, behave like add().
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if key is added False if key already exists and has a different CAS
:rtype: bool
"""
# The protocol CAS value 0 means "no cas". Calling cas() with that value is
# probably unintentional. Don't allow it, since it would overwrite the value
# without performing CAS at all.
assert cas != 0, '0 is an invalid CAS value'
# If we get a cas of None, interpret that as "compare against nonexistent and set",
# which is simply Add.
if cas is None:
return self._set_add_replace('add', key, value, time, compress_level=compress_level)
else:
return self._set_add_replace('set', key, value, time, cas=cas, compress_level=compress_level)
|
Set a key's value on the server only if the supplied CAS value matches; if cas is None, behave like add().
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if key is added False if key already exists and has a different CAS
:rtype: bool
|
entailment
|
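A sketch of the optimistic-concurrency flow that cas() supports, where server stands for an instance of the protocol class these methods belong to:

```python
# Read the current value together with its CAS token.
value, cas_token = server.get('counter')

if cas_token is None:
    # Key does not exist yet: cas(..., cas=None) behaves like add().
    server.cas('counter', 1, None, time=0)
else:
    # Store the new value only if nobody changed the key since our read.
    if not server.cas('counter', value + 1, cas_token, time=0):
        pass  # lost the race: re-read and retry
```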
def add(self, key, value, time, compress_level=-1):
"""
Add a key/value to server only if it does not exist.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if key is added False if key already exists
:rtype: bool
"""
return self._set_add_replace('add', key, value, time, compress_level=compress_level)
|
Add a key/value to server only if it does not exist.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if key is added False if key already exists
:rtype: bool
|
entailment
|
def replace(self, key, value, time, compress_level=-1):
"""
Replace a key/value on the server only if the key already exists.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if the key is replaced, False if the key does not exist
:rtype: bool
"""
return self._set_add_replace('replace', key, value, time, compress_level=compress_level)
|
Replace a key/value on the server only if the key already exists.
:param key: Key's name
:type key: six.string_types
:param value: A value to be stored on server.
:type value: object
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True if the key is replaced, False if the key does not exist
:rtype: bool
|
entailment
|
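The three storage verbs differ only in their existence precondition: set() stores unconditionally, add() only when the key is absent, replace() only when it is present. A short sketch, continuing with the client object from the cas sketch above:

client.set('greeting', 'hello', time=60)             # stored regardless of prior state
client.add('greeting', 'hi', time=60)                # False: key already exists
client.replace('greeting', 'hello again', time=60)   # True: key exists, value swapped
client.add('brand-new', 'first value', time=60)      # True: key was absent
client.replace('missing-key', 'x', time=60)          # False: nothing to replace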
def set_multi(self, mappings, time=100, compress_level=-1):
"""
Set multiple keys with their values on the server.
If a key is a (key, cas) tuple, insert as if cas(key, value, cas) had
been called.
:param mappings: A dict with keys/values
:type mappings: dict
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True
:rtype: bool
"""
mappings = mappings.items()
msg = []
for key, value in mappings:
if isinstance(key, tuple):
key, cas = key
else:
cas = None
if cas == 0:
# Like cas(), if the cas value is 0, treat it as compare-and-set against not
# existing.
command = 'addq'
else:
command = 'setq'
flags, value = self.serialize(value, compress_level=compress_level)
m = struct.pack(self.HEADER_STRUCT +
self.COMMANDS[command]['struct'] % (len(key), len(value)),
self.MAGIC['request'],
self.COMMANDS[command]['command'],
len(key),
8, 0, 0, len(key) + len(value) + 8, 0, cas or 0,
flags, time, str_to_bytes(key), value)
msg.append(m)
m = struct.pack(self.HEADER_STRUCT +
self.COMMANDS['noop']['struct'],
self.MAGIC['request'],
self.COMMANDS['noop']['command'],
0, 0, 0, 0, 0, 0, 0)
msg.append(m)
if six.PY2:
msg = ''.join(msg)
else:
msg = b''.join(msg)
self._send(msg)
opcode = -1
retval = True
while opcode != self.COMMANDS['noop']['command']:
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status != self.STATUS['success']:
retval = False
if status == self.STATUS['server_disconnected']:
break
return retval
|
Set multiple keys with their values on the server.
If a key is a (key, cas) tuple, insert as if cas(key, value, cas) had
been called.
:param mappings: A dict with keys/values
:type mappings: dict
:param time: Time in seconds that your key will expire.
:type time: int
:param compress_level: How much to compress.
0 = no compression, 1 = fastest, 9 = slowest but best,
-1 = default compression level.
:type compress_level: int
:return: True
:rtype: bool
|
entailment
|
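A sketch of the mixed-key form set_multi() accepts, continuing with the same client object; plain keys become quiet SETs, a (key, cas) tuple adds CAS semantics, and a cas of 0 is sent as a quiet ADD as noted in the code above.

previous_cas = 42  # stand-in for a CAS token obtained earlier from gets()/get(..., get_cas=True)

mappings = {
    'a': 1,
    'b': 2,
    ('c', 0): 3,             # add-style: stored only if 'c' does not exist yet
    ('d', previous_cas): 4,  # stored only if 'd' still carries this CAS value
}
ok = client.set_multi(mappings, time=120)  # False if any item was rejected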
def _incr_decr(self, command, key, value, default, time):
"""
Function which increments and decrements.
:param key: Key's name
:type key: six.string_types
:param value: Number to be (de|in)cremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
"""
time = time if time >= 0 else self.MAXIMUM_EXPIRE_TIME
self._send(struct.pack(self.HEADER_STRUCT +
self.COMMANDS[command]['struct'] % len(key),
self.MAGIC['request'],
self.COMMANDS[command]['command'],
len(key),
20, 0, 0, len(key) + 20, 0, 0, value,
default, time, str_to_bytes(key)))
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status not in (self.STATUS['success'], self.STATUS['server_disconnected']):
raise MemcachedException('Code: %d Message: %s' % (status, extra_content), status)
if status == self.STATUS['server_disconnected']:
return 0
return struct.unpack('!Q', extra_content)[0]
|
Function which increments and decrements.
:param key: Key's name
:type key: six.string_types
:param value: Number to be (de|in)cremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
|
entailment
|
def incr(self, key, value, default=0, time=1000000):
"""
Increment a key. If it exists, return its actual value; if it doesn't, return 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be incremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
"""
return self._incr_decr('incr', key, value, default, time)
|
Increment a key. If it exists, return its actual value; if it doesn't, return 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be incremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
|
entailment
|
def decr(self, key, value, default=0, time=100):
"""
Decrement a key. If it exists, return its actual value; if it doesn't, return 0.
The minimum value a decrement can return is 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be decremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
"""
return self._incr_decr('decr', key, value, default, time)
|
Decrement a key. If it exists, return its actual value; if it doesn't, return 0.
The minimum value a decrement can return is 0.
:param key: Key's name
:type key: six.string_types
:param value: Number to be decremented
:type value: int
:param default: Default value if key does not exist.
:type default: int
:param time: Time in seconds to expire key.
:type time: int
:return: Actual value of the key on server
:rtype: int
|
entailment
|
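A counter sketch with the same client object. Only key and value are passed so the call works whether or not the wrapper exposes the default/time parameters documented above; at the protocol level, default seeds a missing key and time sets its expiry.

hits = client.incr('page-hits', 1)   # creates or bumps the counter, returns its value
hits = client.incr('page-hits', 5)
hits = client.decr('page-hits', 2)   # result never drops below 0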
def delete(self, key, cas=0):
"""
Delete a key/value from server. If key existed and was deleted, return True.
:param key: Key's name to be deleted
:type key: six.string_types
:param cas: If set, only delete the key if its CAS value matches.
:type cas: int
:return: True in case of success and False in case of failure.
:rtype: bool
"""
logger.debug('Deleting key %s', key)
self._send(struct.pack(self.HEADER_STRUCT +
self.COMMANDS['delete']['struct'] % len(key),
self.MAGIC['request'],
self.COMMANDS['delete']['command'],
len(key), 0, 0, 0, len(key), 0, cas, str_to_bytes(key)))
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status == self.STATUS['server_disconnected']:
return False
if status != self.STATUS['success'] and status not in (self.STATUS['key_not_found'], self.STATUS['key_exists']):
raise MemcachedException('Code: %d message: %s' % (status, extra_content), status)
logger.debug('Key deleted %s', key)
return status != self.STATUS['key_exists']
|
Delete a key/value from server. If key existed and was deleted, return True.
:param key: Key's name to be deleted
:type key: six.string_types
:param cas: If set, only delete the key if its CAS value matches.
:type cas: int
:return: True in case of success and False in case of failure.
:rtype: bool
|
entailment
|
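A sketch of plain and CAS-guarded deletes with the same client object, again assuming gets() returns (value, cas):

client.delete('stale-key')  # unconditional delete

value, token = client.gets('guarded-key')
if token is not None:
    # Only removed if the key still carries the CAS value we read.
    removed = client.delete('guarded-key', cas=token)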
def delete_multi(self, keys):
"""
Delete multiple keys from server in one command.
:param keys: A list of keys to be deleted
:type keys: list
:return: True in case of success and False in case of failure.
:rtype: bool
"""
logger.debug('Deleting keys %r', keys)
if six.PY2:
msg = ''
else:
msg = b''
for key in keys:
msg += struct.pack(
self.HEADER_STRUCT +
self.COMMANDS['delete']['struct'] % len(key),
self.MAGIC['request'],
self.COMMANDS['delete']['command'],
len(key), 0, 0, 0, len(key), 0, 0, str_to_bytes(key))
msg += struct.pack(
self.HEADER_STRUCT +
self.COMMANDS['noop']['struct'],
self.MAGIC['request'],
self.COMMANDS['noop']['command'],
0, 0, 0, 0, 0, 0, 0)
self._send(msg)
opcode = -1
retval = True
while opcode != self.COMMANDS['noop']['command']:
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status != self.STATUS['success']:
retval = False
if status == self.STATUS['server_disconnected']:
break
return retval
|
Delete multiple keys from server in one command.
:param keys: A list of keys to be deleted
:type keys: list
:return: True in case of success and False in case of failure.
:rtype: bool
|
entailment
|
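delete_multi() packs one quiet DELETE per key plus a trailing noop, so a whole batch is removed in a single round trip; a one-line sketch with the same client object:

all_deleted = client.delete_multi(['session:1', 'session:2', 'session:3'])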
def flush_all(self, time):
"""
Send a command to the server to flush (delete) all keys.
:param time: Time to wait until flush in seconds.
:type time: int
:return: True in case of success, False in case of failure
:rtype: bool
"""
logger.info('Flushing memcached')
self._send(struct.pack(self.HEADER_STRUCT +
self.COMMANDS['flush']['struct'],
self.MAGIC['request'],
self.COMMANDS['flush']['command'],
0, 4, 0, 0, 4, 0, 0, time))
(magic, opcode, keylen, extlen, datatype, status, bodylen, opaque,
cas, extra_content) = self._get_response()
if status not in (self.STATUS['success'], self.STATUS['server_disconnected']):
raise MemcachedException('Code: %d message: %s' % (status, extra_content), status)
logger.debug('Memcached flushed')
return True
|
Send a command to the server to flush (delete) all keys.
:param time: Time to wait until flush in seconds.
:type time: int
:return: True in case of success, False in case of failure
:rtype: bool
|
entailment
|
def stats(self, key=None):
"""
Return server stats.
:param key: Optional; if given, request stats for that specific key.
:type key: six.string_types
:return: A dict with server stats
:rtype: dict
"""
# TODO: Stats with key is not working.
if key is not None:
if isinstance(key, text_type):
key = str_to_bytes(key)
keylen = len(key)
packed = struct.pack(
self.HEADER_STRUCT + '%ds' % keylen,
self.MAGIC['request'],
self.COMMANDS['stat']['command'],
keylen, 0, 0, 0, keylen, 0, 0, key)
else:
packed = struct.pack(
self.HEADER_STRUCT,
self.MAGIC['request'],
self.COMMANDS['stat']['command'],
0, 0, 0, 0, 0, 0, 0)
self._send(packed)
value = {}
while True:
response = self._get_response()
status = response[5]
if status == self.STATUS['server_disconnected']:
break
keylen = response[2]
bodylen = response[6]
if keylen == 0 and bodylen == 0:
break
extra_content = response[-1]
key = extra_content[:keylen]
body = extra_content[keylen:bodylen]
value[key.decode() if isinstance(key, bytes) else key] = body
return value
|
Return server stats.
:param key: Optional; if given, request stats for that specific key.
:type key: six.string_types
:return: A dict with server stats
:rtype: dict
|
entailment
|
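A sketch of reading stats with the same client object. The exact shape of the returned dict depends on which layer (per-server client wrapper vs. this protocol method) is being called, so the example only iterates and prints whatever comes back; the per-key form is noted above as not working yet.

for name, value in client.stats().items():
    print(name, value)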
def add_shex(self, schema: str) -> "PrefixLibrary":
""" Add a ShExC schema to the library
:param schema: ShExC schema text, URL or file name
:return: prefix library object
"""
if '\n' in schema or '\r' in schema or ' ' in schema:
shex = schema
else:
shex = load_shex_file(schema)
for line in shex.split('\n'):
line = line.strip()
m = re.match(r'PREFIX\s+(\S+):\s+<(\S+)>', line)
if not m:
m = re.match(r"@prefix\s+(\S+):\s+<(\S+)>\s+\.", line)
if m:
setattr(self, m.group(1).upper(), Namespace(m.group(2)))
return self
|
Add a ShExC schema to the library
:param schema: ShExC schema text, URL or file name
:return: prefix library object
|
entailment
|
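A sketch of loading ShExC prefix declarations, assuming PrefixLibrary comes from PyShEx and can be constructed without arguments (both assumptions). Text containing newlines or spaces is treated as inline schema text; otherwise it is fetched as a file or URL.

from pyshex import PrefixLibrary  # assumed import path

shex_text = """
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX ex: <http://example.org/model/>

ex:PersonShape { foaf:name . }
"""

lib = PrefixLibrary()      # assumed no-argument constructor
lib.add_shex(shex_text)
print(lib.FOAF)            # Namespace for http://xmlns.com/foaf/0.1/
print(lib.EX.Person)       # http://example.org/model/Person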
def add_bindings(self, g: Graph) -> "PrefixLibrary":
""" Add bindings in the library to the graph
:param g: graph to add prefixes to
:return: PrefixLibrary object
"""
for prefix, namespace in self:
g.bind(prefix.lower(), namespace)
return self
|
Add bindings in the library to the graph
:param g: graph to add prefixes to
:return: PrefixLibrary object
|
entailment
|
def add_to_object(self, target: object, override: bool = False) -> int:
"""
Add the bindings to the target object
:param target: target to add to
:param override: override existing bindings if they are of type Namespace
:return: number of items actually added
"""
nret = 0
for k, v in self:
key = k.upper()
exists = hasattr(target, key)
if not exists or (override and isinstance(getattr(target, k), (Namespace, _RDFNamespace))):
setattr(target, k, v)
nret += 1
else:
print(f"Warning: {key} is already defined in namespace {target}. Not overridden")
return nret
|
Add the bindings to the target object
:param target: target to add to
:param override: override existing bindings if they are of type Namespace
:return: number of items actually added
|
entailment
|
def nsname(self, uri: Union[str, URIRef]) -> str:
"""
Return the 'ns:name' format of URI
:param uri: URI to transform
:return: nsname format of URI or straight URI if no mapping
"""
uri = str(uri)
nsuri = ""
prefix = None
for pfx, ns in self:
nss = str(ns)
if uri.startswith(nss) and len(nss) > len(nsuri):
nsuri = nss
prefix = pfx
return (prefix.lower() + ':' + uri[len(nsuri):]) if prefix is not None else uri
|
Return the 'ns:name' format of URI
:param uri: URI to transform
:return: nsname format of URI or straight URI if no mapping
|
entailment
|
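Continuing with the lib object from the PrefixLibrary sketch: add_bindings() pushes the same prefixes into an rdflib graph, and nsname() keeps the longest matching namespace so more specific prefixes win.

from rdflib import Graph

g = Graph()
lib.add_bindings(g)  # binds foaf:, ex:, ... on the graph

print(lib.nsname('http://xmlns.com/foaf/0.1/knows'))  # -> 'foaf:knows'
print(lib.nsname('http://unmapped.example/thing'))    # -> unchanged URI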
def open_spec(f):
"""
:param f: file object with spec data
The spec file is a YAML document that specifies which modules
can be loaded.
modules - list of base modules that can be loaded
pths - list of .pth files to load
"""
import ruamel.yaml as yaml
keys = ['modules', 'pths', 'test_import', 'install_hints', 'extra_paths']
data = yaml.safe_load(f)
parsed = dict()
## pattern = re.compile("^\s+|\s*,\s*|\s+$")
for k in keys:
v = data.get(k, [])
# Items are always lists
if isinstance(v, basestring):
parsed[k] = [m for m in re.split(r",| ", v)]
# parsed[k] = re.split(pattern, v)
else:
parsed[k] = v
return parsed
|
:param f: file object with spec data
The spec file is a YAML document that specifies which modules
can be loaded.
modules - list of base modules that can be loaded
pths - list of .pth files to load
|
entailment
|
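A sketch of the spec document open_spec() expects. It assumes the function is importable from its defining module and that that module resolves basestring (Python 2, or an equivalent alias); string values are split on commas/spaces while list values and missing keys come back as lists.

import io

spec_yaml = """
modules: numpy scipy
pths:
  - extra-site.pth
test_import:
  - requests
"""

parsed = open_spec(io.StringIO(spec_yaml))
# parsed['modules'] -> ['numpy', 'scipy']
# parsed['pths']    -> ['extra-site.pth']
# parsed['install_hints'] and parsed['extra_paths'] default to []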
def load(self, schema_file: Union[str, TextIO], schema_location: Optional[str]=None) -> ShExJ.Schema:
""" Load a ShEx Schema from schema_location
:param schema_file: name or file-like object to deserialize
:param schema_location: URL or file name of schema. Used to create the base_location
:return: ShEx Schema represented by schema_location
"""
if isinstance(schema_file, str):
schema_file = self.location_rewrite(schema_file)
self.schema_text = load_shex_file(schema_file)
else:
self.schema_text = schema_file.read()
if self.base_location:
self.root_location = self.base_location
elif schema_location:
self.root_location = os.path.dirname(schema_location) + '/'
else:
self.root_location = None
return self.loads(self.schema_text)
|
Load a ShEx Schema from schema_location
:param schema_file: name or file-like object to deserialize
:param schema_location: URL or file name of schema. Used to create the base_location
:return: ShEx Schema represented by schema_location
|
entailment
|
def loads(self, schema_txt: str) -> ShExJ.Schema:
""" Parse and return schema as a ShExJ Schema
:param schema_txt: ShExC or ShExJ representation of a ShEx Schema
:return: ShEx Schema representation of schema
"""
self.schema_text = schema_txt
if schema_txt.strip()[0] == '{':
# TODO: figure out how to propagate self.base_location into this parse
return cast(ShExJ.Schema, loads(schema_txt, ShExJ))
else:
return generate_shexj.parse(schema_txt, self.base_location)
|
Parse and return schema as a ShExJ Schema
:param schema_txt: ShExC or ShExJ representation of a ShEx Schema
:return: ShEx Schema representation of schema
|
entailment
|
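A sketch of the load()/loads() pair, assuming the enclosing class is PyShEx's SchemaLoader and is importable as shown (assumptions about naming); text starting with '{' is parsed as ShExJ, anything else goes through the ShExC parser.

from pyshex.utils.schema_loader import SchemaLoader  # assumed import path

loader = SchemaLoader()

shexc = """
PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
ex:PersonShape { ex:name xsd:string }
"""
schema = loader.loads(shexc)  # ShExJ.Schema object

# load() also accepts a file name, URL, or open file object:
# schema = loader.load("person.shex", schema_location="https://example.org/schemas/person.shex")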
def placeOrder(self, id, contract, order):
"""placeOrder(EClient self, OrderId id, Contract contract, Order order)"""
return _swigibpy.EClient_placeOrder(self, id, contract, order)
|
placeOrder(EClient self, OrderId id, Contract contract, Order order)
|
entailment
|
def reqMktDepth(self, id, contract, numRows, mktDepthOptions):
"""reqMktDepth(EClient self, TickerId id, Contract contract, int numRows, TagValueListSPtr const & mktDepthOptions)"""
return _swigibpy.EClient_reqMktDepth(self, id, contract, numRows, mktDepthOptions)
|
reqMktDepth(EClient self, TickerId id, Contract contract, int numRows, TagValueListSPtr const & mktDepthOptions)
|
entailment
|
def exerciseOptions(self, id, contract, exerciseAction, exerciseQuantity, account, override):
"""exerciseOptions(EClient self, TickerId id, Contract contract, int exerciseAction, int exerciseQuantity, IBString const & account, int override)"""
return _swigibpy.EClient_exerciseOptions(self, id, contract, exerciseAction, exerciseQuantity, account, override)
|
exerciseOptions(EClient self, TickerId id, Contract contract, int exerciseAction, int exerciseQuantity, IBString const & account, int override)
|
entailment
|
def reqScannerSubscription(self, tickerId, subscription, scannerSubscriptionOptions):
"""reqScannerSubscription(EClient self, int tickerId, ScannerSubscription subscription, TagValueListSPtr const & scannerSubscriptionOptions)"""
return _swigibpy.EClient_reqScannerSubscription(self, tickerId, subscription, scannerSubscriptionOptions)
|
reqScannerSubscription(EClient self, int tickerId, ScannerSubscription subscription, TagValueListSPtr const & scannerSubscriptionOptions)
|
entailment
|
def reqFundamentalData(self, reqId, arg3, reportType):
"""reqFundamentalData(EClient self, TickerId reqId, Contract arg3, IBString const & reportType)"""
return _swigibpy.EClient_reqFundamentalData(self, reqId, arg3, reportType)
|
reqFundamentalData(EClient self, TickerId reqId, Contract arg3, IBString const & reportType)
|
entailment
|
def calculateImpliedVolatility(self, reqId, contract, optionPrice, underPrice):
"""calculateImpliedVolatility(EClient self, TickerId reqId, Contract contract, double optionPrice, double underPrice)"""
return _swigibpy.EClient_calculateImpliedVolatility(self, reqId, contract, optionPrice, underPrice)
|
calculateImpliedVolatility(EClient self, TickerId reqId, Contract contract, double optionPrice, double underPrice)
|
entailment
|
def calculateOptionPrice(self, reqId, contract, volatility, underPrice):
"""calculateOptionPrice(EClient self, TickerId reqId, Contract contract, double volatility, double underPrice)"""
return _swigibpy.EClient_calculateOptionPrice(self, reqId, contract, volatility, underPrice)
|
calculateOptionPrice(EClient self, TickerId reqId, Contract contract, double volatility, double underPrice)
|
entailment
|
def reqAccountSummary(self, reqId, groupName, tags):
"""reqAccountSummary(EClient self, int reqId, IBString const & groupName, IBString const & tags)"""
return _swigibpy.EClient_reqAccountSummary(self, reqId, groupName, tags)
|
reqAccountSummary(EClient self, int reqId, IBString const & groupName, IBString const & tags)
|
entailment
|
def eConnect(self, host, port, clientId=0, extraAuth=False):
"""eConnect(EClientSocketBase self, char const * host, unsigned int port, int clientId=0, bool extraAuth=False) -> bool"""
return _swigibpy.EClientSocketBase_eConnect(self, host, port, clientId, extraAuth)
|
eConnect(EClientSocketBase self, char const * host, unsigned int port, int clientId=0, bool extraAuth=False) -> bool
|
entailment
|
def reqMktData(self, id, contract, genericTicks, snapshot, mktDataOptions):
"""reqMktData(EClientSocketBase self, TickerId id, Contract contract, IBString const & genericTicks, bool snapshot, TagValueListSPtr const & mktDataOptions)"""
return _swigibpy.EClientSocketBase_reqMktData(self, id, contract, genericTicks, snapshot, mktDataOptions)
|
reqMktData(EClientSocketBase self, TickerId id, Contract contract, IBString const & genericTicks, bool snapshot, TagValueListSPtr const & mktDataOptions)
|
entailment
|
def placeOrder(self, id, contract, order):
"""placeOrder(EClientSocketBase self, OrderId id, Contract contract, Order order)"""
return _swigibpy.EClientSocketBase_placeOrder(self, id, contract, order)
|
placeOrder(EClientSocketBase self, OrderId id, Contract contract, Order order)
|
entailment
|
def reqMktDepth(self, tickerId, contract, numRows, mktDepthOptions):
"""reqMktDepth(EClientSocketBase self, TickerId tickerId, Contract contract, int numRows, TagValueListSPtr const & mktDepthOptions)"""
return _swigibpy.EClientSocketBase_reqMktDepth(self, tickerId, contract, numRows, mktDepthOptions)
|
reqMktDepth(EClientSocketBase self, TickerId tickerId, Contract contract, int numRows, TagValueListSPtr const & mktDepthOptions)
|
entailment
|
def reqHistoricalData(self, id, contract, endDateTime, durationStr, barSizeSetting, whatToShow, useRTH, formatDate, chartOptions):
"""reqHistoricalData(EClientSocketBase self, TickerId id, Contract contract, IBString const & endDateTime, IBString const & durationStr, IBString const & barSizeSetting, IBString const & whatToShow, int useRTH, int formatDate, TagValueListSPtr const & chartOptions)"""
return _swigibpy.EClientSocketBase_reqHistoricalData(self, id, contract, endDateTime, durationStr, barSizeSetting, whatToShow, useRTH, formatDate, chartOptions)
|
reqHistoricalData(EClientSocketBase self, TickerId id, Contract contract, IBString const & endDateTime, IBString const & durationStr, IBString const & barSizeSetting, IBString const & whatToShow, int useRTH, int formatDate, TagValueListSPtr const & chartOptions)
|
entailment
|
def exerciseOptions(self, tickerId, contract, exerciseAction, exerciseQuantity, account, override):
"""exerciseOptions(EClientSocketBase self, TickerId tickerId, Contract contract, int exerciseAction, int exerciseQuantity, IBString const & account, int override)"""
return _swigibpy.EClientSocketBase_exerciseOptions(self, tickerId, contract, exerciseAction, exerciseQuantity, account, override)
|
exerciseOptions(EClientSocketBase self, TickerId tickerId, Contract contract, int exerciseAction, int exerciseQuantity, IBString const & account, int override)
|
entailment
|
def reqRealTimeBars(self, id, contract, barSize, whatToShow, useRTH, realTimeBarsOptions):
"""reqRealTimeBars(EClientSocketBase self, TickerId id, Contract contract, int barSize, IBString const & whatToShow, bool useRTH, TagValueListSPtr const & realTimeBarsOptions)"""
return _swigibpy.EClientSocketBase_reqRealTimeBars(self, id, contract, barSize, whatToShow, useRTH, realTimeBarsOptions)
|
reqRealTimeBars(EClientSocketBase self, TickerId id, Contract contract, int barSize, IBString const & whatToShow, bool useRTH, TagValueListSPtr const & realTimeBarsOptions)
|
entailment
|