signature stringlengths 8 3.44k | body stringlengths 0 1.41M | docstring stringlengths 1 122k | id stringlengths 5 17 |
|---|---|---|---|
def remove_end_optionals(ir_blocks): | new_ir_blocks = []<EOL>for block in ir_blocks:<EOL><INDENT>if not isinstance(block, EndOptional):<EOL><INDENT>new_ir_blocks.append(block)<EOL><DEDENT><DEDENT>return new_ir_blocks<EOL> | Return a list of IR blocks as a copy of the original, with EndOptional blocks removed. | f12670:m6 |
def validate(self): | super(OutputContextVertex, self).validate()<EOL>if self.location.field is not None:<EOL><INDENT>raise ValueError(u'<STR_LIT>'.format(self.location))<EOL><DEDENT> | Validate that the OutputContextVertex is correctly representable. | f12670:c0:m0 |
def to_match(self): | self.validate()<EOL>mark_name, field_name = self.location.get_location_name()<EOL>validate_safe_string(mark_name)<EOL>if field_name is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(field_name, self.location))<EOL><DEDENT>return mark_name<EOL> | Return a unicode object with the MATCH representation of this expression. | f12670:c0:m1 |
def get_unique_directives(ast): | if not ast.directives:<EOL><INDENT>return dict()<EOL><DEDENT>result = dict()<EOL>for directive_obj in ast.directives:<EOL><INDENT>directive_name = directive_obj.name.value<EOL>if directive_name in ALLOWED_DUPLICATED_DIRECTIVES:<EOL><INDENT>pass <EOL><DEDENT>elif directive_name in result:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(directive_name, ast.directives))<EOL><DEDENT>else:<EOL><INDENT>result[directive_name] = directive_obj<EOL><DEDENT><DEDENT>return result<EOL> | Return a dict of directive name to directive object for the given AST node.
Any directives that are allowed to exist more than once on any AST node are ignored.
For any directives that can only exist up to once, we verify that they are not duplicated
raising GraphQLCompilationError in case we find them more than once on the AST node.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
dict of string to directive object | f12671:m0 |
def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields): | result = []<EOL>if ast.directives: <EOL><INDENT>for directive_obj in ast.directives:<EOL><INDENT>if directive_obj.name.value == '<STR_LIT>':<EOL><INDENT>filtered_field_name = get_ast_field_name_or_none(ast)<EOL>if is_filter_with_outer_scope_vertex_field_operator(directive_obj):<EOL><INDENT>if not is_vertex_field_type(current_schema_type):<EOL><INDENT>raise GraphQLCompilationError(<EOL>u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(directive_obj, current_schema_type, filtered_field_name))<EOL><DEDENT>elif isinstance(ast, InlineFragment):<EOL><INDENT>raise GraphQLCompilationError(<EOL>u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(directive_obj, current_schema_type))<EOL><DEDENT>else:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>else:<EOL><INDENT>operation = FilterOperationInfo(<EOL>directive=directive_obj, field_name=filtered_field_name,<EOL>field_type=current_schema_type, field_ast=ast)<EOL>result.append(operation)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>if inner_vertex_fields: <EOL><INDENT>for inner_ast in inner_vertex_fields:<EOL><INDENT>for directive_obj in inner_ast.directives:<EOL><INDENT>if is_filter_with_outer_scope_vertex_field_operator(directive_obj):<EOL><INDENT>filtered_field_name = get_ast_field_name(inner_ast)<EOL>filtered_field_type = get_vertex_field_type(<EOL>current_schema_type, filtered_field_name)<EOL>operation = FilterOperationInfo(<EOL>directive=directive_obj, field_name=filtered_field_name,<EOL>field_type=filtered_field_type, field_ast=inner_ast)<EOL>result.append(operation)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return result<EOL> | Get all filter directives that apply to the current field.
This helper abstracts away the fact that some vertex field filtering operators apply on the
inner scope (the scope of the inner vertex field on which they are applied), whereas some apply
on the outer scope (the scope that contains the inner vertex field).
See filters.py for more information.
Args:
ast: a GraphQL AST object for which to load local filters, from the graphql library
current_schema_type: GraphQLType, the schema type at the current AST location
inner_vertex_fields: a list of inner AST objects representing vertex fields that are within
the current field. If currently processing a property field (i.e.
there are no inner vertex fields), this argument may be set to None.
Returns:
list of FilterOperationInfo objects.
If the field_ast field is of type InlineFragment, the field_name field is set to None. | f12671:m1 |
def validate_property_directives(directives): | for directive_name in six.iterkeys(directives):<EOL><INDENT>if directive_name in VERTEX_ONLY_DIRECTIVES:<EOL><INDENT>raise GraphQLCompilationError(<EOL>u'<STR_LIT>'.format(directive_name))<EOL><DEDENT><DEDENT> | Validate the directives that appear at a property field. | f12671:m2 |
def validate_vertex_directives(directives): | for directive_name in six.iterkeys(directives):<EOL><INDENT>if directive_name in PROPERTY_ONLY_DIRECTIVES:<EOL><INDENT>raise GraphQLCompilationError(<EOL>u'<STR_LIT>'.format(directive_name))<EOL><DEDENT><DEDENT> | Validate the directives that appear at a vertex field. | f12671:m3 |
def validate_root_vertex_directives(root_ast): | directives_present_at_root = set()<EOL>for directive_obj in root_ast.directives:<EOL><INDENT>directive_name = directive_obj.name.value<EOL>if is_filter_with_outer_scope_vertex_field_operator(directive_obj):<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(directive_obj))<EOL><DEDENT>directives_present_at_root.add(directive_name)<EOL><DEDENT>disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT<EOL>if disallowed_directives:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(disallowed_directives))<EOL><DEDENT> | Validate the directives that appear at the root vertex field. | f12671:m4 |
def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives): | fold_directive = directives.get('<STR_LIT>', None)<EOL>optional_directive = directives.get('<STR_LIT>', None)<EOL>output_source_directive = directives.get('<STR_LIT>', None)<EOL>recurse_directive = directives.get('<STR_LIT>', None)<EOL>if fold_directive and optional_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if fold_directive and output_source_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if fold_directive and recurse_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if optional_directive and output_source_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if optional_directive and recurse_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT> | Ensure that the specified vertex field directives are not mutually disallowed. | f12671:m5 |
def validate_vertex_field_directive_in_context(parent_location, vertex_field_name,<EOL>directives, context): | fold_directive = directives.get('<STR_LIT>', None)<EOL>optional_directive = directives.get('<STR_LIT>', None)<EOL>recurse_directive = directives.get('<STR_LIT>', None)<EOL>output_source_directive = directives.get('<STR_LIT>', None)<EOL>fold_context = '<STR_LIT>' in context<EOL>optional_context = '<STR_LIT>' in context<EOL>output_source_context = '<STR_LIT>' in context<EOL>if fold_directive and fold_context:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if optional_directive and fold_context:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if output_source_directive and fold_context:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if recurse_directive and fold_context:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if output_source_context and not fold_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if optional_context and fold_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT>if optional_context and output_source_directive:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(parent_location, vertex_field_name))<EOL><DEDENT> | Ensure that the specified vertex field directives are allowed in the current context. | f12671:m6 |
def compile_graphql_to_match(schema, graphql_string, type_equivalence_hints=None): | lowering_func = ir_lowering_match.lower_ir<EOL>query_emitter_func = emit_match.emit_code_from_ir<EOL>return _compile_graphql_generic(<EOL>MATCH_LANGUAGE, lowering_func, query_emitter_func,<EOL>schema, graphql_string, type_equivalence_hints, None)<EOL> | Compile the GraphQL input using the schema into a MATCH query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_string: the GraphQL query to compile to MATCH, as a string
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object | f12672:m0 |
def compile_graphql_to_gremlin(schema, graphql_string, type_equivalence_hints=None): | lowering_func = ir_lowering_gremlin.lower_ir<EOL>query_emitter_func = emit_gremlin.emit_code_from_ir<EOL>return _compile_graphql_generic(<EOL>GREMLIN_LANGUAGE, lowering_func, query_emitter_func,<EOL>schema, graphql_string, type_equivalence_hints, None)<EOL> | Compile the GraphQL input using the schema into a Gremlin query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_string: the GraphQL query to compile to Gremlin, as a string
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object | f12672:m1 |
def compile_graphql_to_sql(schema, graphql_string, compiler_metadata, type_equivalence_hints=None): | lowering_func = ir_lowering_sql.lower_ir<EOL>query_emitter_func = emit_sql.emit_code_from_ir<EOL>return _compile_graphql_generic(<EOL>SQL_LANGUAGE, lowering_func, query_emitter_func,<EOL>schema, graphql_string, type_equivalence_hints, compiler_metadata)<EOL> | Compile the GraphQL input using the schema into a SQL query and associated metadata.
Args:
schema: GraphQL schema object describing the schema of the graph to be queried
graphql_string: the GraphQL query to compile to SQL, as a string
compiler_metadata: SQLAlchemy metadata containing tables for use during compilation.
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
a CompilationResult object | f12672:m2 |
def _compile_graphql_generic(language, lowering_func, query_emitter_func,<EOL>schema, graphql_string, type_equivalence_hints, compiler_metadata): | ir_and_metadata = graphql_to_ir(<EOL>schema, graphql_string, type_equivalence_hints=type_equivalence_hints)<EOL>lowered_ir_blocks = lowering_func(<EOL>ir_and_metadata.ir_blocks, ir_and_metadata.query_metadata_table,<EOL>type_equivalence_hints=type_equivalence_hints)<EOL>query = query_emitter_func(lowered_ir_blocks, compiler_metadata)<EOL>return CompilationResult(<EOL>query=query,<EOL>language=language,<EOL>output_metadata=ir_and_metadata.output_metadata,<EOL>input_metadata=ir_and_metadata.input_metadata)<EOL> | Compile the GraphQL input, lowering and emitting the query using the given functions.
Args:
language: string indicating the target language to compile to.
lowering_func: Function to lower the compiler IR into a compatible form for the target
language backend.
query_emitter_func: Function that emits a query in the target language from the lowered IR.
schema: GraphQL schema object describing the schema of the graph to be queried.
graphql_string: the GraphQL query to compile to the target language, as a string.
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
compiler_metadata: optional target specific metadata for usage by the query_emitter_func.
Returns:
a CompilationResult object | f12672:m3 |
def _per_location_tuple_to_step(ir_tuple): | root_block = ir_tuple[<NUM_LIT:0>]<EOL>if not isinstance(root_block, root_block_types):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(root_block, ir_tuple))<EOL><DEDENT>coerce_type_block = None<EOL>where_block = None<EOL>as_block = None<EOL>for block in ir_tuple[<NUM_LIT:1>:]:<EOL><INDENT>if isinstance(block, CoerceType):<EOL><INDENT>if coerce_type_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(block, coerce_type_block, ir_tuple))<EOL><DEDENT>coerce_type_block = block<EOL><DEDENT>elif isinstance(block, MarkLocation):<EOL><INDENT>if as_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(block, as_block, ir_tuple))<EOL><DEDENT>as_block = block<EOL><DEDENT>elif isinstance(block, Filter):<EOL><INDENT>if where_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(block, as_block, ir_tuple))<EOL><DEDENT>if as_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(block, where_block, ir_tuple))<EOL><DEDENT>where_block = block<EOL><DEDENT>else:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(block, ir_tuple))<EOL><DEDENT><DEDENT>step = MatchStep(root_block=root_block,<EOL>coerce_type_block=coerce_type_block,<EOL>where_block=where_block,<EOL>as_block=as_block)<EOL>if isinstance(root_block, Backtrack):<EOL><INDENT>if where_block is not None or coerce_type_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(step))<EOL><DEDENT><DEDENT>return step<EOL> | Construct a MatchStep from a tuple of its constituent blocks. | f12673:m0 |
def _split_ir_into_match_steps(pruned_ir_blocks): | output = []<EOL>current_tuple = None<EOL>for block in pruned_ir_blocks:<EOL><INDENT>if isinstance(block, OutputSource):<EOL><INDENT>continue<EOL><DEDENT>elif isinstance(block, root_block_types):<EOL><INDENT>if current_tuple is not None:<EOL><INDENT>output.append(current_tuple)<EOL><DEDENT>current_tuple = (block,)<EOL><DEDENT>elif isinstance(block, (CoerceType, Filter, MarkLocation)):<EOL><INDENT>current_tuple += (block,)<EOL><DEDENT>else:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(block, pruned_ir_blocks))<EOL><DEDENT><DEDENT>if current_tuple is None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(pruned_ir_blocks))<EOL><DEDENT>output.append(current_tuple)<EOL>return [_per_location_tuple_to_step(x) for x in output]<EOL> | Split a list of IR blocks into per-location MATCH steps.
Args:
pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step.
Returns:
list of MatchStep namedtuples, each of which contains all basic blocks that correspond
to a single MATCH step. | f12673:m1 |
def _split_match_steps_into_match_traversals(match_steps): | output = []<EOL>current_list = None<EOL>for step in match_steps:<EOL><INDENT>if isinstance(step.root_block, QueryRoot):<EOL><INDENT>if current_list is not None:<EOL><INDENT>output.append(current_list)<EOL><DEDENT>current_list = [step]<EOL><DEDENT>else:<EOL><INDENT>current_list.append(step)<EOL><DEDENT><DEDENT>if current_list is None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(match_steps))<EOL><DEDENT>output.append(current_list)<EOL>return output<EOL> | Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal. | f12673:m2 |
def _extract_global_operations(ir_blocks_except_output_and_folds): | global_operation_blocks = []<EOL>remaining_ir_blocks = []<EOL>in_global_operations_scope = False<EOL>for block in ir_blocks_except_output_and_folds:<EOL><INDENT>if isinstance(block, (ConstructResult, Fold, Unfold)):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(type(block).__name__, ir_blocks_except_output_and_folds))<EOL><DEDENT>elif isinstance(block, GlobalOperationsStart):<EOL><INDENT>in_global_operations_scope = True<EOL><DEDENT>elif in_global_operations_scope:<EOL><INDENT>global_operation_blocks.append(block)<EOL><DEDENT>else:<EOL><INDENT>remaining_ir_blocks.append(block)<EOL><DEDENT><DEDENT>return global_operation_blocks, remaining_ir_blocks<EOL> | Extract all global operation blocks (all blocks following GlobalOperationsStart).
Args:
ir_blocks_except_output_and_folds: list of IR blocks (excluding ConstructResult and all
fold blocks), to extract global operations from
Returns:
tuple (global_operation_blocks, remaining_ir_blocks):
- global_operation_blocks: list of IR blocks following a GlobalOperationsStart block if it
exists, and an empty list otherwise
- remaining_ir_blocks: list of IR blocks excluding GlobalOperationsStart and all global
operation blocks | f12673:m3 |
def convert_to_match_query(ir_blocks): | output_block = ir_blocks[-<NUM_LIT:1>]<EOL>if not isinstance(output_block, ConstructResult):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(output_block, ir_blocks))<EOL><DEDENT>ir_except_output = ir_blocks[:-<NUM_LIT:1>]<EOL>folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output)<EOL>global_operation_ir_blocks_tuple = _extract_global_operations(ir_except_output_and_folds)<EOL>global_operation_blocks, pruned_ir_blocks = global_operation_ir_blocks_tuple<EOL>if len(global_operation_blocks) > <NUM_LIT:1>:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(global_operation_blocks, ir_blocks))<EOL><DEDENT>if len(global_operation_blocks) == <NUM_LIT:1>:<EOL><INDENT>if not isinstance(global_operation_blocks[<NUM_LIT:0>], Filter):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>.format(global_operation_blocks[<NUM_LIT:0>]))<EOL><DEDENT>where_block = global_operation_blocks[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>where_block = None<EOL><DEDENT>match_steps = _split_ir_into_match_steps(pruned_ir_blocks)<EOL>match_traversals = _split_match_steps_into_match_traversals(match_steps)<EOL>return MatchQuery(<EOL>match_traversals=match_traversals,<EOL>folds=folds,<EOL>output_block=output_block,<EOL>where_block=where_block,<EOL>)<EOL> | Convert the list of IR blocks into a MatchQuery object, for easier manipulation. | f12673:m4 |
def get_only_element_from_collection(one_element_collection): | if len(one_element_collection) != <NUM_LIT:1>:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>.format(one_element_collection))<EOL><DEDENT>return funcy.first(one_element_collection)<EOL> | Assert that the collection has exactly one element, then return that element. | f12675:m0 |
def get_ast_field_name(ast): | replacements = {<EOL>TYPENAME_META_FIELD_NAME: '<STR_LIT>'<EOL>}<EOL>base_field_name = ast.name.value<EOL>normalized_name = replacements.get(base_field_name, base_field_name)<EOL>return normalized_name<EOL> | Return the normalized field name for the given AST node. | f12675:m1 |
def get_ast_field_name_or_none(ast): | if isinstance(ast, InlineFragment):<EOL><INDENT>return None<EOL><DEDENT>return get_ast_field_name(ast)<EOL> | Return the field name for the AST node, or None if the AST is an InlineFragment. | f12675:m2 |
def get_field_type_from_schema(schema_type, field_name): | if field_name == '<STR_LIT>':<EOL><INDENT>return GraphQLString<EOL><DEDENT>else:<EOL><INDENT>if field_name not in schema_type.fields:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(field_name, schema_type))<EOL><DEDENT>return schema_type.fields[field_name].type<EOL><DEDENT> | Return the type of the field in the given type, accounting for field name normalization. | f12675:m3 |
def get_vertex_field_type(current_schema_type, vertex_field_name): | <EOL>if not is_vertex_field_name(vertex_field_name):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(current_schema_type, vertex_field_name))<EOL><DEDENT>raw_field_type = get_field_type_from_schema(current_schema_type, vertex_field_name)<EOL>if not isinstance(strip_non_null_from_type(raw_field_type), GraphQLList):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(current_schema_type, vertex_field_name,<EOL>raw_field_type))<EOL><DEDENT>return raw_field_type.of_type<EOL> | Return the type of the vertex within the specified vertex field name of the given type. | f12675:m4 |
def strip_non_null_from_type(graphql_type): | while isinstance(graphql_type, GraphQLNonNull):<EOL><INDENT>graphql_type = graphql_type.of_type<EOL><DEDENT>return graphql_type<EOL> | Return the GraphQL type stripped of its GraphQLNonNull annotations. | f12675:m5 |
def get_edge_direction_and_name(vertex_field_name): | edge_direction = None<EOL>edge_name = None<EOL>if vertex_field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX):<EOL><INDENT>edge_direction = OUTBOUND_EDGE_DIRECTION<EOL>edge_name = vertex_field_name[len(OUTBOUND_EDGE_FIELD_PREFIX):]<EOL><DEDENT>elif vertex_field_name.startswith(INBOUND_EDGE_FIELD_PREFIX):<EOL><INDENT>edge_direction = INBOUND_EDGE_DIRECTION<EOL>edge_name = vertex_field_name[len(INBOUND_EDGE_FIELD_PREFIX):]<EOL><DEDENT>else:<EOL><INDENT>raise AssertionError(u'<STR_LIT>', vertex_field_name)<EOL><DEDENT>validate_safe_string(edge_name)<EOL>return edge_direction, edge_name<EOL> | Get the edge direction and name from a non-root vertex field name. | f12675:m6 |
def is_vertex_field_name(field_name): | return (<EOL>field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX) or<EOL>field_name.startswith(INBOUND_EDGE_FIELD_PREFIX)<EOL>)<EOL> | Return True if the field's name indicates it is a non-root vertex field. | f12675:m7 |
def is_vertex_field_type(graphql_type): | <EOL>underlying_type = strip_non_null_from_type(graphql_type)<EOL>return isinstance(underlying_type, (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType))<EOL> | Return True if the argument is a vertex field type, and False otherwise. | f12675:m8 |
def is_graphql_type(graphql_type): | <EOL>return is_type(graphql_type)<EOL> | Return True if the argument is a GraphQL type object, and False otherwise. | f12675:m9 |
def ensure_unicode_string(value): | if not isinstance(value, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(value))<EOL><DEDENT>return six.text_type(value)<EOL> | Ensure the value is a string, and return it as unicode. | f12675:m10 |
def get_uniquely_named_objects_by_name(object_list): | if not object_list:<EOL><INDENT>return dict()<EOL><DEDENT>result = dict()<EOL>for obj in object_list:<EOL><INDENT>name = obj.name.value<EOL>if name in result:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(name, object_list))<EOL><DEDENT>result[name] = obj<EOL><DEDENT>return result<EOL> | Return dict of name -> object pairs from a list of objects with unique names.
Args:
object_list: list of objects, each X of which has a unique name accessible as X.name.value
Returns:
dict, { X.name.value: X for x in object_list }
If the list is empty or None, returns an empty dict. | f12675:m11 |
def safe_quoted_string(value): | validate_safe_string(value)<EOL>return u'<STR_LIT>'.format(value)<EOL> | Return the provided string, surrounded by single quotes. Unsafe strings cause exceptions. | f12675:m12 |
def validate_safe_string(value): | <EOL>legal_strings_with_special_chars = frozenset({'<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:%>'})<EOL>if not isinstance(value, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(<EOL>type(value).__name__, value))<EOL><DEDENT>if not value:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>')<EOL><DEDENT>if value[<NUM_LIT:0>] in string.digits:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'.format(value))<EOL><DEDENT>if not set(value).issubset(VARIABLE_ALLOWED_CHARS) andvalue not in legal_strings_with_special_chars:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'.format(value))<EOL><DEDENT> | Ensure the provided string does not have illegal characters. | f12675:m13 |
def validate_output_name(value): | internal_name_prefix = u'<STR_LIT>'<EOL>if value.startswith(internal_name_prefix):<EOL><INDENT>raise GraphQLCompilationError(<EOL>u'<STR_LIT>')<EOL><DEDENT> | Ensure that the provided string is valid for use as an output name. | f12675:m14 |
def validate_edge_direction(edge_direction): | if not isinstance(edge_direction, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(<EOL>type(edge_direction), edge_direction))<EOL><DEDENT>if edge_direction not in ALLOWED_EDGE_DIRECTIONS:<EOL><INDENT>raise ValueError(u'<STR_LIT>'.format(edge_direction))<EOL><DEDENT> | Ensure the provided edge direction is either "in" or "out". | f12675:m15 |
def validate_marked_location(location): | if not isinstance(location, (Location, FoldScopeLocation)):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(<EOL>type(location).__name__, location))<EOL><DEDENT>if location.field is not None:<EOL><INDENT>raise GraphQLCompilationError(u'<STR_LIT>'.format(location))<EOL><DEDENT> | Validate that a Location object is safe for marking, and not at a field. | f12675:m16 |
def _create_fold_path_component(edge_direction, edge_name): | return ((edge_direction, edge_name),)<EOL> | Return a tuple representing a fold_path component of a FoldScopeLocation. | f12675:m17 |
def invert_dict(invertible_dict): | inverted = {}<EOL>for k, v in six.iteritems(invertible_dict):<EOL><INDENT>if not isinstance(v, Hashable):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(<EOL>k, type(v).__name__))<EOL><DEDENT>if v in inverted:<EOL><INDENT>raise TypeError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(<EOL>inverted[v], k))<EOL><DEDENT>inverted[v] = k<EOL><DEDENT>return inverted<EOL> | Invert a dict. A dict is invertible if values are unique and hashable. | f12675:m18 |
def is_variable_argument(argument): | return argument.startswith('<STR_LIT:$>')<EOL> | Return True if the directive argument is a runtime variable, and False otherwise. | f12675:m19 |
def is_tag_argument(argument): | return argument.startswith('<STR_LIT:%>')<EOL> | Return True if the directive argument is a tagged value, and False otherwise. | f12675:m20 |
def get_directive_argument_name(argument): | return argument[<NUM_LIT:1>:]<EOL> | Return the name of a variable or tag argument without the $ or %. | f12675:m21 |
@abstractmethod<EOL><INDENT>def navigate_to_field(self, field):<DEDENT> | raise NotImplementedError()<EOL> | Return a new BaseLocation object at the specified field of the current BaseLocation. | f12675:c0:m0 |
@abstractmethod<EOL><INDENT>def navigate_to_subpath(self, child):<DEDENT> | raise NotImplementedError()<EOL> | Return a new BaseLocation after a traversal to the specified child location. | f12675:c0:m1 |
@abstractmethod<EOL><INDENT>def get_location_name(self):<DEDENT> | raise NotImplementedError()<EOL> | Return a tuple of a unique name of the location, and the current field name (or None). | f12675:c0:m2 |
@abstractmethod<EOL><INDENT>def _check_if_object_of_same_type_is_smaller(self, other):<DEDENT> | raise NotImplementedError()<EOL> | Return True if the other object is smaller than self in the total ordering. | f12675:c0:m3 |
@abstractmethod<EOL><INDENT>def __eq__(self, other):<DEDENT> | raise NotImplementedError()<EOL> | Return True if the BaseLocations are equal, and False otherwise. | f12675:c0:m4 |
def __lt__(self, other): | if isinstance(self, Location) and isinstance(other, Location):<EOL><INDENT>return self._check_if_object_of_same_type_is_smaller(other)<EOL><DEDENT>elif isinstance(self, FoldScopeLocation) and isinstance(other, FoldScopeLocation):<EOL><INDENT>return self._check_if_object_of_same_type_is_smaller(other)<EOL><DEDENT>elif isinstance(self, Location) and isinstance(other, FoldScopeLocation):<EOL><INDENT>if self != other.base_location:<EOL><INDENT>return self < other.base_location<EOL><DEDENT>return False<EOL><DEDENT>elif isinstance(self, FoldScopeLocation) and isinstance(other, Location):<EOL><INDENT>return not other <= self<EOL><DEDENT>else:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(type(self).__name__, type(other).__name__, self, other))<EOL><DEDENT> | Return True if the other object is smaller than self in the total ordering. | f12675:c0:m5 |
def __init__(self, query_path, field=None, visit_counter=<NUM_LIT:1>): | if not isinstance(query_path, tuple):<EOL><INDENT>raise TypeError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(type(query_path).__name__, query_path))<EOL><DEDENT>if field and not isinstance(field, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(type(field).__name__, field))<EOL><DEDENT>self.query_path = query_path<EOL>self.field = field<EOL>self.visit_counter = visit_counter<EOL> | Create a new Location object.
Used to uniquely identify locations in the graph traversal, with three components.
- The 'query_path' is a tuple containing the in-order nested set of vertex fields where
the Location is.
- The 'field' is a string set to the name of a property field, if the
Location is at a property field, or None otherwise.
- The 'visit_counter' is a counter that disambiguates between consecutive,
but semantically different, visits to the same 'query_path' and 'field'.
In the following example, note that the Location objects for 'X' and 'Y'
have identical values for both 'query_path' (empty tuple) and 'field' (None),
but are not semantically equivalent:
g.as('X').out('foo').back('X').as('Y').out('bar').optional('Y')
The difference between 'X' and 'Y' is in the .optional() statement --
.optional('Y') says that the 'bar' edge is optional, and .optional('X') says that
both 'foo' and 'bar' are optional. Hence, the Location objects for 'X' and 'Y'
should have different 'visit_counter' values.
Args:
query_path: tuple of strings, in-order, one for each vertex in the
current nested position in the graph
field: string if at a field in a vertex, or None if at a vertex
visit_counter: int, number that allows semantic disambiguation of otherwise equivalent
Location objects -- see the explanation above.
Returns:
new Location object with the provided properties | f12675:c1:m0 |
def navigate_to_field(self, field): | if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(self))<EOL><DEDENT>return Location(self.query_path, field=field, visit_counter=self.visit_counter)<EOL> | Return a new Location object at the specified field of the current Location's vertex. | f12675:c1:m1 |
def at_vertex(self): | if not self.field:<EOL><INDENT>return self<EOL><DEDENT>return Location(self.query_path, field=None, visit_counter=self.visit_counter)<EOL> | Get the Location ignoring its field component. | f12675:c1:m2 |
def navigate_to_subpath(self, child): | if not isinstance(child, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(child))<EOL><DEDENT>if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(self))<EOL><DEDENT>return Location(self.query_path + (child,))<EOL> | Return a new Location object at a child vertex of the current Location's vertex. | f12675:c1:m3 |
def navigate_to_fold(self, folded_child): | if not isinstance(folded_child, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(folded_child))<EOL><DEDENT>if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(self))<EOL><DEDENT>edge_direction, edge_name = get_edge_direction_and_name(folded_child)<EOL>fold_path = _create_fold_path_component(edge_direction, edge_name)<EOL>return FoldScopeLocation(self, fold_path)<EOL> | Return a new FoldScopeLocation for the folded child vertex of the current Location. | f12675:c1:m4 |
def revisit(self): | if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(self))<EOL><DEDENT>return Location(self.query_path, field=None, visit_counter=(self.visit_counter + <NUM_LIT:1>))<EOL> | Return a new Location object with an incremented 'visit_counter'. | f12675:c1:m5 |
def get_location_name(self): | mark_name = u'<STR_LIT>'.join(self.query_path) + u'<STR_LIT>' + six.text_type(self.visit_counter)<EOL>return (mark_name, self.field)<EOL> | Return a tuple of a unique name of the Location, and the current field name (or None). | f12675:c1:m6 |
def is_revisited_at(self, other_location): | <EOL>return (isinstance(other_location, Location) and<EOL>self.query_path == other_location.query_path and<EOL>self.visit_counter < other_location.visit_counter)<EOL> | Return True if other_location is a revisit of this location, and False otherwise. | f12675:c1:m7 |
def __str__(self): | return u'<STR_LIT>'.format(self.query_path, self.field, self.visit_counter)<EOL> | Return a human-readable str representation of the Location object. | f12675:c1:m8 |
def __repr__(self): | return self.__str__()<EOL> | Return a human-readable str representation of the Location object. | f12675:c1:m9 |
def __eq__(self, other): | return (type(self) == type(other) and<EOL>self.query_path == other.query_path and<EOL>self.field == other.field and<EOL>self.visit_counter == other.visit_counter)<EOL> | Return True if the Locations are equal, and False otherwise. | f12675:c1:m10 |
def __ne__(self, other): | return not self.__eq__(other)<EOL> | Check another object for non-equality against this one. | f12675:c1:m11 |
def _check_if_object_of_same_type_is_smaller(self, other): | if not isinstance(other, Location):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>.format(type(other).__name__, other))<EOL><DEDENT>if len(self.query_path) != len(other.query_path):<EOL><INDENT>return len(self.query_path) < len(other.query_path)<EOL><DEDENT>if self.query_path != other.query_path:<EOL><INDENT>return self.query_path < other.query_path<EOL><DEDENT>if self.visit_counter != other.visit_counter:<EOL><INDENT>return self.visit_counter < other.visit_counter<EOL><DEDENT>if self.field is None:<EOL><INDENT>return other.field is not None<EOL><DEDENT>if other.field is None:<EOL><INDENT>return False<EOL><DEDENT>return self.field < other.field<EOL> | Return True if the other object is smaller than self in the total ordering. | f12675:c1:m12 |
def __hash__(self): | return hash(self.query_path) ^ hash(self.field) ^ hash(self.visit_counter)<EOL> | Return the object's hash value. | f12675:c1:m13 |
def __init__(self, base_location, fold_path, field=None): | if not isinstance(base_location, Location):<EOL><INDENT>raise TypeError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(type(base_location), base_location))<EOL><DEDENT>if base_location.field:<EOL><INDENT>raise ValueError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(base_location))<EOL><DEDENT>if not isinstance(fold_path, tuple) or len(fold_path) == <NUM_LIT:0>:<EOL><INDENT>raise TypeError(u'<STR_LIT>'<EOL>.format(type(fold_path), fold_path))<EOL><DEDENT>fold_path_is_valid = all(<EOL>len(element) == <NUM_LIT:2> and element[<NUM_LIT:0>] in ALLOWED_EDGE_DIRECTIONS<EOL>for element in fold_path<EOL>)<EOL>if not fold_path_is_valid:<EOL><INDENT>raise ValueError(u'<STR_LIT>'.format(fold_path))<EOL><DEDENT>self.base_location = base_location<EOL>self.fold_path = fold_path<EOL>self.field = field<EOL> | Create a new FoldScopeLocation object. Used to represent the locations of @fold scopes.
Args:
base_location: Location object defining where the @fold scope is rooted. In other words,
the location of the tightest scope that fully contains the @fold scope.
fold_path: tuple of (edge_direction, edge_name) tuples, containing the traversal path
of the fold, starting from the base_location of the @fold scope.
field: string if at a field in a vertex, or None if at a vertex
Returns:
new FoldScopeLocation object | f12675:c2:m0 |
def get_location_name(self): | <EOL>first_folded_edge_direction, first_folded_edge_name = self.get_first_folded_edge()<EOL>unique_name = u'<STR_LIT>'.join((<EOL>self.base_location.get_location_name()[<NUM_LIT:0>],<EOL>u'<STR_LIT>',<EOL>first_folded_edge_direction,<EOL>u'<STR_LIT:_>',<EOL>first_folded_edge_name<EOL>))<EOL>return (unique_name, self.field)<EOL> | Return a tuple of a unique name of the location, and the current field name (or None). | f12675:c2:m1 |
def get_first_folded_edge(self): | <EOL>first_folded_edge_direction, first_folded_edge_name = self.fold_path[<NUM_LIT:0>]<EOL>return first_folded_edge_direction, first_folded_edge_name<EOL> | Return a tuple representing the first folded edge within the fold scope. | f12675:c2:m2 |
def at_vertex(self): | if not self.field:<EOL><INDENT>return self<EOL><DEDENT>return FoldScopeLocation(self.base_location, self.fold_path, field=None)<EOL> | Get the FoldScopeLocation ignoring its field component. | f12675:c2:m3 |
def navigate_to_field(self, field): | if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(self))<EOL><DEDENT>return FoldScopeLocation(self.base_location, self.fold_path, field=field)<EOL> | Return a new location object at the specified field of the current location. | f12675:c2:m4 |
def navigate_to_subpath(self, child): | if not isinstance(child, six.string_types):<EOL><INDENT>raise TypeError(u'<STR_LIT>'.format(child))<EOL><DEDENT>if self.field:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(self))<EOL><DEDENT>edge_direction, edge_name = get_edge_direction_and_name(child)<EOL>new_fold_path = self.fold_path + _create_fold_path_component(edge_direction, edge_name)<EOL>return FoldScopeLocation(self.base_location, new_fold_path)<EOL> | Return a new location after a traversal to the specified child location. | f12675:c2:m5 |
def __str__(self): | return u'<STR_LIT>'.format(<EOL>self.base_location, self.fold_path, self.field)<EOL> | Return a human-readable str representation of the FoldScopeLocation object. | f12675:c2:m6 |
def __repr__(self): | return self.__str__()<EOL> | Return a human-readable str representation of the FoldScopeLocation object. | f12675:c2:m7 |
def __eq__(self, other): | return (type(self) == type(other) and<EOL>self.base_location == other.base_location and<EOL>self.fold_path == other.fold_path and<EOL>self.field == other.field)<EOL> | Return True if the FoldScopeLocations are equal, and False otherwise. | f12675:c2:m8 |
def __ne__(self, other): | return not self.__eq__(other)<EOL> | Check another object for non-equality against this one. | f12675:c2:m9 |
def __hash__(self): | return hash(self.base_location) ^ hash(self.fold_path) ^ hash(self.field)<EOL> | Return the object's hash value. | f12675:c2:m10 |
def _check_if_object_of_same_type_is_smaller(self, other): | if not isinstance(other, FoldScopeLocation):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>.format(type(other).__name__, other))<EOL><DEDENT>if self.base_location != other.base_location:<EOL><INDENT>return self.base_location < other.base_location<EOL><DEDENT>if len(self.fold_path) != len(other.fold_path):<EOL><INDENT>return len(self.fold_path) < len(other.fold_path)<EOL><DEDENT>if self.fold_path != other.fold_path:<EOL><INDENT>return self.fold_path < other.fold_path<EOL><DEDENT>if self.field is None:<EOL><INDENT>return other.field is not None<EOL><DEDENT>if other.field is None:<EOL><INDENT>return False<EOL><DEDENT>return self.field < other.field<EOL> | Return True if the other object is smaller than self in the total ordering. | f12675:c2:m11 |
def rewrite_binary_composition_inside_ternary_conditional(ir_blocks): | def visitor_fn(expression):<EOL><INDENT>"""<STR_LIT>"""<EOL>if not isinstance(expression, TernaryConditional):<EOL><INDENT>return expression<EOL><DEDENT>if_true = expression.if_true<EOL>if_false = expression.if_false<EOL>true_branch_rewriting_necessary = isinstance(if_true, BinaryComposition)<EOL>false_branch_rewriting_necessary = isinstance(if_false, BinaryComposition)<EOL>if not (true_branch_rewriting_necessary or false_branch_rewriting_necessary):<EOL><INDENT>return expression<EOL><DEDENT>if true_branch_rewriting_necessary:<EOL><INDENT>if_true = TernaryConditional(if_true, TrueLiteral, FalseLiteral)<EOL><DEDENT>if false_branch_rewriting_necessary:<EOL><INDENT>if_false = TernaryConditional(if_false, TrueLiteral, FalseLiteral)<EOL><DEDENT>ternary = TernaryConditional(expression.predicate, if_true, if_false)<EOL>return BinaryComposition(u'<STR_LIT:=>', ternary, TrueLiteral)<EOL><DEDENT>new_ir_blocks = [<EOL>block.visit_and_update_expressions(visitor_fn)<EOL>for block in ir_blocks<EOL>]<EOL>return new_ir_blocks<EOL> | Rewrite BinaryComposition expressions in the true/false values of TernaryConditionals. | f12676:m0
def lower_has_substring_binary_compositions(ir_blocks): | def visitor_fn(expression):<EOL><INDENT>"""<STR_LIT>"""<EOL>if not isinstance(expression, BinaryComposition) or expression.operator != u'<STR_LIT>':<EOL><INDENT>return expression<EOL><DEDENT>return BinaryComposition(<EOL>u'<STR_LIT>',<EOL>expression.left,<EOL>BinaryComposition(<EOL>u'<STR_LIT:+>',<EOL>Literal('<STR_LIT:%>'),<EOL>BinaryComposition(<EOL>u'<STR_LIT:+>',<EOL>expression.right,<EOL>Literal('<STR_LIT:%>')<EOL>)<EOL>)<EOL>)<EOL><DEDENT>new_ir_blocks = [<EOL>block.visit_and_update_expressions(visitor_fn)<EOL>for block in ir_blocks<EOL>]<EOL>return new_ir_blocks<EOL> | Lower Filter blocks that use the "has_substring" operation into MATCH-representable form. | f12676:m1 |
def truncate_repeated_single_step_traversals(match_query): | <EOL>new_match_traversals = []<EOL>visited_locations = set()<EOL>for current_match_traversal in match_query.match_traversals:<EOL><INDENT>ignore_traversal = False<EOL>if len(current_match_traversal) == <NUM_LIT:1>:<EOL><INDENT>single_step = current_match_traversal[<NUM_LIT:0>]<EOL>if single_step.as_block is None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(current_match_traversal, match_query))<EOL><DEDENT>if single_step.as_block.location in visited_locations:<EOL><INDENT>ignore_traversal = True<EOL><DEDENT><DEDENT>if not ignore_traversal:<EOL><INDENT>for step in current_match_traversal:<EOL><INDENT>if step.as_block is not None:<EOL><INDENT>visited_locations.add(step.as_block.location)<EOL><DEDENT><DEDENT>new_match_traversals.append(current_match_traversal)<EOL><DEDENT><DEDENT>return match_query._replace(match_traversals=new_match_traversals)<EOL> | Truncate one-step traversals that overlap a previous traversal location. | f12676:m2 |
def lower_backtrack_blocks(match_query, location_types): | <EOL>new_match_traversals = []<EOL>location_translations = dict()<EOL>for current_match_traversal in match_query.match_traversals:<EOL><INDENT>new_traversal = []<EOL>for step in current_match_traversal:<EOL><INDENT>if not isinstance(step.root_block, Backtrack):<EOL><INDENT>new_traversal.append(step)<EOL><DEDENT>else:<EOL><INDENT>if new_traversal:<EOL><INDENT>new_match_traversals.append(new_traversal)<EOL>new_traversal = []<EOL><DEDENT>backtrack_location = step.root_block.location<EOL>backtrack_location_type = location_types[backtrack_location]<EOL>new_root_block = QueryRoot({backtrack_location_type.name})<EOL>new_as_block = MarkLocation(backtrack_location)<EOL>if step.as_block is not None:<EOL><INDENT>location_translations[step.as_block.location] = backtrack_location<EOL><DEDENT>if step.coerce_type_block is not None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(step, match_query))<EOL><DEDENT>new_step = step._replace(root_block=new_root_block, as_block=new_as_block)<EOL>new_traversal.append(new_step)<EOL><DEDENT><DEDENT>new_match_traversals.append(new_traversal)<EOL><DEDENT>_flatten_location_translations(location_translations)<EOL>new_match_query = match_query._replace(match_traversals=new_match_traversals)<EOL>return _translate_equivalent_locations(new_match_query, location_translations)<EOL> | Lower Backtrack blocks into (QueryRoot, MarkLocation) pairs of blocks. | f12676:m3 |
def _flatten_location_translations(location_translations): | sources_to_process = set(six.iterkeys(location_translations))<EOL>def _update_translation(source):<EOL><INDENT>"""<STR_LIT>"""<EOL>destination = location_translations[source]<EOL>if destination not in location_translations:<EOL><INDENT>return destination<EOL><DEDENT>else:<EOL><INDENT>sources_to_process.discard(destination)<EOL>final_destination = _update_translation(destination)<EOL>location_translations[source] = final_destination<EOL>return final_destination<EOL><DEDENT><DEDENT>while sources_to_process:<EOL><INDENT>_update_translation(sources_to_process.pop())<EOL><DEDENT> | If location A translates to B, and B to C, then make A translate directly to C.
Args:
location_translations: dict of Location -> Location, where the key translates to the value.
Mutated in place for efficiency and simplicity of implementation. | f12676:m4 |
def _translate_equivalent_locations(match_query, location_translations): | new_match_traversals = []<EOL>def visitor_fn(expression):<EOL><INDENT>"""<STR_LIT>"""<EOL>if isinstance(expression, (ContextField, GlobalContextField)):<EOL><INDENT>old_location = expression.location.at_vertex()<EOL>new_location = location_translations.get(old_location, old_location)<EOL>if expression.location.field is not None:<EOL><INDENT>new_location = new_location.navigate_to_field(expression.location.field)<EOL><DEDENT>expression_cls = type(expression)<EOL>return expression_cls(new_location, expression.field_type)<EOL><DEDENT>elif isinstance(expression, ContextFieldExistence):<EOL><INDENT>old_location = expression.location<EOL>new_location = location_translations.get(old_location, old_location)<EOL>return ContextFieldExistence(new_location)<EOL><DEDENT>elif isinstance(expression, FoldedContextField):<EOL><INDENT>old_location = expression.fold_scope_location.base_location<EOL>new_location = location_translations.get(old_location, old_location)<EOL>fold_path = expression.fold_scope_location.fold_path<EOL>fold_field = expression.fold_scope_location.field<EOL>new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)<EOL>field_type = expression.field_type<EOL>return FoldedContextField(new_fold_scope_location, field_type)<EOL><DEDENT>else:<EOL><INDENT>return expression<EOL><DEDENT><DEDENT>for current_match_traversal in match_query.match_traversals:<EOL><INDENT>new_traversal = []<EOL>for step in current_match_traversal:<EOL><INDENT>new_step = step<EOL>if isinstance(new_step.root_block, Backtrack):<EOL><INDENT>old_location = new_step.root_block.location<EOL>if old_location in location_translations:<EOL><INDENT>new_location = location_translations[old_location]<EOL>new_step = new_step._replace(root_block=Backtrack(new_location))<EOL><DEDENT><DEDENT>if new_step.as_block is not None:<EOL><INDENT>old_location = new_step.as_block.location<EOL>if old_location in 
location_translations:<EOL><INDENT>new_location = location_translations[old_location]<EOL>new_step = new_step._replace(as_block=MarkLocation(new_location))<EOL><DEDENT><DEDENT>if new_step.where_block is not None:<EOL><INDENT>new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn)<EOL>new_step = new_step._replace(where_block=new_where_block)<EOL><DEDENT>new_traversal.append(new_step)<EOL><DEDENT>new_match_traversals.append(new_traversal)<EOL><DEDENT>new_folds = {}<EOL>for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds):<EOL><INDENT>fold_path = fold_scope_location.fold_path<EOL>fold_field = fold_scope_location.field<EOL>old_location = fold_scope_location.base_location<EOL>new_location = location_translations.get(old_location, old_location)<EOL>new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)<EOL>new_folds[new_fold_scope_location] = fold_ir_blocks<EOL><DEDENT>new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn)<EOL>new_where_block = None<EOL>if match_query.where_block is not None:<EOL><INDENT>new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn)<EOL><DEDENT>return match_query._replace(match_traversals=new_match_traversals, folds=new_folds,<EOL>output_block=new_output_block, where_block=new_where_block)<EOL> | Translate Location objects into their equivalent locations, based on the given dict. | f12676:m5 |
def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks): | new_folded_ir_blocks = []<EOL>for block in folded_ir_blocks:<EOL><INDENT>if isinstance(block, CoerceType):<EOL><INDENT>new_block = convert_coerce_type_to_instanceof_filter(block)<EOL><DEDENT>else:<EOL><INDENT>new_block = block<EOL><DEDENT>new_folded_ir_blocks.append(new_block)<EOL><DEDENT>return new_folded_ir_blocks<EOL> | Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Intended for folded IR blocks. | f12676:m6
def remove_backtrack_blocks_from_fold(folded_ir_blocks): | new_folded_ir_blocks = []<EOL>for block in folded_ir_blocks:<EOL><INDENT>if not isinstance(block, Backtrack):<EOL><INDENT>new_folded_ir_blocks.append(block)<EOL><DEDENT><DEDENT>return new_folded_ir_blocks<EOL> | Return a list of IR blocks with all Backtrack blocks removed. | f12676:m7 |
def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query): | lowered_match_queries = []<EOL>for match_query in compound_match_query.match_queries:<EOL><INDENT>new_match_query = truncate_repeated_single_step_traversals(match_query)<EOL>lowered_match_queries.append(new_match_query)<EOL><DEDENT>return compound_match_query._replace(match_queries=lowered_match_queries)<EOL> | For each sub-query, remove one-step traversals that overlap a previous traversal location. | f12676:m8 |
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None): | sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)<EOL>location_types = {<EOL>location: location_info.type<EOL>for location, location_info in query_metadata_table.registered_locations<EOL>}<EOL>coerced_locations = {<EOL>location<EOL>for location, location_info in query_metadata_table.registered_locations<EOL>if location_info.coerced_from_type is not None<EOL>}<EOL>location_to_optional_results = extract_optional_location_root_info(ir_blocks)<EOL>complex_optional_roots, location_to_optional_roots = location_to_optional_results<EOL>simple_optional_root_info = extract_simple_optional_location_info(<EOL>ir_blocks, complex_optional_roots, location_to_optional_roots)<EOL>ir_blocks = remove_end_optionals(ir_blocks)<EOL>if len(simple_optional_root_info) > <NUM_LIT:0>:<EOL><INDENT>where_filter_predicate = construct_where_filter_predicate(<EOL>query_metadata_table, simple_optional_root_info)<EOL>ir_blocks.insert(-<NUM_LIT:1>, GlobalOperationsStart())<EOL>ir_blocks.insert(-<NUM_LIT:1>, Filter(where_filter_predicate))<EOL><DEDENT>ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)<EOL>ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)<EOL>ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks)<EOL>ir_blocks = merge_consecutive_filter_clauses(ir_blocks)<EOL>ir_blocks = lower_has_substring_binary_compositions(ir_blocks)<EOL>ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table)<EOL>match_query = convert_to_match_query(ir_blocks)<EOL>match_query = lower_comparisons_to_between(match_query)<EOL>match_query = lower_backtrack_blocks(match_query, location_types)<EOL>match_query = truncate_repeated_single_step_traversals(match_query)<EOL>match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query)<EOL>new_folds = {<EOL>key: 
merge_consecutive_filter_clauses(<EOL>remove_backtrack_blocks_from_fold(<EOL>lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks)<EOL>)<EOL>)<EOL>for key, folded_ir_blocks in six.iteritems(match_query.folds)<EOL>}<EOL>match_query = match_query._replace(folds=new_folds)<EOL>compound_match_query = convert_optional_traversals_to_compound_match_query(<EOL>match_query, complex_optional_roots, location_to_optional_roots)<EOL>compound_match_query = prune_non_existent_outputs(compound_match_query)<EOL>compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query)<EOL>compound_match_query = lower_context_field_expressions(compound_match_query)<EOL>compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries(<EOL>compound_match_query)<EOL>compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points(<EOL>compound_match_query, location_types, coerced_locations)<EOL>return compound_match_query<EOL> | Lower the IR into an IR form that can be represented in MATCH queries.
Args:
ir_blocks: list of IR blocks to lower into MATCH-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
MatchQuery object containing the IR blocks organized in a MATCH-like structure | f12677:m0 |
def _expression_list_to_conjunction(expression_list): | if not isinstance(expression_list, list):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(type(expression_list).__name__, expression_list))<EOL><DEDENT>if len(expression_list) == <NUM_LIT:0>:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(expression_list))<EOL><DEDENT>elif len(expression_list) == <NUM_LIT:1>:<EOL><INDENT>return expression_list[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>remaining_conjunction = _expression_list_to_conjunction(expression_list[<NUM_LIT:1>:])<EOL>return BinaryComposition(u'<STR_LIT>', expression_list[<NUM_LIT:0>], remaining_conjunction)<EOL><DEDENT> | Return an Expression that is the `&&` of all the expressions in the given list. | f12678:m0 |
def _extract_conjuction_elements_from_expression(expression): | if isinstance(expression, BinaryComposition) and expression.operator == u'<STR_LIT>':<EOL><INDENT>for element in _extract_conjuction_elements_from_expression(expression.left):<EOL><INDENT>yield element<EOL><DEDENT>for element in _extract_conjuction_elements_from_expression(expression.right):<EOL><INDENT>yield element<EOL><DEDENT><DEDENT>else:<EOL><INDENT>yield expression<EOL><DEDENT> | Return a generator for expressions that are connected by `&&`s in the given expression. | f12678:m1 |
def _construct_field_operator_expression_dict(expression_list): | between_operators = (u'<STR_LIT>', u'<STR_LIT>')<EOL>inverse_operator = {u'<STR_LIT>': u'<STR_LIT>', u'<STR_LIT>': u'<STR_LIT>'}<EOL>local_field_to_expressions = {}<EOL>remaining_expression_list = deque([])<EOL>for expression in expression_list:<EOL><INDENT>if all((<EOL>isinstance(expression, BinaryComposition),<EOL>expression.operator in between_operators,<EOL>isinstance(expression.left, LocalField) or isinstance(expression.right, LocalField)<EOL>)):<EOL><INDENT>if isinstance(expression.right, LocalField):<EOL><INDENT>new_operator = inverse_operator[expression.operator]<EOL>new_expression = BinaryComposition(new_operator, expression.right, expression.left)<EOL><DEDENT>else:<EOL><INDENT>new_expression = expression<EOL><DEDENT>field_name = new_expression.left.field_name<EOL>expressions_dict = local_field_to_expressions.setdefault(field_name, {})<EOL>expressions_dict.setdefault(new_expression.operator, []).append(new_expression)<EOL><DEDENT>else:<EOL><INDENT>remaining_expression_list.append(expression)<EOL><DEDENT><DEDENT>return local_field_to_expressions, remaining_expression_list<EOL> | Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators | f12678:m2 |
def _lower_expressions_to_between(base_expression): | expression_list = list(_extract_conjuction_elements_from_expression(base_expression))<EOL>if len(expression_list) == <NUM_LIT:0>:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(expression_list, base_expression))<EOL><DEDENT>elif len(expression_list) == <NUM_LIT:1>:<EOL><INDENT>return base_expression<EOL><DEDENT>else:<EOL><INDENT>between_operators = (u'<STR_LIT>', u'<STR_LIT>')<EOL>local_field_to_expressions, new_expression_list = _construct_field_operator_expression_dict(<EOL>expression_list)<EOL>lowering_occurred = False<EOL>for field_name in local_field_to_expressions:<EOL><INDENT>expressions_dict = local_field_to_expressions[field_name]<EOL>if all(operator in expressions_dict and len(expressions_dict[operator]) == <NUM_LIT:1><EOL>for operator in between_operators):<EOL><INDENT>field = LocalField(field_name)<EOL>lower_bound = expressions_dict[u'<STR_LIT>'][<NUM_LIT:0>].right<EOL>upper_bound = expressions_dict[u'<STR_LIT>'][<NUM_LIT:0>].right<EOL>new_expression_list.appendleft(BetweenClause(field, lower_bound, upper_bound))<EOL>lowering_occurred = True<EOL><DEDENT>else:<EOL><INDENT>for expression in expressions_dict.values():<EOL><INDENT>new_expression_list.extend(expression)<EOL><DEDENT><DEDENT><DEDENT>if lowering_occurred:<EOL><INDENT>return _expression_list_to_conjunction(list(new_expression_list))<EOL><DEDENT>else:<EOL><INDENT>return base_expression<EOL><DEDENT><DEDENT> | Return a new expression, with any eligible comparisons lowered to `between` clauses. | f12678:m3 |
def lower_comparisons_to_between(match_query): | new_match_traversals = []<EOL>for current_match_traversal in match_query.match_traversals:<EOL><INDENT>new_traversal = []<EOL>for step in current_match_traversal:<EOL><INDENT>if step.where_block:<EOL><INDENT>expression = step.where_block.predicate<EOL>new_where_block = Filter(_lower_expressions_to_between(expression))<EOL>new_traversal.append(step._replace(where_block=new_where_block))<EOL><DEDENT>else:<EOL><INDENT>new_traversal.append(step)<EOL><DEDENT><DEDENT>new_match_traversals.append(new_traversal)<EOL><DEDENT>return match_query._replace(match_traversals=new_match_traversals)<EOL> | Return a new MatchQuery, with all eligible comparison filters lowered to between clauses. | f12678:m4 |
def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations,<EOL>complex_optional_roots, location_to_optional_roots): | new_match_traversal = []<EOL>for step in match_traversal:<EOL><INDENT>new_step = step<EOL>if isinstance(step.root_block, Traverse) and step.root_block.optional:<EOL><INDENT>current_location = step.as_block.location<EOL>optional_root_locations_stack = location_to_optional_roots.get(current_location, None)<EOL>optional_root_location = optional_root_locations_stack[-<NUM_LIT:1>]<EOL>if optional_root_location is None:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(current_location, location_to_optional_roots))<EOL><DEDENT>elif optional_root_location in omitted_locations:<EOL><INDENT>field_name = step.root_block.get_field_name()<EOL>new_predicate = filter_edge_field_non_existence(LocalField(field_name))<EOL>old_filter = new_match_traversal[-<NUM_LIT:1>].where_block<EOL>if old_filter is not None:<EOL><INDENT>new_predicate = BinaryComposition(u'<STR_LIT>', old_filter.predicate, new_predicate)<EOL><DEDENT>new_match_step = new_match_traversal[-<NUM_LIT:1>]._replace(<EOL>where_block=Filter(new_predicate))<EOL>new_match_traversal[-<NUM_LIT:1>] = new_match_step<EOL>new_step = None<EOL><DEDENT>elif optional_root_location in complex_optional_roots:<EOL><INDENT>new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name)<EOL>new_step = step._replace(root_block=new_root_block)<EOL><DEDENT>else:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>if new_step is None:<EOL><INDENT>break<EOL><DEDENT>else:<EOL><INDENT>new_match_traversal.append(new_step)<EOL><DEDENT><DEDENT>return new_match_traversal<EOL> | Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
complex_optional_roots: list of all @optional locations (location immmediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed. | f12679:m0 |
def convert_optional_traversals_to_compound_match_query(<EOL>match_query, complex_optional_roots, location_to_optional_roots): | tree = construct_optional_traversal_tree(<EOL>complex_optional_roots, location_to_optional_roots)<EOL>rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()<EOL>omitted_location_subsets = [<EOL>set(complex_optional_roots) - set(subset)<EOL>for subset in rooted_optional_root_location_subsets<EOL>]<EOL>sorted_omitted_location_subsets = sorted(omitted_location_subsets)<EOL>compound_match_traversals = []<EOL>for omitted_locations in reversed(sorted_omitted_location_subsets):<EOL><INDENT>new_match_traversals = []<EOL>for match_traversal in match_query.match_traversals:<EOL><INDENT>location = match_traversal[<NUM_LIT:0>].as_block.location<EOL>optional_root_locations_stack = location_to_optional_roots.get(location, None)<EOL>if optional_root_locations_stack is not None:<EOL><INDENT>optional_root_location = optional_root_locations_stack[-<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>optional_root_location = None<EOL><DEDENT>if optional_root_location is None or optional_root_location not in omitted_locations:<EOL><INDENT>new_match_traversal = _prune_traverse_using_omitted_locations(<EOL>match_traversal, set(omitted_locations),<EOL>complex_optional_roots, location_to_optional_roots)<EOL>new_match_traversals.append(new_match_traversal)<EOL><DEDENT>else:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>compound_match_traversals.append(new_match_traversals)<EOL><DEDENT>match_queries = [<EOL>MatchQuery(<EOL>match_traversals=match_traversals,<EOL>folds=match_query.folds,<EOL>output_block=match_query.output_block,<EOL>where_block=match_query.where_block,<EOL>)<EOL>for match_traversals in compound_match_traversals<EOL>]<EOL>return CompoundMatchQuery(match_queries=match_queries)<EOL> | Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed | f12679:m1 |
def _get_present_locations(match_traversals): | present_locations = set()<EOL>present_non_optional_locations = set()<EOL>for match_traversal in match_traversals:<EOL><INDENT>for step in match_traversal:<EOL><INDENT>if step.as_block is not None:<EOL><INDENT>location_name, _ = step.as_block.location.get_location_name()<EOL>present_locations.add(location_name)<EOL>if isinstance(step.root_block, Traverse) and not step.root_block.optional:<EOL><INDENT>present_non_optional_locations.add(location_name)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>if not present_non_optional_locations.issubset(present_locations):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(present_non_optional_locations, present_locations))<EOL><DEDENT>return present_locations, present_non_optional_locations<EOL> | Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations. | f12679:m2 |
def prune_non_existent_outputs(compound_match_query): | if len(compound_match_query.match_queries) == <NUM_LIT:1>:<EOL><INDENT>return compound_match_query<EOL><DEDENT>elif len(compound_match_query.match_queries) == <NUM_LIT:0>:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>match_queries = []<EOL>for match_query in compound_match_query.match_queries:<EOL><INDENT>match_traversals = match_query.match_traversals<EOL>output_block = match_query.output_block<EOL>present_locations_tuple = _get_present_locations(match_traversals)<EOL>present_locations, present_non_optional_locations = present_locations_tuple<EOL>new_output_fields = {}<EOL>for output_name, expression in six.iteritems(output_block.fields):<EOL><INDENT>if isinstance(expression, OutputContextField):<EOL><INDENT>location_name, _ = expression.location.get_location_name()<EOL>if location_name not in present_locations:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(expression.location, present_locations))<EOL><DEDENT>new_output_fields[output_name] = expression<EOL><DEDENT>elif isinstance(expression, FoldedContextField):<EOL><INDENT>base_location = expression.fold_scope_location.base_location<EOL>location_name, _ = base_location.get_location_name()<EOL>if location_name not in present_locations:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(base_location, present_locations))<EOL><DEDENT>new_output_fields[output_name] = expression<EOL><DEDENT>elif isinstance(expression, TernaryConditional):<EOL><INDENT>location_name, _ = expression.if_true.location.get_location_name()<EOL>if location_name in present_locations:<EOL><INDENT>if location_name in present_non_optional_locations:<EOL><INDENT>new_output_fields[output_name] = expression.if_true<EOL><DEDENT>else:<EOL><INDENT>new_output_fields[output_name] = expression<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>raise 
AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(type(expression).__name__, output_block))<EOL><DEDENT><DEDENT>match_queries.append(<EOL>MatchQuery(<EOL>match_traversals=match_traversals,<EOL>folds=match_query.folds,<EOL>output_block=ConstructResult(new_output_fields),<EOL>where_block=match_query.where_block,<EOL>)<EOL>)<EOL><DEDENT>return CompoundMatchQuery(match_queries=match_queries)<EOL><DEDENT> | Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks,
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects | f12679:m3 |
def _construct_location_to_filter_list(match_query): | <EOL>location_to_filters = {}<EOL>for match_traversal in match_query.match_traversals:<EOL><INDENT>for match_step in match_traversal:<EOL><INDENT>current_filter = match_step.where_block<EOL>if current_filter is not None:<EOL><INDENT>current_location = match_step.as_block.location<EOL>location_to_filters.setdefault(current_location, []).append(<EOL>current_filter)<EOL><DEDENT><DEDENT><DEDENT>return location_to_filters<EOL> | Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location | f12679:m4 |
def _filter_list_to_conjunction_expression(filter_list): | if not isinstance(filter_list, list):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(filter_list))<EOL><DEDENT>if any((not isinstance(filter_block, Filter) for filter_block in filter_list)):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'.format(filter_list))<EOL><DEDENT>expression_list = [filter_block.predicate for filter_block in filter_list]<EOL>return expression_list_to_conjunction(expression_list)<EOL> | Convert a list of filters to an Expression that is the conjunction of all of them. | f12679:m5 |
def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters,<EOL>already_filtered_locations): | new_match_traversal = []<EOL>newly_filtered_locations = set()<EOL>for match_step in match_traversal:<EOL><INDENT>current_location = match_step.as_block.location<EOL>if current_location in newly_filtered_locations:<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'<EOL>.format(current_location, match_traversal))<EOL><DEDENT>if all((current_location in location_to_filters,<EOL>current_location not in already_filtered_locations)):<EOL><INDENT>where_block = Filter(<EOL>_filter_list_to_conjunction_expression(<EOL>location_to_filters[current_location]<EOL>)<EOL>)<EOL>newly_filtered_locations.add(current_location)<EOL><DEDENT>else:<EOL><INDENT>where_block = None<EOL><DEDENT>new_match_step = MatchStep(<EOL>root_block=match_step.root_block,<EOL>coerce_type_block=match_step.coerce_type_block,<EOL>where_block=where_block,<EOL>as_block=match_step.as_block<EOL>)<EOL>new_match_traversal.append(new_match_step)<EOL><DEDENT>return new_match_traversal, newly_filtered_locations<EOL> | Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location | f12679:m6 |
def collect_filters_to_first_location_occurrence(compound_match_query): | new_match_queries = []<EOL>for match_query in compound_match_query.match_queries:<EOL><INDENT>location_to_filters = _construct_location_to_filter_list(match_query)<EOL>already_filtered_locations = set()<EOL>new_match_traversals = []<EOL>for match_traversal in match_query.match_traversals:<EOL><INDENT>result = _apply_filters_to_first_location_occurrence(<EOL>match_traversal, location_to_filters, already_filtered_locations)<EOL>new_match_traversal, newly_filtered_locations = result<EOL>new_match_traversals.append(new_match_traversal)<EOL>already_filtered_locations.update(newly_filtered_locations)<EOL><DEDENT>new_match_queries.append(<EOL>MatchQuery(<EOL>match_traversals=new_match_traversals,<EOL>folds=match_query.folds,<EOL>output_block=match_query.output_block,<EOL>where_block=match_query.where_block,<EOL>)<EOL>)<EOL><DEDENT>return CompoundMatchQuery(match_queries=new_match_queries)<EOL> | Collect all filters for a particular location to the first instance of the location.
Adding edge field non-exsistence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location. | f12679:m7 |
def _update_context_field_binary_composition(present_locations, expression): | if not any((isinstance(expression.left, ContextField),<EOL>isinstance(expression.right, ContextField))):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(expression))<EOL><DEDENT>if isinstance(expression.left, ContextField):<EOL><INDENT>context_field = expression.left<EOL>location_name, _ = context_field.location.get_location_name()<EOL>if location_name not in present_locations:<EOL><INDENT>return TrueLiteral<EOL><DEDENT><DEDENT>if isinstance(expression.right, ContextField):<EOL><INDENT>context_field = expression.right<EOL>location_name, _ = context_field.location.get_location_name()<EOL>if location_name not in present_locations:<EOL><INDENT>return TrueLiteral<EOL><DEDENT><DEDENT>return expression<EOL> | Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise | f12679:m8 |
def _simplify_non_context_field_binary_composition(expression): | if any((isinstance(expression.left, ContextField),<EOL>isinstance(expression.right, ContextField))):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(expression))<EOL><DEDENT>if expression.operator == u'<STR_LIT>':<EOL><INDENT>if expression.left == TrueLiteral or expression.right == TrueLiteral:<EOL><INDENT>return TrueLiteral<EOL><DEDENT>else:<EOL><INDENT>return expression<EOL><DEDENT><DEDENT>elif expression.operator == u'<STR_LIT>':<EOL><INDENT>if expression.left == TrueLiteral:<EOL><INDENT>return expression.right<EOL><DEDENT>if expression.right == TrueLiteral:<EOL><INDENT>return expression.left<EOL><DEDENT>else:<EOL><INDENT>return expression<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return expression<EOL><DEDENT> | Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of it's operands is a TrueLiteral,
and the original expression otherwise | f12679:m9 |
def _simplify_ternary_conditional(expression): | if expression.predicate == TrueLiteral:<EOL><INDENT>return expression.if_true<EOL><DEDENT>else:<EOL><INDENT>return expression<EOL><DEDENT> | Return the `if_true` clause if the predicate of the TernaryConditional is a TrueLiteral.
Args:
expression: TernaryConditional to be simplified.
Returns:
the if_true expression of the given TernaryConditional, if the predicate is True,
and the original TernaryConditional otherwise | f12679:m10 |
def _update_context_field_expression(present_locations, expression): | no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable)<EOL>if isinstance(expression, BinaryComposition):<EOL><INDENT>if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField):<EOL><INDENT>return _update_context_field_binary_composition(present_locations, expression)<EOL><DEDENT>else:<EOL><INDENT>return _simplify_non_context_field_binary_composition(expression)<EOL><DEDENT><DEDENT>elif isinstance(expression, TernaryConditional):<EOL><INDENT>return _simplify_ternary_conditional(expression)<EOL><DEDENT>elif isinstance(expression, BetweenClause):<EOL><INDENT>lower_bound = expression.lower_bound<EOL>upper_bound = expression.upper_bound<EOL>if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT>'.format(expression))<EOL><DEDENT>return expression<EOL><DEDENT>elif isinstance(expression, (OutputContextField, FoldedContextField)):<EOL><INDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(type(expression).__name__, expression))<EOL><DEDENT>elif isinstance(expression, no_op_blocks):<EOL><INDENT>return expression<EOL><DEDENT>raise AssertionError(u'<STR_LIT>'<EOL>u'<STR_LIT:{}>'.format(type(expression).__name__, expression))<EOL> | Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result. | f12679:m11 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.