repository_name
stringlengths
7
55
func_path_in_repository
stringlengths
4
223
func_name
stringlengths
1
134
whole_func_string
stringlengths
75
104k
language
stringclasses
1 value
func_code_string
stringlengths
75
104k
func_code_tokens
listlengths
19
28.4k
func_documentation_string
stringlengths
1
46.9k
func_documentation_tokens
listlengths
1
1.97k
split_name
stringclasses
1 value
func_code_url
stringlengths
87
315
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._add_to_ref
def _add_to_ref(self, rec_curr, line, lnum): """Add new fields to the current reference.""" # Written by DV Klopfenstein # Examples of record lines containing ':' include: # id: GO:0000002 # name: mitochondrial genome maintenance # namespace: biological_process # def: "The maintenance of ... # is_a: GO:0007005 ! mitochondrion organization mtch = re.match(r'^(\S+):\s*(\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2) if field_name == "id": self._chk_none(rec_curr.id, lnum) rec_curr.id = field_value elif field_name == "alt_id": rec_curr.alt_ids.append(field_value) elif field_name == "name": self._chk_none(rec_curr.name, lnum) rec_curr.name = field_value elif field_name == "namespace": self._chk_none(rec_curr.namespace, lnum) rec_curr.namespace = field_value elif field_name == "is_a": rec_curr._parents.append(field_value.split()[0]) elif field_name == "is_obsolete" and field_value == "true": rec_curr.is_obsolete = True elif field_name in self.optional_attrs: self.update_rec(rec_curr, field_name, field_value) else: self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
python
def _add_to_ref(self, rec_curr, line, lnum): """Add new fields to the current reference.""" # Written by DV Klopfenstein # Examples of record lines containing ':' include: # id: GO:0000002 # name: mitochondrial genome maintenance # namespace: biological_process # def: "The maintenance of ... # is_a: GO:0007005 ! mitochondrion organization mtch = re.match(r'^(\S+):\s*(\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2) if field_name == "id": self._chk_none(rec_curr.id, lnum) rec_curr.id = field_value elif field_name == "alt_id": rec_curr.alt_ids.append(field_value) elif field_name == "name": self._chk_none(rec_curr.name, lnum) rec_curr.name = field_value elif field_name == "namespace": self._chk_none(rec_curr.namespace, lnum) rec_curr.namespace = field_value elif field_name == "is_a": rec_curr._parents.append(field_value.split()[0]) elif field_name == "is_obsolete" and field_value == "true": rec_curr.is_obsolete = True elif field_name in self.optional_attrs: self.update_rec(rec_curr, field_name, field_value) else: self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
[ "def", "_add_to_ref", "(", "self", ",", "rec_curr", ",", "line", ",", "lnum", ")", ":", "# Written by DV Klopfenstein", "# Examples of record lines containing ':' include:", "# id: GO:0000002", "# name: mitochondrial genome maintenance", "# namespace: biological_process", "# def: \"The maintenance of ...", "# is_a: GO:0007005 ! mitochondrion organization", "mtch", "=", "re", ".", "match", "(", "r'^(\\S+):\\s*(\\S.*)$'", ",", "line", ")", "if", "mtch", ":", "field_name", "=", "mtch", ".", "group", "(", "1", ")", "field_value", "=", "mtch", ".", "group", "(", "2", ")", "if", "field_name", "==", "\"id\"", ":", "self", ".", "_chk_none", "(", "rec_curr", ".", "id", ",", "lnum", ")", "rec_curr", ".", "id", "=", "field_value", "elif", "field_name", "==", "\"alt_id\"", ":", "rec_curr", ".", "alt_ids", ".", "append", "(", "field_value", ")", "elif", "field_name", "==", "\"name\"", ":", "self", ".", "_chk_none", "(", "rec_curr", ".", "name", ",", "lnum", ")", "rec_curr", ".", "name", "=", "field_value", "elif", "field_name", "==", "\"namespace\"", ":", "self", ".", "_chk_none", "(", "rec_curr", ".", "namespace", ",", "lnum", ")", "rec_curr", ".", "namespace", "=", "field_value", "elif", "field_name", "==", "\"is_a\"", ":", "rec_curr", ".", "_parents", ".", "append", "(", "field_value", ".", "split", "(", ")", "[", "0", "]", ")", "elif", "field_name", "==", "\"is_obsolete\"", "and", "field_value", "==", "\"true\"", ":", "rec_curr", ".", "is_obsolete", "=", "True", "elif", "field_name", "in", "self", ".", "optional_attrs", ":", "self", ".", "update_rec", "(", "rec_curr", ",", "field_name", ",", "field_value", ")", "else", ":", "self", ".", "_die", "(", "\"UNEXPECTED FIELD CONTENT: {L}\\n\"", ".", "format", "(", "L", "=", "line", ")", ",", "lnum", ")" ]
Add new fields to the current reference.
[ "Add", "new", "fields", "to", "the", "current", "reference", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L130-L161
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader.update_rec
def update_rec(self, rec, name, value):
    """Update current GOTerm with an optional tag-value pair.

    Storage policy by attribute kind:
      - scalar attrs (self.attrs_scalar): set once; a duplicate raises.
      - nested attrs (self.attrs_nested, e.g. 'relationship'): accumulated
        in a defaultdict(list) stored under a leading-underscore name.
      - all other attrs: accumulated in a set.
    """
    # 'def' is a reserved word in python, do not use it as a Class attr.
    if name == "def":
        name = "defn"
    if name in self.attrs_nested:
        # Nested values are kept under '_<name>'. BUGFIX: previously the
        # hasattr check used the unmangled name (always False after the
        # first line), so every nested line re-created the defaultdict and
        # discarded all previously accumulated values. Initialize once.
        attr = '_{:s}'.format(name)
        if not hasattr(rec, attr):
            setattr(rec, attr, defaultdict(list))
        self._add_nested(rec, attr, value)
    elif name in self.attrs_scalar:
        if hasattr(rec, name):
            raise Exception("ATTR({NAME}) ALREADY SET({VAL})".format(
                NAME=name, VAL=getattr(rec, name)))
        setattr(rec, name, value)
    elif hasattr(rec, name):
        # Repeatable, non-nested attr: accumulate.
        getattr(rec, name).add(value)
    else:
        # First sighting of a repeatable attr: start its set.
        setattr(rec, name, set([value]))
python
def update_rec(self, rec, name, value): """Update current GOTerm with optional record.""" # 'def' is a reserved word in python, do not use it as a Class attr. if name == "def": name = "defn" # If we have a relationship, then we will split this into a further # dictionary. if hasattr(rec, name): if name not in self.attrs_scalar: if name not in self.attrs_nested: getattr(rec, name).add(value) else: self._add_nested(rec, name, value) else: raise Exception("ATTR({NAME}) ALREADY SET({VAL})".format( NAME=name, VAL=getattr(rec, name))) else: # Initialize new GOTerm attr if name in self.attrs_scalar: setattr(rec, name, value) elif name not in self.attrs_nested: setattr(rec, name, set([value])) else: name = '_{:s}'.format(name) setattr(rec, name, defaultdict(list)) self._add_nested(rec, name, value)
[ "def", "update_rec", "(", "self", ",", "rec", ",", "name", ",", "value", ")", ":", "# 'def' is a reserved word in python, do not use it as a Class attr.", "if", "name", "==", "\"def\"", ":", "name", "=", "\"defn\"", "# If we have a relationship, then we will split this into a further", "# dictionary.", "if", "hasattr", "(", "rec", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "attrs_scalar", ":", "if", "name", "not", "in", "self", ".", "attrs_nested", ":", "getattr", "(", "rec", ",", "name", ")", ".", "add", "(", "value", ")", "else", ":", "self", ".", "_add_nested", "(", "rec", ",", "name", ",", "value", ")", "else", ":", "raise", "Exception", "(", "\"ATTR({NAME}) ALREADY SET({VAL})\"", ".", "format", "(", "NAME", "=", "name", ",", "VAL", "=", "getattr", "(", "rec", ",", "name", ")", ")", ")", "else", ":", "# Initialize new GOTerm attr", "if", "name", "in", "self", ".", "attrs_scalar", ":", "setattr", "(", "rec", ",", "name", ",", "value", ")", "elif", "name", "not", "in", "self", ".", "attrs_nested", ":", "setattr", "(", "rec", ",", "name", ",", "set", "(", "[", "value", "]", ")", ")", "else", ":", "name", "=", "'_{:s}'", ".", "format", "(", "name", ")", "setattr", "(", "rec", ",", "name", ",", "defaultdict", "(", "list", ")", ")", "self", ".", "_add_nested", "(", "rec", ",", "name", ",", "value", ")" ]
Update current GOTerm with optional record.
[ "Update", "current", "GOTerm", "with", "optional", "record", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L163-L189
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._add_to_typedef
def _add_to_typedef(self, typedef_curr, line, lnum): """Add new fields to the current typedef.""" mtch = re.match(r'^(\S+):\s*(\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2).split('!')[0].rstrip() if field_name == "id": self._chk_none(typedef_curr.id, lnum) typedef_curr.id = field_value elif field_name == "name": self._chk_none(typedef_curr.name, lnum) typedef_curr.name = field_value elif field_name == "transitive_over": typedef_curr.transitive_over.append(field_value) elif field_name == "inverse_of": self._chk_none(typedef_curr.inverse_of, lnum) typedef_curr.inverse_of = field_value # Note: there are other tags that aren't imported here. else: self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
python
def _add_to_typedef(self, typedef_curr, line, lnum): """Add new fields to the current typedef.""" mtch = re.match(r'^(\S+):\s*(\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2).split('!')[0].rstrip() if field_name == "id": self._chk_none(typedef_curr.id, lnum) typedef_curr.id = field_value elif field_name == "name": self._chk_none(typedef_curr.name, lnum) typedef_curr.name = field_value elif field_name == "transitive_over": typedef_curr.transitive_over.append(field_value) elif field_name == "inverse_of": self._chk_none(typedef_curr.inverse_of, lnum) typedef_curr.inverse_of = field_value # Note: there are other tags that aren't imported here. else: self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
[ "def", "_add_to_typedef", "(", "self", ",", "typedef_curr", ",", "line", ",", "lnum", ")", ":", "mtch", "=", "re", ".", "match", "(", "r'^(\\S+):\\s*(\\S.*)$'", ",", "line", ")", "if", "mtch", ":", "field_name", "=", "mtch", ".", "group", "(", "1", ")", "field_value", "=", "mtch", ".", "group", "(", "2", ")", ".", "split", "(", "'!'", ")", "[", "0", "]", ".", "rstrip", "(", ")", "if", "field_name", "==", "\"id\"", ":", "self", ".", "_chk_none", "(", "typedef_curr", ".", "id", ",", "lnum", ")", "typedef_curr", ".", "id", "=", "field_value", "elif", "field_name", "==", "\"name\"", ":", "self", ".", "_chk_none", "(", "typedef_curr", ".", "name", ",", "lnum", ")", "typedef_curr", ".", "name", "=", "field_value", "elif", "field_name", "==", "\"transitive_over\"", ":", "typedef_curr", ".", "transitive_over", ".", "append", "(", "field_value", ")", "elif", "field_name", "==", "\"inverse_of\"", ":", "self", ".", "_chk_none", "(", "typedef_curr", ".", "inverse_of", ",", "lnum", ")", "typedef_curr", ".", "inverse_of", "=", "field_value", "# Note: there are other tags that aren't imported here.", "else", ":", "self", ".", "_die", "(", "\"UNEXPECTED FIELD CONTENT: {L}\\n\"", ".", "format", "(", "L", "=", "line", ")", ",", "lnum", ")" ]
Add new fields to the current typedef.
[ "Add", "new", "fields", "to", "the", "current", "typedef", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L191-L211
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._add_nested
def _add_nested(self, rec, name, value): """Adds a term's nested attributes.""" # Remove comments and split term into typedef / target term. (typedef, target_term) = value.split('!')[0].rstrip().split(' ') # Save the nested term. getattr(rec, name)[typedef].append(target_term)
python
def _add_nested(self, rec, name, value): """Adds a term's nested attributes.""" # Remove comments and split term into typedef / target term. (typedef, target_term) = value.split('!')[0].rstrip().split(' ') # Save the nested term. getattr(rec, name)[typedef].append(target_term)
[ "def", "_add_nested", "(", "self", ",", "rec", ",", "name", ",", "value", ")", ":", "# Remove comments and split term into typedef / target term.", "(", "typedef", ",", "target_term", ")", "=", "value", ".", "split", "(", "'!'", ")", "[", "0", "]", ".", "rstrip", "(", ")", ".", "split", "(", "' '", ")", "# Save the nested term.", "getattr", "(", "rec", ",", "name", ")", "[", "typedef", "]", ".", "append", "(", "target_term", ")" ]
Adds a term's nested attributes.
[ "Adds", "a", "term", "s", "nested", "attributes", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L213-L219
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._init_optional_attrs
def _init_optional_attrs(self, optional_attrs): """Prepare to store data from user-desired optional fields. Not loading these optional fields by default saves in space and speed. But allow the possibility for saving these fields, if the user desires, Including: comment consider def is_class_level is_metadata_tag is_transitive relationship replaced_by subset synonym transitive_over xref """ # Written by DV Klopfenstein # Required attributes are always loaded. All others are optionally loaded. self.attrs_req = ['id', 'alt_id', 'name', 'namespace', 'is_a', 'is_obsolete'] self.attrs_scalar = ['comment', 'defn', 'is_class_level', 'is_metadata_tag', 'is_transitive', 'transitive_over'] self.attrs_nested = frozenset(['relationship']) # Allow user to specify either: 'def' or 'defn' # 'def' is an obo field name, but 'defn' is legal Python attribute name fnc = lambda aopt: aopt if aopt != "defn" else "def" if optional_attrs is None: optional_attrs = [] elif isinstance(optional_attrs, str): optional_attrs = [fnc(optional_attrs)] if optional_attrs not in self.attrs_req else [] elif isinstance(optional_attrs, list) or isinstance(optional_attrs, set): optional_attrs = set([fnc(f) for f in optional_attrs if f not in self.attrs_req]) else: raise Exception("optional_attrs arg MUST BE A str, list, or set.") self.optional_attrs = optional_attrs
python
def _init_optional_attrs(self, optional_attrs): """Prepare to store data from user-desired optional fields. Not loading these optional fields by default saves in space and speed. But allow the possibility for saving these fields, if the user desires, Including: comment consider def is_class_level is_metadata_tag is_transitive relationship replaced_by subset synonym transitive_over xref """ # Written by DV Klopfenstein # Required attributes are always loaded. All others are optionally loaded. self.attrs_req = ['id', 'alt_id', 'name', 'namespace', 'is_a', 'is_obsolete'] self.attrs_scalar = ['comment', 'defn', 'is_class_level', 'is_metadata_tag', 'is_transitive', 'transitive_over'] self.attrs_nested = frozenset(['relationship']) # Allow user to specify either: 'def' or 'defn' # 'def' is an obo field name, but 'defn' is legal Python attribute name fnc = lambda aopt: aopt if aopt != "defn" else "def" if optional_attrs is None: optional_attrs = [] elif isinstance(optional_attrs, str): optional_attrs = [fnc(optional_attrs)] if optional_attrs not in self.attrs_req else [] elif isinstance(optional_attrs, list) or isinstance(optional_attrs, set): optional_attrs = set([fnc(f) for f in optional_attrs if f not in self.attrs_req]) else: raise Exception("optional_attrs arg MUST BE A str, list, or set.") self.optional_attrs = optional_attrs
[ "def", "_init_optional_attrs", "(", "self", ",", "optional_attrs", ")", ":", "# Written by DV Klopfenstein", "# Required attributes are always loaded. All others are optionally loaded.", "self", ".", "attrs_req", "=", "[", "'id'", ",", "'alt_id'", ",", "'name'", ",", "'namespace'", ",", "'is_a'", ",", "'is_obsolete'", "]", "self", ".", "attrs_scalar", "=", "[", "'comment'", ",", "'defn'", ",", "'is_class_level'", ",", "'is_metadata_tag'", ",", "'is_transitive'", ",", "'transitive_over'", "]", "self", ".", "attrs_nested", "=", "frozenset", "(", "[", "'relationship'", "]", ")", "# Allow user to specify either: 'def' or 'defn'", "# 'def' is an obo field name, but 'defn' is legal Python attribute name", "fnc", "=", "lambda", "aopt", ":", "aopt", "if", "aopt", "!=", "\"defn\"", "else", "\"def\"", "if", "optional_attrs", "is", "None", ":", "optional_attrs", "=", "[", "]", "elif", "isinstance", "(", "optional_attrs", ",", "str", ")", ":", "optional_attrs", "=", "[", "fnc", "(", "optional_attrs", ")", "]", "if", "optional_attrs", "not", "in", "self", ".", "attrs_req", "else", "[", "]", "elif", "isinstance", "(", "optional_attrs", ",", "list", ")", "or", "isinstance", "(", "optional_attrs", ",", "set", ")", ":", "optional_attrs", "=", "set", "(", "[", "fnc", "(", "f", ")", "for", "f", "in", "optional_attrs", "if", "f", "not", "in", "self", ".", "attrs_req", "]", ")", "else", ":", "raise", "Exception", "(", "\"optional_attrs arg MUST BE A str, list, or set.\"", ")", "self", ".", "optional_attrs", "=", "optional_attrs" ]
Prepare to store data from user-desired optional fields. Not loading these optional fields by default saves in space and speed. But allow the possibility for saving these fields, if the user desires, Including: comment consider def is_class_level is_metadata_tag is_transitive relationship replaced_by subset synonym transitive_over xref
[ "Prepare", "to", "store", "data", "from", "user", "-", "desired", "optional", "fields", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L221-L248
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._die
def _die(self, msg, lnum): """Raise an Exception if file read is unexpected.""" raise Exception("**FATAL {FILE}({LNUM}): {MSG}\n".format( FILE=self.obo_file, LNUM=lnum, MSG=msg))
python
def _die(self, msg, lnum): """Raise an Exception if file read is unexpected.""" raise Exception("**FATAL {FILE}({LNUM}): {MSG}\n".format( FILE=self.obo_file, LNUM=lnum, MSG=msg))
[ "def", "_die", "(", "self", ",", "msg", ",", "lnum", ")", ":", "raise", "Exception", "(", "\"**FATAL {FILE}({LNUM}): {MSG}\\n\"", ".", "format", "(", "FILE", "=", "self", ".", "obo_file", ",", "LNUM", "=", "lnum", ",", "MSG", "=", "msg", ")", ")" ]
Raise an Exception if file read is unexpected.
[ "Raise", "an", "Exception", "if", "file", "read", "is", "unexpected", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L251-L254
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
GOTerm.write_hier_rec
def write_hier_rec(self, gos_printed, out=sys.stdout, len_dash=1, max_depth=None, num_child=None, short_prt=False, include_only=None, go_marks=None, depth=1, dp="-"): """Write hierarchy for a GO Term record.""" # Added by DV Klopfenstein GO_id = self.id # Shortens hierarchy report by only printing the hierarchy # for the sub-set of user-specified GO terms which are connected. if include_only is not None and GO_id not in include_only: return nrp = short_prt and GO_id in gos_printed if go_marks is not None: out.write('{} '.format('>' if GO_id in go_marks else ' ')) if len_dash is not None: # Default character indicating hierarchy level is '-'. # '=' is used to indicate a hierarchical path printed in detail previously. letter = '-' if not nrp or not self.children else '=' dp = ''.join([letter]*depth) out.write('{DASHES:{N}} '.format(DASHES=dp, N=len_dash)) if num_child is not None: out.write('{N:>5} '.format(N=len(self.get_all_children()))) out.write('{GO}\tL-{L:>02}\tD-{D:>02}\t{desc}\n'.format( GO=self.id, L=self.level, D=self.depth, desc=self.name)) # Track GOs previously printed only if needed if short_prt: gos_printed.add(GO_id) # Do not print hierarchy below this turn if it has already been printed if nrp: return depth += 1 if max_depth is not None and depth > max_depth: return for p in self.children: p.write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt, include_only, go_marks, depth, dp)
python
def write_hier_rec(self, gos_printed, out=sys.stdout, len_dash=1, max_depth=None, num_child=None, short_prt=False, include_only=None, go_marks=None, depth=1, dp="-"): """Write hierarchy for a GO Term record.""" # Added by DV Klopfenstein GO_id = self.id # Shortens hierarchy report by only printing the hierarchy # for the sub-set of user-specified GO terms which are connected. if include_only is not None and GO_id not in include_only: return nrp = short_prt and GO_id in gos_printed if go_marks is not None: out.write('{} '.format('>' if GO_id in go_marks else ' ')) if len_dash is not None: # Default character indicating hierarchy level is '-'. # '=' is used to indicate a hierarchical path printed in detail previously. letter = '-' if not nrp or not self.children else '=' dp = ''.join([letter]*depth) out.write('{DASHES:{N}} '.format(DASHES=dp, N=len_dash)) if num_child is not None: out.write('{N:>5} '.format(N=len(self.get_all_children()))) out.write('{GO}\tL-{L:>02}\tD-{D:>02}\t{desc}\n'.format( GO=self.id, L=self.level, D=self.depth, desc=self.name)) # Track GOs previously printed only if needed if short_prt: gos_printed.add(GO_id) # Do not print hierarchy below this turn if it has already been printed if nrp: return depth += 1 if max_depth is not None and depth > max_depth: return for p in self.children: p.write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt, include_only, go_marks, depth, dp)
[ "def", "write_hier_rec", "(", "self", ",", "gos_printed", ",", "out", "=", "sys", ".", "stdout", ",", "len_dash", "=", "1", ",", "max_depth", "=", "None", ",", "num_child", "=", "None", ",", "short_prt", "=", "False", ",", "include_only", "=", "None", ",", "go_marks", "=", "None", ",", "depth", "=", "1", ",", "dp", "=", "\"-\"", ")", ":", "# Added by DV Klopfenstein", "GO_id", "=", "self", ".", "id", "# Shortens hierarchy report by only printing the hierarchy", "# for the sub-set of user-specified GO terms which are connected.", "if", "include_only", "is", "not", "None", "and", "GO_id", "not", "in", "include_only", ":", "return", "nrp", "=", "short_prt", "and", "GO_id", "in", "gos_printed", "if", "go_marks", "is", "not", "None", ":", "out", ".", "write", "(", "'{} '", ".", "format", "(", "'>'", "if", "GO_id", "in", "go_marks", "else", "' '", ")", ")", "if", "len_dash", "is", "not", "None", ":", "# Default character indicating hierarchy level is '-'.", "# '=' is used to indicate a hierarchical path printed in detail previously.", "letter", "=", "'-'", "if", "not", "nrp", "or", "not", "self", ".", "children", "else", "'='", "dp", "=", "''", ".", "join", "(", "[", "letter", "]", "*", "depth", ")", "out", ".", "write", "(", "'{DASHES:{N}} '", ".", "format", "(", "DASHES", "=", "dp", ",", "N", "=", "len_dash", ")", ")", "if", "num_child", "is", "not", "None", ":", "out", ".", "write", "(", "'{N:>5} '", ".", "format", "(", "N", "=", "len", "(", "self", ".", "get_all_children", "(", ")", ")", ")", ")", "out", ".", "write", "(", "'{GO}\\tL-{L:>02}\\tD-{D:>02}\\t{desc}\\n'", ".", "format", "(", "GO", "=", "self", ".", "id", ",", "L", "=", "self", ".", "level", ",", "D", "=", "self", ".", "depth", ",", "desc", "=", "self", ".", "name", ")", ")", "# Track GOs previously printed only if needed", "if", "short_prt", ":", "gos_printed", ".", "add", "(", "GO_id", ")", "# Do not print hierarchy below this turn if it has already been printed", "if", "nrp", ":", "return", "depth", 
"+=", "1", "if", "max_depth", "is", "not", "None", "and", "depth", ">", "max_depth", ":", "return", "for", "p", "in", "self", ".", "children", ":", "p", ".", "write_hier_rec", "(", "gos_printed", ",", "out", ",", "len_dash", ",", "max_depth", ",", "num_child", ",", "short_prt", ",", "include_only", ",", "go_marks", ",", "depth", ",", "dp", ")" ]
Write hierarchy for a GO Term record.
[ "Write", "hierarchy", "for", "a", "GO", "Term", "record", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L349-L385
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
GODag.write_hier_all
def write_hier_all(self, out=sys.stdout, len_dash=1, max_depth=None,
                   num_child=None, short_prt=False):
    """Write the hierarchy beneath each of the three GO root terms."""
    # Roots: biological_process, molecular_function, cellular_component.
    roots = ('GO:0008150', 'GO:0003674', 'GO:0005575')
    for root_id in roots:
        self.write_hier(root_id, out, len_dash, max_depth, num_child,
                        short_prt, None)
python
def write_hier_all(self, out=sys.stdout, len_dash=1, max_depth=None, num_child=None, short_prt=False): """Write hierarchy for all GO Terms in obo file.""" # Print: [biological_process, molecular_function, and cellular_component] for go_id in ['GO:0008150', 'GO:0003674', 'GO:0005575']: self.write_hier(go_id, out, len_dash, max_depth, num_child, short_prt, None)
[ "def", "write_hier_all", "(", "self", ",", "out", "=", "sys", ".", "stdout", ",", "len_dash", "=", "1", ",", "max_depth", "=", "None", ",", "num_child", "=", "None", ",", "short_prt", "=", "False", ")", ":", "# Print: [biological_process, molecular_function, and cellular_component]", "for", "go_id", "in", "[", "'GO:0008150'", ",", "'GO:0003674'", ",", "'GO:0005575'", "]", ":", "self", ".", "write_hier", "(", "go_id", ",", "out", ",", "len_dash", ",", "max_depth", ",", "num_child", ",", "short_prt", ",", "None", ")" ]
Write hierarchy for all GO Terms in obo file.
[ "Write", "hierarchy", "for", "all", "GO", "Terms", "in", "obo", "file", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L492-L497
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
GODag.write_hier
def write_hier(self, GO_id, out=sys.stdout, len_dash=1, max_depth=None,
               num_child=None, short_prt=False, include_only=None,
               go_marks=None):
    """Write the hierarchy rooted at a single GO term."""
    # Fresh 'printed' set so repeated subtrees are tracked per call.
    printed = set()
    rec = self[GO_id]
    rec.write_hier_rec(printed, out, len_dash, max_depth, num_child,
                       short_prt, include_only, go_marks)
python
def write_hier(self, GO_id, out=sys.stdout, len_dash=1, max_depth=None, num_child=None, short_prt=False, include_only=None, go_marks=None): """Write hierarchy for a GO Term.""" gos_printed = set() self[GO_id].write_hier_rec(gos_printed, out, len_dash, max_depth, num_child, short_prt, include_only, go_marks)
[ "def", "write_hier", "(", "self", ",", "GO_id", ",", "out", "=", "sys", ".", "stdout", ",", "len_dash", "=", "1", ",", "max_depth", "=", "None", ",", "num_child", "=", "None", ",", "short_prt", "=", "False", ",", "include_only", "=", "None", ",", "go_marks", "=", "None", ")", ":", "gos_printed", "=", "set", "(", ")", "self", "[", "GO_id", "]", ".", "write_hier_rec", "(", "gos_printed", ",", "out", ",", "len_dash", ",", "max_depth", ",", "num_child", ",", "short_prt", ",", "include_only", ",", "go_marks", ")" ]
Write hierarchy for a GO Term.
[ "Write", "hierarchy", "for", "a", "GO", "Term", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L499-L505
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
GODag.paths_to_top
def paths_to_top(self, term):
    """Return every path from a root node down to *term* (inclusive).

    Each path is ordered top -> bottom: it starts at a root (level 0)
    and ends with the requested term.

    Parameters:
    -----------
    - term: the id of the GO term, where the paths begin
      (i.e. the accession 'GO:0003682')

    Returns:
    --------
    - a list of lists of GO Terms, or None if the term is unknown
    """
    # error handling consistent with original authors
    if term not in self:
        print("Term %s not found!" % term, file=sys.stderr)
        return

    def _climb(rec):
        # A root term (level 0) terminates every path.
        if rec.level == 0:
            return [[rec]]
        collected = []
        for parent in rec.parents:
            for path in _climb(parent):
                path.append(rec)
                collected.append(path)
        return collected

    return _climb(self[term])
python
def paths_to_top(self, term): """ Returns all possible paths to the root node Each path includes the term given. The order of the path is top -> bottom, i.e. it starts with the root and ends with the given term (inclusively). Parameters: ----------- - term: the id of the GO term, where the paths begin (i.e. the accession 'GO:0003682') Returns: -------- - a list of lists of GO Terms """ # error handling consistent with original authors if term not in self: print("Term %s not found!" % term, file=sys.stderr) return def _paths_to_top_recursive(rec): if rec.level == 0: return [[rec]] paths = [] for parent in rec.parents: top_paths = _paths_to_top_recursive(parent) for top_path in top_paths: top_path.append(rec) paths.append(top_path) return paths go_term = self[term] return _paths_to_top_recursive(go_term)
[ "def", "paths_to_top", "(", "self", ",", "term", ")", ":", "# error handling consistent with original authors", "if", "term", "not", "in", "self", ":", "print", "(", "\"Term %s not found!\"", "%", "term", ",", "file", "=", "sys", ".", "stderr", ")", "return", "def", "_paths_to_top_recursive", "(", "rec", ")", ":", "if", "rec", ".", "level", "==", "0", ":", "return", "[", "[", "rec", "]", "]", "paths", "=", "[", "]", "for", "parent", "in", "rec", ".", "parents", ":", "top_paths", "=", "_paths_to_top_recursive", "(", "parent", ")", "for", "top_path", "in", "top_paths", ":", "top_path", ".", "append", "(", "rec", ")", "paths", ".", "append", "(", "top_path", ")", "return", "paths", "go_term", "=", "self", "[", "term", "]", "return", "_paths_to_top_recursive", "(", "go_term", ")" ]
Returns all possible paths to the root node Each path includes the term given. The order of the path is top -> bottom, i.e. it starts with the root and ends with the given term (inclusively). Parameters: ----------- - term: the id of the GO term, where the paths begin (i.e. the accession 'GO:0003682') Returns: -------- - a list of lists of GO Terms
[ "Returns", "all", "possible", "paths", "to", "the", "root", "node" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L522-L556
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
GODag.make_graph_pydot
def make_graph_pydot(self, recs, nodecolor, edgecolor, dpi,
                     draw_parents=True, draw_children=True):
    """draw AMIGO style network, lineage containing one query record."""
    import pydot
    # Directed Graph
    dag = pydot.Dot(graph_type='digraph', dpi="{}".format(dpi))

    # Collect every lineage edge touching the query records.
    edgeset = set()
    usr_ids = [rec.id for rec in recs]
    for rec in recs:
        if draw_parents:
            edgeset.update(rec.get_all_parent_edges())
        if draw_children:
            edgeset.update(rec.get_all_child_edges())

    lw = self._label_wrap
    rec_id_set = set([rec_id for endpts in edgeset for rec_id in endpts])
    nodes = {}
    for ID in rec_id_set:
        nodes[str(ID)] = pydot.Node(
            lw(ID).replace("GO:", ""),  # Node name
            shape="box",
            style="rounded, filled",
            # Highlight query terms in plum:
            fillcolor="beige" if ID not in usr_ids else "plum",
            color=nodecolor)

    # add nodes explicitly via add_node
    for rec_id, node in nodes.items():
        dag.add_node(node)

    for src, target in edgeset:
        # default layout in graphviz is top->bottom, so we invert
        # the direction and plot using dir="back"
        dag.add_edge(pydot.Edge(nodes[target], nodes[src],
                                shape="normal",
                                color=edgecolor,
                                label="is_a",
                                dir="back"))
    return dag
python
def make_graph_pydot(self, recs, nodecolor, edgecolor, dpi, draw_parents=True, draw_children=True): """draw AMIGO style network, lineage containing one query record.""" import pydot G = pydot.Dot(graph_type='digraph', dpi="{}".format(dpi)) # Directed Graph edgeset = set() usr_ids = [rec.id for rec in recs] for rec in recs: if draw_parents: edgeset.update(rec.get_all_parent_edges()) if draw_children: edgeset.update(rec.get_all_child_edges()) lw = self._label_wrap rec_id_set = set([rec_id for endpts in edgeset for rec_id in endpts]) nodes = {str(ID):pydot.Node( lw(ID).replace("GO:",""), # Node name shape="box", style="rounded, filled", # Highlight query terms in plum: fillcolor="beige" if ID not in usr_ids else "plum", color=nodecolor) for ID in rec_id_set} # add nodes explicitly via add_node for rec_id, node in nodes.items(): G.add_node(node) for src, target in edgeset: # default layout in graphviz is top->bottom, so we invert # the direction and plot using dir="back" G.add_edge(pydot.Edge(nodes[target], nodes[src], shape="normal", color=edgecolor, label="is_a", dir="back")) return G
[ "def", "make_graph_pydot", "(", "self", ",", "recs", ",", "nodecolor", ",", "edgecolor", ",", "dpi", ",", "draw_parents", "=", "True", ",", "draw_children", "=", "True", ")", ":", "import", "pydot", "G", "=", "pydot", ".", "Dot", "(", "graph_type", "=", "'digraph'", ",", "dpi", "=", "\"{}\"", ".", "format", "(", "dpi", ")", ")", "# Directed Graph", "edgeset", "=", "set", "(", ")", "usr_ids", "=", "[", "rec", ".", "id", "for", "rec", "in", "recs", "]", "for", "rec", "in", "recs", ":", "if", "draw_parents", ":", "edgeset", ".", "update", "(", "rec", ".", "get_all_parent_edges", "(", ")", ")", "if", "draw_children", ":", "edgeset", ".", "update", "(", "rec", ".", "get_all_child_edges", "(", ")", ")", "lw", "=", "self", ".", "_label_wrap", "rec_id_set", "=", "set", "(", "[", "rec_id", "for", "endpts", "in", "edgeset", "for", "rec_id", "in", "endpts", "]", ")", "nodes", "=", "{", "str", "(", "ID", ")", ":", "pydot", ".", "Node", "(", "lw", "(", "ID", ")", ".", "replace", "(", "\"GO:\"", ",", "\"\"", ")", ",", "# Node name", "shape", "=", "\"box\"", ",", "style", "=", "\"rounded, filled\"", ",", "# Highlight query terms in plum:", "fillcolor", "=", "\"beige\"", "if", "ID", "not", "in", "usr_ids", "else", "\"plum\"", ",", "color", "=", "nodecolor", ")", "for", "ID", "in", "rec_id_set", "}", "# add nodes explicitly via add_node", "for", "rec_id", ",", "node", "in", "nodes", ".", "items", "(", ")", ":", "G", ".", "add_node", "(", "node", ")", "for", "src", ",", "target", "in", "edgeset", ":", "# default layout in graphviz is top->bottom, so we invert", "# the direction and plot using dir=\"back\"", "G", ".", "add_edge", "(", "pydot", ".", "Edge", "(", "nodes", "[", "target", "]", ",", "nodes", "[", "src", "]", ",", "shape", "=", "\"normal\"", ",", "color", "=", "edgecolor", ",", "label", "=", "\"is_a\"", ",", "dir", "=", "\"back\"", ")", ")", "return", "G" ]
draw AMIGO style network, lineage containing one query record.
[ "draw", "AMIGO", "style", "network", "lineage", "containing", "one", "query", "record", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L563-L601
ga4gh/ga4gh-server
ga4gh/server/sqlite_backend.py
sqliteRowsToDicts
def sqliteRowsToDicts(sqliteRows): """ Unpacks sqlite rows as returned by fetchall into an array of simple dicts. :param sqliteRows: array of rows returned from fetchall DB call :return: array of dicts, keyed by the column names. """ return map(lambda r: dict(zip(r.keys(), r)), sqliteRows)
python
def sqliteRowsToDicts(sqliteRows): """ Unpacks sqlite rows as returned by fetchall into an array of simple dicts. :param sqliteRows: array of rows returned from fetchall DB call :return: array of dicts, keyed by the column names. """ return map(lambda r: dict(zip(r.keys(), r)), sqliteRows)
[ "def", "sqliteRowsToDicts", "(", "sqliteRows", ")", ":", "return", "map", "(", "lambda", "r", ":", "dict", "(", "zip", "(", "r", ".", "keys", "(", ")", ",", "r", ")", ")", ",", "sqliteRows", ")" ]
Unpacks sqlite rows as returned by fetchall into an array of simple dicts. :param sqliteRows: array of rows returned from fetchall DB call :return: array of dicts, keyed by the column names.
[ "Unpacks", "sqlite", "rows", "as", "returned", "by", "fetchall", "into", "an", "array", "of", "simple", "dicts", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L13-L21
ga4gh/ga4gh-server
ga4gh/server/sqlite_backend.py
limitsSql
def limitsSql(startIndex=0, maxResults=0): """ Construct a SQL LIMIT clause """ if startIndex and maxResults: return " LIMIT {}, {}".format(startIndex, maxResults) elif startIndex: raise Exception("startIndex was provided, but maxResults was not") elif maxResults: return " LIMIT {}".format(maxResults) else: return ""
python
def limitsSql(startIndex=0, maxResults=0): """ Construct a SQL LIMIT clause """ if startIndex and maxResults: return " LIMIT {}, {}".format(startIndex, maxResults) elif startIndex: raise Exception("startIndex was provided, but maxResults was not") elif maxResults: return " LIMIT {}".format(maxResults) else: return ""
[ "def", "limitsSql", "(", "startIndex", "=", "0", ",", "maxResults", "=", "0", ")", ":", "if", "startIndex", "and", "maxResults", ":", "return", "\" LIMIT {}, {}\"", ".", "format", "(", "startIndex", ",", "maxResults", ")", "elif", "startIndex", ":", "raise", "Exception", "(", "\"startIndex was provided, but maxResults was not\"", ")", "elif", "maxResults", ":", "return", "\" LIMIT {}\"", ".", "format", "(", "maxResults", ")", "else", ":", "return", "\"\"" ]
Construct a SQL LIMIT clause
[ "Construct", "a", "SQL", "LIMIT", "clause" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L35-L46
ga4gh/ga4gh-server
ga4gh/server/sqlite_backend.py
iterativeFetch
def iterativeFetch(query, batchSize=default_batch_size): """ Returns rows of a sql fetch query on demand """ while True: rows = query.fetchmany(batchSize) if not rows: break rowDicts = sqliteRowsToDicts(rows) for rowDict in rowDicts: yield rowDict
python
def iterativeFetch(query, batchSize=default_batch_size): """ Returns rows of a sql fetch query on demand """ while True: rows = query.fetchmany(batchSize) if not rows: break rowDicts = sqliteRowsToDicts(rows) for rowDict in rowDicts: yield rowDict
[ "def", "iterativeFetch", "(", "query", ",", "batchSize", "=", "default_batch_size", ")", ":", "while", "True", ":", "rows", "=", "query", ".", "fetchmany", "(", "batchSize", ")", "if", "not", "rows", ":", "break", "rowDicts", "=", "sqliteRowsToDicts", "(", "rows", ")", "for", "rowDict", "in", "rowDicts", ":", "yield", "rowDict" ]
Returns rows of a sql fetch query on demand
[ "Returns", "rows", "of", "a", "sql", "fetch", "query", "on", "demand" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/sqlite_backend.py#L52-L62
ga4gh/ga4gh-server
ga4gh/server/paging.py
_parsePageToken
def _parsePageToken(pageToken, numValues): """ Parses the specified pageToken and returns a list of the specified number of values. Page tokens are assumed to consist of a fixed number of integers seperated by colons. If the page token does not conform to this specification, raise a InvalidPageToken exception. """ tokens = pageToken.split(":") if len(tokens) != numValues: msg = "Invalid number of values in page token" raise exceptions.BadPageTokenException(msg) try: values = map(int, tokens) except ValueError: msg = "Malformed integers in page token" raise exceptions.BadPageTokenException(msg) return values
python
def _parsePageToken(pageToken, numValues): """ Parses the specified pageToken and returns a list of the specified number of values. Page tokens are assumed to consist of a fixed number of integers seperated by colons. If the page token does not conform to this specification, raise a InvalidPageToken exception. """ tokens = pageToken.split(":") if len(tokens) != numValues: msg = "Invalid number of values in page token" raise exceptions.BadPageTokenException(msg) try: values = map(int, tokens) except ValueError: msg = "Malformed integers in page token" raise exceptions.BadPageTokenException(msg) return values
[ "def", "_parsePageToken", "(", "pageToken", ",", "numValues", ")", ":", "tokens", "=", "pageToken", ".", "split", "(", "\":\"", ")", "if", "len", "(", "tokens", ")", "!=", "numValues", ":", "msg", "=", "\"Invalid number of values in page token\"", "raise", "exceptions", ".", "BadPageTokenException", "(", "msg", ")", "try", ":", "values", "=", "map", "(", "int", ",", "tokens", ")", "except", "ValueError", ":", "msg", "=", "\"Malformed integers in page token\"", "raise", "exceptions", ".", "BadPageTokenException", "(", "msg", ")", "return", "values" ]
Parses the specified pageToken and returns a list of the specified number of values. Page tokens are assumed to consist of a fixed number of integers seperated by colons. If the page token does not conform to this specification, raise a InvalidPageToken exception.
[ "Parses", "the", "specified", "pageToken", "and", "returns", "a", "list", "of", "the", "specified", "number", "of", "values", ".", "Page", "tokens", "are", "assumed", "to", "consist", "of", "a", "fixed", "number", "of", "integers", "seperated", "by", "colons", ".", "If", "the", "page", "token", "does", "not", "conform", "to", "this", "specification", "raise", "a", "InvalidPageToken", "exception", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L13-L30
ga4gh/ga4gh-server
ga4gh/server/paging.py
_parseIntegerArgument
def _parseIntegerArgument(args, key, defaultValue): """ Attempts to parse the specified key in the specified argument dictionary into an integer. If the argument cannot be parsed, raises a BadRequestIntegerException. If the key is not present, return the specified default value. """ ret = defaultValue try: if key in args: try: ret = int(args[key]) except ValueError: raise exceptions.BadRequestIntegerException(key, args[key]) except TypeError: raise Exception((key, args)) return ret
python
def _parseIntegerArgument(args, key, defaultValue): """ Attempts to parse the specified key in the specified argument dictionary into an integer. If the argument cannot be parsed, raises a BadRequestIntegerException. If the key is not present, return the specified default value. """ ret = defaultValue try: if key in args: try: ret = int(args[key]) except ValueError: raise exceptions.BadRequestIntegerException(key, args[key]) except TypeError: raise Exception((key, args)) return ret
[ "def", "_parseIntegerArgument", "(", "args", ",", "key", ",", "defaultValue", ")", ":", "ret", "=", "defaultValue", "try", ":", "if", "key", "in", "args", ":", "try", ":", "ret", "=", "int", "(", "args", "[", "key", "]", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "BadRequestIntegerException", "(", "key", ",", "args", "[", "key", "]", ")", "except", "TypeError", ":", "raise", "Exception", "(", "(", "key", ",", "args", ")", ")", "return", "ret" ]
Attempts to parse the specified key in the specified argument dictionary into an integer. If the argument cannot be parsed, raises a BadRequestIntegerException. If the key is not present, return the specified default value.
[ "Attempts", "to", "parse", "the", "specified", "key", "in", "the", "specified", "argument", "dictionary", "into", "an", "integer", ".", "If", "the", "argument", "cannot", "be", "parsed", "raises", "a", "BadRequestIntegerException", ".", "If", "the", "key", "is", "not", "present", "return", "the", "specified", "default", "value", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L33-L49
ga4gh/ga4gh-server
ga4gh/server/paging.py
IntervalIterator._initialiseIteration
def _initialiseIteration(self): """ Starts a new iteration. """ self._searchIterator = self._search( self._request.start, self._request.end if self._request.end != 0 else None) self._currentObject = next(self._searchIterator, None) if self._currentObject is not None: self._nextObject = next(self._searchIterator, None) self._searchAnchor = self._request.start self._distanceFromAnchor = 0 firstObjectStart = self._getStart(self._currentObject) if firstObjectStart > self._request.start: self._searchAnchor = firstObjectStart
python
def _initialiseIteration(self): """ Starts a new iteration. """ self._searchIterator = self._search( self._request.start, self._request.end if self._request.end != 0 else None) self._currentObject = next(self._searchIterator, None) if self._currentObject is not None: self._nextObject = next(self._searchIterator, None) self._searchAnchor = self._request.start self._distanceFromAnchor = 0 firstObjectStart = self._getStart(self._currentObject) if firstObjectStart > self._request.start: self._searchAnchor = firstObjectStart
[ "def", "_initialiseIteration", "(", "self", ")", ":", "self", ".", "_searchIterator", "=", "self", ".", "_search", "(", "self", ".", "_request", ".", "start", ",", "self", ".", "_request", ".", "end", "if", "self", ".", "_request", ".", "end", "!=", "0", "else", "None", ")", "self", ".", "_currentObject", "=", "next", "(", "self", ".", "_searchIterator", ",", "None", ")", "if", "self", ".", "_currentObject", "is", "not", "None", ":", "self", ".", "_nextObject", "=", "next", "(", "self", ".", "_searchIterator", ",", "None", ")", "self", ".", "_searchAnchor", "=", "self", ".", "_request", ".", "start", "self", ".", "_distanceFromAnchor", "=", "0", "firstObjectStart", "=", "self", ".", "_getStart", "(", "self", ".", "_currentObject", ")", "if", "firstObjectStart", ">", "self", ".", "_request", ".", "start", ":", "self", ".", "_searchAnchor", "=", "firstObjectStart" ]
Starts a new iteration.
[ "Starts", "a", "new", "iteration", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L83-L97
ga4gh/ga4gh-server
ga4gh/server/paging.py
IntervalIterator._pickUpIteration
def _pickUpIteration(self, searchAnchor, objectsToSkip): """ Picks up iteration from a previously provided page token. There are two different phases here: 1) We are iterating over the initial set of intervals in which start is < the search start coorindate. 2) We are iterating over the remaining intervals in which start >= to the search start coordinate. """ self._searchAnchor = searchAnchor self._distanceFromAnchor = objectsToSkip self._searchIterator = self._search( searchAnchor, self._request.end if self._request.end != 0 else None) obj = next(self._searchIterator) if searchAnchor == self._request.start: # This is the initial set of intervals, we just skip forward # objectsToSkip positions for _ in range(objectsToSkip): obj = next(self._searchIterator) else: # Now, we are past this initial set of intervals. # First, we need to skip forward over the intervals where # start < searchAnchor, as we've seen these already. while self._getStart(obj) < searchAnchor: obj = next(self._searchIterator) # Now, we skip over objectsToSkip objects such that # start == searchAnchor for _ in range(objectsToSkip): if self._getStart(obj) != searchAnchor: raise exceptions.BadPageTokenException obj = next(self._searchIterator) self._currentObject = obj self._nextObject = next(self._searchIterator, None)
python
def _pickUpIteration(self, searchAnchor, objectsToSkip): """ Picks up iteration from a previously provided page token. There are two different phases here: 1) We are iterating over the initial set of intervals in which start is < the search start coorindate. 2) We are iterating over the remaining intervals in which start >= to the search start coordinate. """ self._searchAnchor = searchAnchor self._distanceFromAnchor = objectsToSkip self._searchIterator = self._search( searchAnchor, self._request.end if self._request.end != 0 else None) obj = next(self._searchIterator) if searchAnchor == self._request.start: # This is the initial set of intervals, we just skip forward # objectsToSkip positions for _ in range(objectsToSkip): obj = next(self._searchIterator) else: # Now, we are past this initial set of intervals. # First, we need to skip forward over the intervals where # start < searchAnchor, as we've seen these already. while self._getStart(obj) < searchAnchor: obj = next(self._searchIterator) # Now, we skip over objectsToSkip objects such that # start == searchAnchor for _ in range(objectsToSkip): if self._getStart(obj) != searchAnchor: raise exceptions.BadPageTokenException obj = next(self._searchIterator) self._currentObject = obj self._nextObject = next(self._searchIterator, None)
[ "def", "_pickUpIteration", "(", "self", ",", "searchAnchor", ",", "objectsToSkip", ")", ":", "self", ".", "_searchAnchor", "=", "searchAnchor", "self", ".", "_distanceFromAnchor", "=", "objectsToSkip", "self", ".", "_searchIterator", "=", "self", ".", "_search", "(", "searchAnchor", ",", "self", ".", "_request", ".", "end", "if", "self", ".", "_request", ".", "end", "!=", "0", "else", "None", ")", "obj", "=", "next", "(", "self", ".", "_searchIterator", ")", "if", "searchAnchor", "==", "self", ".", "_request", ".", "start", ":", "# This is the initial set of intervals, we just skip forward", "# objectsToSkip positions", "for", "_", "in", "range", "(", "objectsToSkip", ")", ":", "obj", "=", "next", "(", "self", ".", "_searchIterator", ")", "else", ":", "# Now, we are past this initial set of intervals.", "# First, we need to skip forward over the intervals where", "# start < searchAnchor, as we've seen these already.", "while", "self", ".", "_getStart", "(", "obj", ")", "<", "searchAnchor", ":", "obj", "=", "next", "(", "self", ".", "_searchIterator", ")", "# Now, we skip over objectsToSkip objects such that", "# start == searchAnchor", "for", "_", "in", "range", "(", "objectsToSkip", ")", ":", "if", "self", ".", "_getStart", "(", "obj", ")", "!=", "searchAnchor", ":", "raise", "exceptions", ".", "BadPageTokenException", "obj", "=", "next", "(", "self", ".", "_searchIterator", ")", "self", ".", "_currentObject", "=", "obj", "self", ".", "_nextObject", "=", "next", "(", "self", ".", "_searchIterator", ",", "None", ")" ]
Picks up iteration from a previously provided page token. There are two different phases here: 1) We are iterating over the initial set of intervals in which start is < the search start coorindate. 2) We are iterating over the remaining intervals in which start >= to the search start coordinate.
[ "Picks", "up", "iteration", "from", "a", "previously", "provided", "page", "token", ".", "There", "are", "two", "different", "phases", "here", ":", "1", ")", "We", "are", "iterating", "over", "the", "initial", "set", "of", "intervals", "in", "which", "start", "is", "<", "the", "search", "start", "coorindate", ".", "2", ")", "We", "are", "iterating", "over", "the", "remaining", "intervals", "in", "which", "start", ">", "=", "to", "the", "search", "start", "coordinate", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L99-L132
ga4gh/ga4gh-server
ga4gh/server/paging.py
IntervalIterator.next
def next(self): """ Returns the next (object, nextPageToken) pair. """ if self._currentObject is None: raise StopIteration() nextPageToken = None if self._nextObject is not None: start = self._getStart(self._nextObject) # If start > the search anchor, move the search anchor. Otherwise, # increment the distance from the anchor. if start > self._searchAnchor: self._searchAnchor = start self._distanceFromAnchor = 0 else: self._distanceFromAnchor += 1 nextPageToken = "{}:{}".format( self._searchAnchor, self._distanceFromAnchor) ret = self._extractProtocolObject(self._currentObject), nextPageToken self._currentObject = self._nextObject self._nextObject = next(self._searchIterator, None) return ret
python
def next(self): """ Returns the next (object, nextPageToken) pair. """ if self._currentObject is None: raise StopIteration() nextPageToken = None if self._nextObject is not None: start = self._getStart(self._nextObject) # If start > the search anchor, move the search anchor. Otherwise, # increment the distance from the anchor. if start > self._searchAnchor: self._searchAnchor = start self._distanceFromAnchor = 0 else: self._distanceFromAnchor += 1 nextPageToken = "{}:{}".format( self._searchAnchor, self._distanceFromAnchor) ret = self._extractProtocolObject(self._currentObject), nextPageToken self._currentObject = self._nextObject self._nextObject = next(self._searchIterator, None) return ret
[ "def", "next", "(", "self", ")", ":", "if", "self", ".", "_currentObject", "is", "None", ":", "raise", "StopIteration", "(", ")", "nextPageToken", "=", "None", "if", "self", ".", "_nextObject", "is", "not", "None", ":", "start", "=", "self", ".", "_getStart", "(", "self", ".", "_nextObject", ")", "# If start > the search anchor, move the search anchor. Otherwise,", "# increment the distance from the anchor.", "if", "start", ">", "self", ".", "_searchAnchor", ":", "self", ".", "_searchAnchor", "=", "start", "self", ".", "_distanceFromAnchor", "=", "0", "else", ":", "self", ".", "_distanceFromAnchor", "+=", "1", "nextPageToken", "=", "\"{}:{}\"", ".", "format", "(", "self", ".", "_searchAnchor", ",", "self", ".", "_distanceFromAnchor", ")", "ret", "=", "self", ".", "_extractProtocolObject", "(", "self", ".", "_currentObject", ")", ",", "nextPageToken", "self", ".", "_currentObject", "=", "self", ".", "_nextObject", "self", ".", "_nextObject", "=", "next", "(", "self", ".", "_searchIterator", ",", "None", ")", "return", "ret" ]
Returns the next (object, nextPageToken) pair.
[ "Returns", "the", "next", "(", "object", "nextPageToken", ")", "pair", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L134-L155
ga4gh/ga4gh-server
ga4gh/server/paging.py
VariantAnnotationsIntervalIterator.filterVariantAnnotation
def filterVariantAnnotation(self, vann): """ Returns true when an annotation should be included. """ # TODO reintroduce feature ID search ret = False if len(self._effects) != 0 and not vann.transcript_effects: return False elif len(self._effects) == 0: return True for teff in vann.transcript_effects: if self.filterEffect(teff): ret = True return ret
python
def filterVariantAnnotation(self, vann): """ Returns true when an annotation should be included. """ # TODO reintroduce feature ID search ret = False if len(self._effects) != 0 and not vann.transcript_effects: return False elif len(self._effects) == 0: return True for teff in vann.transcript_effects: if self.filterEffect(teff): ret = True return ret
[ "def", "filterVariantAnnotation", "(", "self", ",", "vann", ")", ":", "# TODO reintroduce feature ID search", "ret", "=", "False", "if", "len", "(", "self", ".", "_effects", ")", "!=", "0", "and", "not", "vann", ".", "transcript_effects", ":", "return", "False", "elif", "len", "(", "self", ".", "_effects", ")", "==", "0", ":", "return", "True", "for", "teff", "in", "vann", ".", "transcript_effects", ":", "if", "self", ".", "filterEffect", "(", "teff", ")", ":", "ret", "=", "True", "return", "ret" ]
Returns true when an annotation should be included.
[ "Returns", "true", "when", "an", "annotation", "should", "be", "included", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L246-L259
ga4gh/ga4gh-server
ga4gh/server/paging.py
VariantAnnotationsIntervalIterator.filterEffect
def filterEffect(self, teff): """ Returns true when any of the transcript effects are present in the request. """ ret = False for effect in teff.effects: ret = self._matchAnyEffects(effect) or ret return ret
python
def filterEffect(self, teff): """ Returns true when any of the transcript effects are present in the request. """ ret = False for effect in teff.effects: ret = self._matchAnyEffects(effect) or ret return ret
[ "def", "filterEffect", "(", "self", ",", "teff", ")", ":", "ret", "=", "False", "for", "effect", "in", "teff", ".", "effects", ":", "ret", "=", "self", ".", "_matchAnyEffects", "(", "effect", ")", "or", "ret", "return", "ret" ]
Returns true when any of the transcript effects are present in the request.
[ "Returns", "true", "when", "any", "of", "the", "transcript", "effects", "are", "present", "in", "the", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L261-L269
ga4gh/ga4gh-server
ga4gh/server/paging.py
VariantAnnotationsIntervalIterator._checkIdEquality
def _checkIdEquality(self, requestedEffect, effect): """ Tests whether a requested effect and an effect present in an annotation are equal. """ return self._idPresent(requestedEffect) and ( effect.term_id == requestedEffect.term_id)
python
def _checkIdEquality(self, requestedEffect, effect): """ Tests whether a requested effect and an effect present in an annotation are equal. """ return self._idPresent(requestedEffect) and ( effect.term_id == requestedEffect.term_id)
[ "def", "_checkIdEquality", "(", "self", ",", "requestedEffect", ",", "effect", ")", ":", "return", "self", ".", "_idPresent", "(", "requestedEffect", ")", "and", "(", "effect", ".", "term_id", "==", "requestedEffect", ".", "term_id", ")" ]
Tests whether a requested effect and an effect present in an annotation are equal.
[ "Tests", "whether", "a", "requested", "effect", "and", "an", "effect", "present", "in", "an", "annotation", "are", "equal", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/paging.py#L271-L277
ga4gh/ga4gh-server
scripts/glue.py
ga4ghImportGlue
def ga4ghImportGlue(): """ Call this method before importing a ga4gh module in the scripts dir. Otherwise, you will be using the installed package instead of the development package. Assumes a certain directory structure. """ path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.append(path)
python
def ga4ghImportGlue(): """ Call this method before importing a ga4gh module in the scripts dir. Otherwise, you will be using the installed package instead of the development package. Assumes a certain directory structure. """ path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.append(path)
[ "def", "ga4ghImportGlue", "(", ")", ":", "path", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ")", "sys", ".", "path", ".", "append", "(", "path", ")" ]
Call this method before importing a ga4gh module in the scripts dir. Otherwise, you will be using the installed package instead of the development package. Assumes a certain directory structure.
[ "Call", "this", "method", "before", "importing", "a", "ga4gh", "module", "in", "the", "scripts", "dir", ".", "Otherwise", "you", "will", "be", "using", "the", "installed", "package", "instead", "of", "the", "development", "package", ".", "Assumes", "a", "certain", "directory", "structure", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/glue.py#L16-L24
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
PysamFileHandleCache._update
def _update(self, dataFile, handle): """ Update the priority of the file handle. The element is first removed and then added to the left of the deque. """ self._cache.remove((dataFile, handle)) self._add(dataFile, handle)
python
def _update(self, dataFile, handle): """ Update the priority of the file handle. The element is first removed and then added to the left of the deque. """ self._cache.remove((dataFile, handle)) self._add(dataFile, handle)
[ "def", "_update", "(", "self", ",", "dataFile", ",", "handle", ")", ":", "self", ".", "_cache", ".", "remove", "(", "(", "dataFile", ",", "handle", ")", ")", "self", ".", "_add", "(", "dataFile", ",", "handle", ")" ]
Update the priority of the file handle. The element is first removed and then added to the left of the deque.
[ "Update", "the", "priority", "of", "the", "file", "handle", ".", "The", "element", "is", "first", "removed", "and", "then", "added", "to", "the", "left", "of", "the", "deque", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L50-L56
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
PysamFileHandleCache._removeLru
def _removeLru(self): """ Remove the least recently used file handle from the cache. The pop method removes an element from the right of the deque. Returns the name of the file that has been removed. """ (dataFile, handle) = self._cache.pop() handle.close() return dataFile
python
def _removeLru(self): """ Remove the least recently used file handle from the cache. The pop method removes an element from the right of the deque. Returns the name of the file that has been removed. """ (dataFile, handle) = self._cache.pop() handle.close() return dataFile
[ "def", "_removeLru", "(", "self", ")", ":", "(", "dataFile", ",", "handle", ")", "=", "self", ".", "_cache", ".", "pop", "(", ")", "handle", ".", "close", "(", ")", "return", "dataFile" ]
Remove the least recently used file handle from the cache. The pop method removes an element from the right of the deque. Returns the name of the file that has been removed.
[ "Remove", "the", "least", "recently", "used", "file", "handle", "from", "the", "cache", ".", "The", "pop", "method", "removes", "an", "element", "from", "the", "right", "of", "the", "deque", ".", "Returns", "the", "name", "of", "the", "file", "that", "has", "been", "removed", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L58-L66
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
PysamFileHandleCache.getFileHandle
def getFileHandle(self, dataFile, openMethod): """ Returns handle associated to the filename. If the file is already opened, update its priority in the cache and return its handle. Otherwise, open the file using openMethod, store it in the cache and return the corresponding handle. """ if dataFile in self._memoTable: handle = self._memoTable[dataFile] self._update(dataFile, handle) return handle else: try: handle = openMethod(dataFile) except ValueError: raise exceptions.FileOpenFailedException(dataFile) self._memoTable[dataFile] = handle self._add(dataFile, handle) if len(self._memoTable) > self._maxCacheSize: dataFile = self._removeLru() del self._memoTable[dataFile] return handle
python
def getFileHandle(self, dataFile, openMethod): """ Returns handle associated to the filename. If the file is already opened, update its priority in the cache and return its handle. Otherwise, open the file using openMethod, store it in the cache and return the corresponding handle. """ if dataFile in self._memoTable: handle = self._memoTable[dataFile] self._update(dataFile, handle) return handle else: try: handle = openMethod(dataFile) except ValueError: raise exceptions.FileOpenFailedException(dataFile) self._memoTable[dataFile] = handle self._add(dataFile, handle) if len(self._memoTable) > self._maxCacheSize: dataFile = self._removeLru() del self._memoTable[dataFile] return handle
[ "def", "getFileHandle", "(", "self", ",", "dataFile", ",", "openMethod", ")", ":", "if", "dataFile", "in", "self", ".", "_memoTable", ":", "handle", "=", "self", ".", "_memoTable", "[", "dataFile", "]", "self", ".", "_update", "(", "dataFile", ",", "handle", ")", "return", "handle", "else", ":", "try", ":", "handle", "=", "openMethod", "(", "dataFile", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "FileOpenFailedException", "(", "dataFile", ")", "self", ".", "_memoTable", "[", "dataFile", "]", "=", "handle", "self", ".", "_add", "(", "dataFile", ",", "handle", ")", "if", "len", "(", "self", ".", "_memoTable", ")", ">", "self", ".", "_maxCacheSize", ":", "dataFile", "=", "self", ".", "_removeLru", "(", ")", "del", "self", ".", "_memoTable", "[", "dataFile", "]", "return", "handle" ]
Returns handle associated to the filename. If the file is already opened, update its priority in the cache and return its handle. Otherwise, open the file using openMethod, store it in the cache and return the corresponding handle.
[ "Returns", "handle", "associated", "to", "the", "filename", ".", "If", "the", "file", "is", "already", "opened", "update", "its", "priority", "in", "the", "cache", "and", "return", "its", "handle", ".", "Otherwise", "open", "the", "file", "using", "openMethod", "store", "it", "in", "the", "cache", "and", "return", "the", "corresponding", "handle", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L68-L90
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
CompoundId.join
def join(cls, splits): """ Join an array of ids into a compound id string """ segments = [] for split in splits: segments.append('"{}",'.format(split)) if len(segments) > 0: segments[-1] = segments[-1][:-1] jsonString = '[{}]'.format(''.join(segments)) return jsonString
python
def join(cls, splits): """ Join an array of ids into a compound id string """ segments = [] for split in splits: segments.append('"{}",'.format(split)) if len(segments) > 0: segments[-1] = segments[-1][:-1] jsonString = '[{}]'.format(''.join(segments)) return jsonString
[ "def", "join", "(", "cls", ",", "splits", ")", ":", "segments", "=", "[", "]", "for", "split", "in", "splits", ":", "segments", ".", "append", "(", "'\"{}\",'", ".", "format", "(", "split", ")", ")", "if", "len", "(", "segments", ")", ">", "0", ":", "segments", "[", "-", "1", "]", "=", "segments", "[", "-", "1", "]", "[", ":", "-", "1", "]", "jsonString", "=", "'[{}]'", ".", "format", "(", "''", ".", "join", "(", "segments", ")", ")", "return", "jsonString" ]
Join an array of ids into a compound id string
[ "Join", "an", "array", "of", "ids", "into", "a", "compound", "id", "string" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L172-L182
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
CompoundId.parse
def parse(cls, compoundIdStr): """ Parses the specified compoundId string and returns an instance of this CompoundId class. :raises: An ObjectWithIdNotFoundException if parsing fails. This is because this method is a client-facing method, and if a malformed identifier (under our internal rules) is provided, the response should be that the identifier does not exist. """ if not isinstance(compoundIdStr, basestring): raise exceptions.BadIdentifierException(compoundIdStr) try: deobfuscated = cls.deobfuscate(compoundIdStr) except TypeError: # When a string that cannot be converted to base64 is passed # as an argument, b64decode raises a TypeError. We must treat # this as an ID not found error. raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) try: encodedSplits = cls.split(deobfuscated) splits = [cls.decode(split) for split in encodedSplits] except (UnicodeDecodeError, ValueError): # Sometimes base64 decoding succeeds but we're left with # unicode gibberish. This is also and IdNotFound. raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) # pull the differentiator out of the splits before instantiating # the class, if the differentiator exists fieldsLength = len(cls.fields) if cls.differentiator is not None: differentiatorIndex = cls.fields.index( cls.differentiatorFieldName) if differentiatorIndex < len(splits): del splits[differentiatorIndex] else: raise exceptions.ObjectWithIdNotFoundException( compoundIdStr) fieldsLength -= 1 if len(splits) != fieldsLength: raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) return cls(None, *splits)
python
def parse(cls, compoundIdStr): """ Parses the specified compoundId string and returns an instance of this CompoundId class. :raises: An ObjectWithIdNotFoundException if parsing fails. This is because this method is a client-facing method, and if a malformed identifier (under our internal rules) is provided, the response should be that the identifier does not exist. """ if not isinstance(compoundIdStr, basestring): raise exceptions.BadIdentifierException(compoundIdStr) try: deobfuscated = cls.deobfuscate(compoundIdStr) except TypeError: # When a string that cannot be converted to base64 is passed # as an argument, b64decode raises a TypeError. We must treat # this as an ID not found error. raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) try: encodedSplits = cls.split(deobfuscated) splits = [cls.decode(split) for split in encodedSplits] except (UnicodeDecodeError, ValueError): # Sometimes base64 decoding succeeds but we're left with # unicode gibberish. This is also and IdNotFound. raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) # pull the differentiator out of the splits before instantiating # the class, if the differentiator exists fieldsLength = len(cls.fields) if cls.differentiator is not None: differentiatorIndex = cls.fields.index( cls.differentiatorFieldName) if differentiatorIndex < len(splits): del splits[differentiatorIndex] else: raise exceptions.ObjectWithIdNotFoundException( compoundIdStr) fieldsLength -= 1 if len(splits) != fieldsLength: raise exceptions.ObjectWithIdNotFoundException(compoundIdStr) return cls(None, *splits)
[ "def", "parse", "(", "cls", ",", "compoundIdStr", ")", ":", "if", "not", "isinstance", "(", "compoundIdStr", ",", "basestring", ")", ":", "raise", "exceptions", ".", "BadIdentifierException", "(", "compoundIdStr", ")", "try", ":", "deobfuscated", "=", "cls", ".", "deobfuscate", "(", "compoundIdStr", ")", "except", "TypeError", ":", "# When a string that cannot be converted to base64 is passed", "# as an argument, b64decode raises a TypeError. We must treat", "# this as an ID not found error.", "raise", "exceptions", ".", "ObjectWithIdNotFoundException", "(", "compoundIdStr", ")", "try", ":", "encodedSplits", "=", "cls", ".", "split", "(", "deobfuscated", ")", "splits", "=", "[", "cls", ".", "decode", "(", "split", ")", "for", "split", "in", "encodedSplits", "]", "except", "(", "UnicodeDecodeError", ",", "ValueError", ")", ":", "# Sometimes base64 decoding succeeds but we're left with", "# unicode gibberish. This is also and IdNotFound.", "raise", "exceptions", ".", "ObjectWithIdNotFoundException", "(", "compoundIdStr", ")", "# pull the differentiator out of the splits before instantiating", "# the class, if the differentiator exists", "fieldsLength", "=", "len", "(", "cls", ".", "fields", ")", "if", "cls", ".", "differentiator", "is", "not", "None", ":", "differentiatorIndex", "=", "cls", ".", "fields", ".", "index", "(", "cls", ".", "differentiatorFieldName", ")", "if", "differentiatorIndex", "<", "len", "(", "splits", ")", ":", "del", "splits", "[", "differentiatorIndex", "]", "else", ":", "raise", "exceptions", ".", "ObjectWithIdNotFoundException", "(", "compoundIdStr", ")", "fieldsLength", "-=", "1", "if", "len", "(", "splits", ")", "!=", "fieldsLength", ":", "raise", "exceptions", ".", "ObjectWithIdNotFoundException", "(", "compoundIdStr", ")", "return", "cls", "(", "None", ",", "*", "splits", ")" ]
Parses the specified compoundId string and returns an instance of this CompoundId class. :raises: An ObjectWithIdNotFoundException if parsing fails. This is because this method is a client-facing method, and if a malformed identifier (under our internal rules) is provided, the response should be that the identifier does not exist.
[ "Parses", "the", "specified", "compoundId", "string", "and", "returns", "an", "instance", "of", "this", "CompoundId", "class", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L207-L247
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
CompoundId.obfuscate
def obfuscate(cls, idStr): """ Mildly obfuscates the specified ID string in an easily reversible fashion. This is not intended for security purposes, but rather to dissuade users from depending on our internal ID structures. """ return unicode(base64.urlsafe_b64encode( idStr.encode('utf-8')).replace(b'=', b''))
python
def obfuscate(cls, idStr): """ Mildly obfuscates the specified ID string in an easily reversible fashion. This is not intended for security purposes, but rather to dissuade users from depending on our internal ID structures. """ return unicode(base64.urlsafe_b64encode( idStr.encode('utf-8')).replace(b'=', b''))
[ "def", "obfuscate", "(", "cls", ",", "idStr", ")", ":", "return", "unicode", "(", "base64", ".", "urlsafe_b64encode", "(", "idStr", ".", "encode", "(", "'utf-8'", ")", ")", ".", "replace", "(", "b'='", ",", "b''", ")", ")" ]
Mildly obfuscates the specified ID string in an easily reversible fashion. This is not intended for security purposes, but rather to dissuade users from depending on our internal ID structures.
[ "Mildly", "obfuscates", "the", "specified", "ID", "string", "in", "an", "easily", "reversible", "fashion", ".", "This", "is", "not", "intended", "for", "security", "purposes", "but", "rather", "to", "dissuade", "users", "from", "depending", "on", "our", "internal", "ID", "structures", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L250-L257
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
CompoundId.deobfuscate
def deobfuscate(cls, data): """ Reverses the obfuscation done by the :meth:`obfuscate` method. If an identifier arrives without correct base64 padding this function will append it to the end. """ # the str() call is necessary to convert the unicode string # to an ascii string since the urlsafe_b64decode method # sometimes chokes on unicode strings return base64.urlsafe_b64decode(str(( data + b'A=='[(len(data) - 1) % 4:])))
python
def deobfuscate(cls, data): """ Reverses the obfuscation done by the :meth:`obfuscate` method. If an identifier arrives without correct base64 padding this function will append it to the end. """ # the str() call is necessary to convert the unicode string # to an ascii string since the urlsafe_b64decode method # sometimes chokes on unicode strings return base64.urlsafe_b64decode(str(( data + b'A=='[(len(data) - 1) % 4:])))
[ "def", "deobfuscate", "(", "cls", ",", "data", ")", ":", "# the str() call is necessary to convert the unicode string", "# to an ascii string since the urlsafe_b64decode method", "# sometimes chokes on unicode strings", "return", "base64", ".", "urlsafe_b64decode", "(", "str", "(", "(", "data", "+", "b'A=='", "[", "(", "len", "(", "data", ")", "-", "1", ")", "%", "4", ":", "]", ")", ")", ")" ]
Reverses the obfuscation done by the :meth:`obfuscate` method. If an identifier arrives without correct base64 padding this function will append it to the end.
[ "Reverses", "the", "obfuscation", "done", "by", "the", ":", "meth", ":", "obfuscate", "method", ".", "If", "an", "identifier", "arrives", "without", "correct", "base64", "padding", "this", "function", "will", "append", "it", "to", "the", "end", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L260-L270
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
DatamodelObject.serializeAttributes
def serializeAttributes(self, msg): """ Sets the attrbutes of a message during serialization. """ attributes = self.getAttributes() for key in attributes: protocol.setAttribute( msg.attributes.attr[key].values, attributes[key]) return msg
python
def serializeAttributes(self, msg): """ Sets the attrbutes of a message during serialization. """ attributes = self.getAttributes() for key in attributes: protocol.setAttribute( msg.attributes.attr[key].values, attributes[key]) return msg
[ "def", "serializeAttributes", "(", "self", ",", "msg", ")", ":", "attributes", "=", "self", ".", "getAttributes", "(", ")", "for", "key", "in", "attributes", ":", "protocol", ".", "setAttribute", "(", "msg", ".", "attributes", ".", "attr", "[", "key", "]", ".", "values", ",", "attributes", "[", "key", "]", ")", "return", "msg" ]
Sets the attrbutes of a message during serialization.
[ "Sets", "the", "attrbutes", "of", "a", "message", "during", "serialization", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L539-L547
ga4gh/ga4gh-server
ga4gh/server/datamodel/__init__.py
DatamodelObject._scanDataFiles
def _scanDataFiles(self, dataDir, patterns): """ Scans the specified directory for files with the specified globbing pattern and calls self._addDataFile for each. Raises an EmptyDirException if no data files are found. """ numDataFiles = 0 for pattern in patterns: scanPath = os.path.join(dataDir, pattern) for filename in glob.glob(scanPath): self._addDataFile(filename) numDataFiles += 1 if numDataFiles == 0: raise exceptions.EmptyDirException(dataDir, patterns)
python
def _scanDataFiles(self, dataDir, patterns): """ Scans the specified directory for files with the specified globbing pattern and calls self._addDataFile for each. Raises an EmptyDirException if no data files are found. """ numDataFiles = 0 for pattern in patterns: scanPath = os.path.join(dataDir, pattern) for filename in glob.glob(scanPath): self._addDataFile(filename) numDataFiles += 1 if numDataFiles == 0: raise exceptions.EmptyDirException(dataDir, patterns)
[ "def", "_scanDataFiles", "(", "self", ",", "dataDir", ",", "patterns", ")", ":", "numDataFiles", "=", "0", "for", "pattern", "in", "patterns", ":", "scanPath", "=", "os", ".", "path", ".", "join", "(", "dataDir", ",", "pattern", ")", "for", "filename", "in", "glob", ".", "glob", "(", "scanPath", ")", ":", "self", ".", "_addDataFile", "(", "filename", ")", "numDataFiles", "+=", "1", "if", "numDataFiles", "==", "0", ":", "raise", "exceptions", ".", "EmptyDirException", "(", "dataDir", ",", "patterns", ")" ]
Scans the specified directory for files with the specified globbing pattern and calls self._addDataFile for each. Raises an EmptyDirException if no data files are found.
[ "Scans", "the", "specified", "directory", "for", "files", "with", "the", "specified", "globbing", "pattern", "and", "calls", "self", ".", "_addDataFile", "for", "each", ".", "Raises", "an", "EmptyDirException", "if", "no", "data", "files", "are", "found", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/__init__.py#L555-L568
ga4gh/ga4gh-server
ga4gh/server/network/__init__.py
getInitialPeerList
def getInitialPeerList(filePath, logger=None): """ Attempts to get a list of peers from a file specified in configuration. This file has one URL per line and can contain newlines and comments. # Main ga4gh node http://1kgenomes.ga4gh.org # Local intranet peer https://192.168.1.1 The server will attempt to add URLs in this file to its registry at startup and will log a warning if the file isn't found. """ ret = [] with open(filePath) as textFile: ret = textFile.readlines() if len(ret) == 0: if logger: logger.warn("Couldn't load the initial " "peer list. Try adding a " "file named 'initial_peers.txt' " "to {}".format(os.getcwd())) # Remove lines that start with a hash or are empty. return filter(lambda x: x != "" and not x.find("#") != -1, ret)
python
def getInitialPeerList(filePath, logger=None): """ Attempts to get a list of peers from a file specified in configuration. This file has one URL per line and can contain newlines and comments. # Main ga4gh node http://1kgenomes.ga4gh.org # Local intranet peer https://192.168.1.1 The server will attempt to add URLs in this file to its registry at startup and will log a warning if the file isn't found. """ ret = [] with open(filePath) as textFile: ret = textFile.readlines() if len(ret) == 0: if logger: logger.warn("Couldn't load the initial " "peer list. Try adding a " "file named 'initial_peers.txt' " "to {}".format(os.getcwd())) # Remove lines that start with a hash or are empty. return filter(lambda x: x != "" and not x.find("#") != -1, ret)
[ "def", "getInitialPeerList", "(", "filePath", ",", "logger", "=", "None", ")", ":", "ret", "=", "[", "]", "with", "open", "(", "filePath", ")", "as", "textFile", ":", "ret", "=", "textFile", ".", "readlines", "(", ")", "if", "len", "(", "ret", ")", "==", "0", ":", "if", "logger", ":", "logger", ".", "warn", "(", "\"Couldn't load the initial \"", "\"peer list. Try adding a \"", "\"file named 'initial_peers.txt' \"", "\"to {}\"", ".", "format", "(", "os", ".", "getcwd", "(", ")", ")", ")", "# Remove lines that start with a hash or are empty.", "return", "filter", "(", "lambda", "x", ":", "x", "!=", "\"\"", "and", "not", "x", ".", "find", "(", "\"#\"", ")", "!=", "-", "1", ",", "ret", ")" ]
Attempts to get a list of peers from a file specified in configuration. This file has one URL per line and can contain newlines and comments. # Main ga4gh node http://1kgenomes.ga4gh.org # Local intranet peer https://192.168.1.1 The server will attempt to add URLs in this file to its registry at startup and will log a warning if the file isn't found.
[ "Attempts", "to", "get", "a", "list", "of", "peers", "from", "a", "file", "specified", "in", "configuration", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/network/__init__.py#L15-L39
ga4gh/ga4gh-server
ga4gh/server/network/__init__.py
insertInitialPeer
def insertInitialPeer(dataRepository, url, logger=None): """ Takes the datarepository, a url, and an optional logger and attempts to add the peer into the repository. """ insertPeer = dataRepository.insertPeer try: peer = datamodel.peers.Peer(url) insertPeer(peer) except exceptions.RepoManagerException as exc: if logger: logger.debug( "Peer already in registry {} {}".format(peer.getUrl(), exc)) except exceptions.BadUrlException as exc: if logger: logger.debug("A URL in the initial " "peer list {} was malformed. {}".format(url), exc)
python
def insertInitialPeer(dataRepository, url, logger=None): """ Takes the datarepository, a url, and an optional logger and attempts to add the peer into the repository. """ insertPeer = dataRepository.insertPeer try: peer = datamodel.peers.Peer(url) insertPeer(peer) except exceptions.RepoManagerException as exc: if logger: logger.debug( "Peer already in registry {} {}".format(peer.getUrl(), exc)) except exceptions.BadUrlException as exc: if logger: logger.debug("A URL in the initial " "peer list {} was malformed. {}".format(url), exc)
[ "def", "insertInitialPeer", "(", "dataRepository", ",", "url", ",", "logger", "=", "None", ")", ":", "insertPeer", "=", "dataRepository", ".", "insertPeer", "try", ":", "peer", "=", "datamodel", ".", "peers", ".", "Peer", "(", "url", ")", "insertPeer", "(", "peer", ")", "except", "exceptions", ".", "RepoManagerException", "as", "exc", ":", "if", "logger", ":", "logger", ".", "debug", "(", "\"Peer already in registry {} {}\"", ".", "format", "(", "peer", ".", "getUrl", "(", ")", ",", "exc", ")", ")", "except", "exceptions", ".", "BadUrlException", "as", "exc", ":", "if", "logger", ":", "logger", ".", "debug", "(", "\"A URL in the initial \"", "\"peer list {} was malformed. {}\"", ".", "format", "(", "url", ")", ",", "exc", ")" ]
Takes the datarepository, a url, and an optional logger and attempts to add the peer into the repository.
[ "Takes", "the", "datarepository", "a", "url", "and", "an", "optional", "logger", "and", "attempts", "to", "add", "the", "peer", "into", "the", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/network/__init__.py#L42-L58
ga4gh/ga4gh-server
ga4gh/server/datamodel/peers.py
isUrl
def isUrl(urlString): """ Attempts to return whether a given URL string is valid by checking for the presence of the URL scheme and netloc using the urlparse module, and then using a regex. From http://stackoverflow.com/questions/7160737/ """ parsed = urlparse.urlparse(urlString) urlparseValid = parsed.netloc != '' and parsed.scheme != '' regex = re.compile( r'^(?:http|ftp)s?://' # http:// or https:// r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)' r'+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... r'localhost|' # localhost... r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip r'(?::\d+)?' # optional port r'(?:/?|[/?]\S+)$', re.IGNORECASE) return regex.match(urlString) and urlparseValid
python
def isUrl(urlString): """ Attempts to return whether a given URL string is valid by checking for the presence of the URL scheme and netloc using the urlparse module, and then using a regex. From http://stackoverflow.com/questions/7160737/ """ parsed = urlparse.urlparse(urlString) urlparseValid = parsed.netloc != '' and parsed.scheme != '' regex = re.compile( r'^(?:http|ftp)s?://' # http:// or https:// r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)' r'+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain... r'localhost|' # localhost... r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip r'(?::\d+)?' # optional port r'(?:/?|[/?]\S+)$', re.IGNORECASE) return regex.match(urlString) and urlparseValid
[ "def", "isUrl", "(", "urlString", ")", ":", "parsed", "=", "urlparse", ".", "urlparse", "(", "urlString", ")", "urlparseValid", "=", "parsed", ".", "netloc", "!=", "''", "and", "parsed", ".", "scheme", "!=", "''", "regex", "=", "re", ".", "compile", "(", "r'^(?:http|ftp)s?://'", "# http:// or https://", "r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)'", "r'+(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|'", "# domain...", "r'localhost|'", "# localhost...", "r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})'", "# ...or ip", "r'(?::\\d+)?'", "# optional port", "r'(?:/?|[/?]\\S+)$'", ",", "re", ".", "IGNORECASE", ")", "return", "regex", ".", "match", "(", "urlString", ")", "and", "urlparseValid" ]
Attempts to return whether a given URL string is valid by checking for the presence of the URL scheme and netloc using the urlparse module, and then using a regex. From http://stackoverflow.com/questions/7160737/
[ "Attempts", "to", "return", "whether", "a", "given", "URL", "string", "is", "valid", "by", "checking", "for", "the", "presence", "of", "the", "URL", "scheme", "and", "netloc", "using", "the", "urlparse", "module", "and", "then", "using", "a", "regex", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/peers.py#L17-L36
ga4gh/ga4gh-server
ga4gh/server/datamodel/peers.py
Peer.setUrl
def setUrl(self, url): """ Attempt to safely set the URL by string. """ if isUrl(url): self._url = url else: raise exceptions.BadUrlException(url) return self
python
def setUrl(self, url): """ Attempt to safely set the URL by string. """ if isUrl(url): self._url = url else: raise exceptions.BadUrlException(url) return self
[ "def", "setUrl", "(", "self", ",", "url", ")", ":", "if", "isUrl", "(", "url", ")", ":", "self", ".", "_url", "=", "url", "else", ":", "raise", "exceptions", ".", "BadUrlException", "(", "url", ")", "return", "self" ]
Attempt to safely set the URL by string.
[ "Attempt", "to", "safely", "set", "the", "URL", "by", "string", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/peers.py#L53-L61
ga4gh/ga4gh-server
ga4gh/server/datamodel/peers.py
Peer.setAttributesJson
def setAttributesJson(self, attributesJson): """ Sets the attributes dictionary from a JSON string. """ try: self._attributes = json.loads(attributesJson) except: raise exceptions.InvalidJsonException(attributesJson) return self
python
def setAttributesJson(self, attributesJson): """ Sets the attributes dictionary from a JSON string. """ try: self._attributes = json.loads(attributesJson) except: raise exceptions.InvalidJsonException(attributesJson) return self
[ "def", "setAttributesJson", "(", "self", ",", "attributesJson", ")", ":", "try", ":", "self", ".", "_attributes", "=", "json", ".", "loads", "(", "attributesJson", ")", "except", ":", "raise", "exceptions", ".", "InvalidJsonException", "(", "attributesJson", ")", "return", "self" ]
Sets the attributes dictionary from a JSON string.
[ "Sets", "the", "attributes", "dictionary", "from", "a", "JSON", "string", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/peers.py#L73-L81
ga4gh/ga4gh-server
ga4gh/server/datamodel/peers.py
Peer.populateFromRow
def populateFromRow(self, peerRecord): """ This method accepts a model record and sets class variables. """ self.setUrl(peerRecord.url) \ .setAttributesJson(peerRecord.attributes) return self
python
def populateFromRow(self, peerRecord): """ This method accepts a model record and sets class variables. """ self.setUrl(peerRecord.url) \ .setAttributesJson(peerRecord.attributes) return self
[ "def", "populateFromRow", "(", "self", ",", "peerRecord", ")", ":", "self", ".", "setUrl", "(", "peerRecord", ".", "url", ")", ".", "setAttributesJson", "(", "peerRecord", ".", "attributes", ")", "return", "self" ]
This method accepts a model record and sets class variables.
[ "This", "method", "accepts", "a", "model", "record", "and", "sets", "class", "variables", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/peers.py#L105-L111
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend._topLevelObjectGenerator
def _topLevelObjectGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: object_ = getByIndexMethod(currentIndex) currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) yield object_.toProtocolElement(), nextPageToken
python
def _topLevelObjectGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: object_ = getByIndexMethod(currentIndex) currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) yield object_.toProtocolElement(), nextPageToken
[ "def", "_topLevelObjectGenerator", "(", "self", ",", "request", ",", "numObjects", ",", "getByIndexMethod", ")", ":", "currentIndex", "=", "0", "if", "request", ".", "page_token", ":", "currentIndex", ",", "=", "paging", ".", "_parsePageToken", "(", "request", ".", "page_token", ",", "1", ")", "while", "currentIndex", "<", "numObjects", ":", "object_", "=", "getByIndexMethod", "(", "currentIndex", ")", "currentIndex", "+=", "1", "nextPageToken", "=", "None", "if", "currentIndex", "<", "numObjects", ":", "nextPageToken", "=", "str", "(", "currentIndex", ")", "yield", "object_", ".", "toProtocolElement", "(", ")", ",", "nextPageToken" ]
Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point.
[ "Returns", "a", "generator", "over", "the", "results", "for", "the", "specified", "request", "which", "is", "over", "a", "set", "of", "objects", "of", "the", "specified", "size", ".", "The", "objects", "are", "returned", "by", "call", "to", "the", "specified", "method", "which", "must", "take", "a", "single", "integer", "as", "an", "argument", ".", "The", "returned", "generator", "yields", "a", "sequence", "of", "(", "object", "nextPageToken", ")", "pairs", "which", "allows", "this", "iteration", "to", "be", "picked", "up", "at", "any", "point", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L74-L93
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend._protocolObjectGenerator
def _protocolObjectGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, from a set of protocol objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: object_ = getByIndexMethod(currentIndex) currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) yield object_, nextPageToken
python
def _protocolObjectGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, from a set of protocol objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: object_ = getByIndexMethod(currentIndex) currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) yield object_, nextPageToken
[ "def", "_protocolObjectGenerator", "(", "self", ",", "request", ",", "numObjects", ",", "getByIndexMethod", ")", ":", "currentIndex", "=", "0", "if", "request", ".", "page_token", ":", "currentIndex", ",", "=", "paging", ".", "_parsePageToken", "(", "request", ".", "page_token", ",", "1", ")", "while", "currentIndex", "<", "numObjects", ":", "object_", "=", "getByIndexMethod", "(", "currentIndex", ")", "currentIndex", "+=", "1", "nextPageToken", "=", "None", "if", "currentIndex", "<", "numObjects", ":", "nextPageToken", "=", "str", "(", "currentIndex", ")", "yield", "object_", ",", "nextPageToken" ]
Returns a generator over the results for the specified request, from a set of protocol objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point.
[ "Returns", "a", "generator", "over", "the", "results", "for", "the", "specified", "request", "from", "a", "set", "of", "protocol", "objects", "of", "the", "specified", "size", ".", "The", "objects", "are", "returned", "by", "call", "to", "the", "specified", "method", "which", "must", "take", "a", "single", "integer", "as", "an", "argument", ".", "The", "returned", "generator", "yields", "a", "sequence", "of", "(", "object", "nextPageToken", ")", "pairs", "which", "allows", "this", "iteration", "to", "be", "picked", "up", "at", "any", "point", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L95-L114
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend._protocolListGenerator
def _protocolListGenerator(self, request, objectList): """ Returns a generator over the objects in the specified list using _protocolObjectGenerator to generate page tokens. """ return self._protocolObjectGenerator( request, len(objectList), lambda index: objectList[index])
python
def _protocolListGenerator(self, request, objectList): """ Returns a generator over the objects in the specified list using _protocolObjectGenerator to generate page tokens. """ return self._protocolObjectGenerator( request, len(objectList), lambda index: objectList[index])
[ "def", "_protocolListGenerator", "(", "self", ",", "request", ",", "objectList", ")", ":", "return", "self", ".", "_protocolObjectGenerator", "(", "request", ",", "len", "(", "objectList", ")", ",", "lambda", "index", ":", "objectList", "[", "index", "]", ")" ]
Returns a generator over the objects in the specified list using _protocolObjectGenerator to generate page tokens.
[ "Returns", "a", "generator", "over", "the", "objects", "in", "the", "specified", "list", "using", "_protocolObjectGenerator", "to", "generate", "page", "tokens", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L116-L122
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend._objectListGenerator
def _objectListGenerator(self, request, objectList): """ Returns a generator over the objects in the specified list using _topLevelObjectGenerator to generate page tokens. """ return self._topLevelObjectGenerator( request, len(objectList), lambda index: objectList[index])
python
def _objectListGenerator(self, request, objectList): """ Returns a generator over the objects in the specified list using _topLevelObjectGenerator to generate page tokens. """ return self._topLevelObjectGenerator( request, len(objectList), lambda index: objectList[index])
[ "def", "_objectListGenerator", "(", "self", ",", "request", ",", "objectList", ")", ":", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "len", "(", "objectList", ")", ",", "lambda", "index", ":", "objectList", "[", "index", "]", ")" ]
Returns a generator over the objects in the specified list using _topLevelObjectGenerator to generate page tokens.
[ "Returns", "a", "generator", "over", "the", "objects", "in", "the", "specified", "list", "using", "_topLevelObjectGenerator", "to", "generate", "page", "tokens", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L124-L130
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.datasetsGenerator
def datasetsGenerator(self, request): """ Returns a generator over the (dataset, nextPageToken) pairs defined by the specified request """ return self._topLevelObjectGenerator( request, self.getDataRepository().getNumDatasets(), self.getDataRepository().getDatasetByIndex)
python
def datasetsGenerator(self, request): """ Returns a generator over the (dataset, nextPageToken) pairs defined by the specified request """ return self._topLevelObjectGenerator( request, self.getDataRepository().getNumDatasets(), self.getDataRepository().getDatasetByIndex)
[ "def", "datasetsGenerator", "(", "self", ",", "request", ")", ":", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "self", ".", "getDataRepository", "(", ")", ".", "getNumDatasets", "(", ")", ",", "self", ".", "getDataRepository", "(", ")", ".", "getDatasetByIndex", ")" ]
Returns a generator over the (dataset, nextPageToken) pairs defined by the specified request
[ "Returns", "a", "generator", "over", "the", "(", "dataset", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L132-L139
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.phenotypeAssociationSetsGenerator
def phenotypeAssociationSetsGenerator(self, request): """ Returns a generator over the (phenotypeAssociationSet, nextPageToken) pairs defined by the specified request """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumPhenotypeAssociationSets(), dataset.getPhenotypeAssociationSetByIndex)
python
def phenotypeAssociationSetsGenerator(self, request): """ Returns a generator over the (phenotypeAssociationSet, nextPageToken) pairs defined by the specified request """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumPhenotypeAssociationSets(), dataset.getPhenotypeAssociationSetByIndex)
[ "def", "phenotypeAssociationSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "dataset", ".", "getNumPhenotypeAssociationSets", "(", ")", ",", "dataset", ".", "getPhenotypeAssociationSetByIndex", ")" ]
Returns a generator over the (phenotypeAssociationSet, nextPageToken) pairs defined by the specified request
[ "Returns", "a", "generator", "over", "the", "(", "phenotypeAssociationSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L168-L176
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.readGroupSetsGenerator
def readGroupSetsGenerator(self, request): """ Returns a generator over the (readGroupSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._readGroupSetsGenerator( request, dataset.getNumReadGroupSets(), dataset.getReadGroupSetByIndex)
python
def readGroupSetsGenerator(self, request): """ Returns a generator over the (readGroupSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._readGroupSetsGenerator( request, dataset.getNumReadGroupSets(), dataset.getReadGroupSetByIndex)
[ "def", "readGroupSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_readGroupSetsGenerator", "(", "request", ",", "dataset", ".", "getNumReadGroupSets", "(", ")", ",", "dataset", ".", "getReadGroupSetByIndex", ")" ]
Returns a generator over the (readGroupSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "readGroupSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L178-L186
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend._readGroupSetsGenerator
def _readGroupSetsGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: obj = getByIndexMethod(currentIndex) include = True rgsp = obj.toProtocolElement() if request.name and request.name != obj.getLocalId(): include = False if request.biosample_id and include: rgsp.ClearField("read_groups") for readGroup in obj.getReadGroups(): if request.biosample_id == readGroup.getBiosampleId(): rgsp.read_groups.extend( [readGroup.toProtocolElement()]) # If none of the biosamples match and the readgroupset # contains reagroups, don't include in the response if len(rgsp.read_groups) == 0 and \ len(obj.getReadGroups()) != 0: include = False currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) if include: yield rgsp, nextPageToken
python
def _readGroupSetsGenerator(self, request, numObjects, getByIndexMethod): """ Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point. """ currentIndex = 0 if request.page_token: currentIndex, = paging._parsePageToken( request.page_token, 1) while currentIndex < numObjects: obj = getByIndexMethod(currentIndex) include = True rgsp = obj.toProtocolElement() if request.name and request.name != obj.getLocalId(): include = False if request.biosample_id and include: rgsp.ClearField("read_groups") for readGroup in obj.getReadGroups(): if request.biosample_id == readGroup.getBiosampleId(): rgsp.read_groups.extend( [readGroup.toProtocolElement()]) # If none of the biosamples match and the readgroupset # contains reagroups, don't include in the response if len(rgsp.read_groups) == 0 and \ len(obj.getReadGroups()) != 0: include = False currentIndex += 1 nextPageToken = None if currentIndex < numObjects: nextPageToken = str(currentIndex) if include: yield rgsp, nextPageToken
[ "def", "_readGroupSetsGenerator", "(", "self", ",", "request", ",", "numObjects", ",", "getByIndexMethod", ")", ":", "currentIndex", "=", "0", "if", "request", ".", "page_token", ":", "currentIndex", ",", "=", "paging", ".", "_parsePageToken", "(", "request", ".", "page_token", ",", "1", ")", "while", "currentIndex", "<", "numObjects", ":", "obj", "=", "getByIndexMethod", "(", "currentIndex", ")", "include", "=", "True", "rgsp", "=", "obj", ".", "toProtocolElement", "(", ")", "if", "request", ".", "name", "and", "request", ".", "name", "!=", "obj", ".", "getLocalId", "(", ")", ":", "include", "=", "False", "if", "request", ".", "biosample_id", "and", "include", ":", "rgsp", ".", "ClearField", "(", "\"read_groups\"", ")", "for", "readGroup", "in", "obj", ".", "getReadGroups", "(", ")", ":", "if", "request", ".", "biosample_id", "==", "readGroup", ".", "getBiosampleId", "(", ")", ":", "rgsp", ".", "read_groups", ".", "extend", "(", "[", "readGroup", ".", "toProtocolElement", "(", ")", "]", ")", "# If none of the biosamples match and the readgroupset", "# contains reagroups, don't include in the response", "if", "len", "(", "rgsp", ".", "read_groups", ")", "==", "0", "and", "len", "(", "obj", ".", "getReadGroups", "(", ")", ")", "!=", "0", ":", "include", "=", "False", "currentIndex", "+=", "1", "nextPageToken", "=", "None", "if", "currentIndex", "<", "numObjects", ":", "nextPageToken", "=", "str", "(", "currentIndex", ")", "if", "include", ":", "yield", "rgsp", ",", "nextPageToken" ]
Returns a generator over the results for the specified request, which is over a set of objects of the specified size. The objects are returned by call to the specified method, which must take a single integer as an argument. The returned generator yields a sequence of (object, nextPageToken) pairs, which allows this iteration to be picked up at any point.
[ "Returns", "a", "generator", "over", "the", "results", "for", "the", "specified", "request", "which", "is", "over", "a", "set", "of", "objects", "of", "the", "specified", "size", ".", "The", "objects", "are", "returned", "by", "call", "to", "the", "specified", "method", "which", "must", "take", "a", "single", "integer", "as", "an", "argument", ".", "The", "returned", "generator", "yields", "a", "sequence", "of", "(", "object", "nextPageToken", ")", "pairs", "which", "allows", "this", "iteration", "to", "be", "picked", "up", "at", "any", "point", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L188-L223
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.referenceSetsGenerator
def referenceSetsGenerator(self, request): """ Returns a generator over the (referenceSet, nextPageToken) pairs defined by the specified request. """ results = [] for obj in self.getDataRepository().getReferenceSets(): include = True if request.md5checksum: if request.md5checksum != obj.getMd5Checksum(): include = False if request.accession: if request.accession not in obj.getSourceAccessions(): include = False if request.assembly_id: if request.assembly_id != obj.getAssemblyId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
python
def referenceSetsGenerator(self, request): """ Returns a generator over the (referenceSet, nextPageToken) pairs defined by the specified request. """ results = [] for obj in self.getDataRepository().getReferenceSets(): include = True if request.md5checksum: if request.md5checksum != obj.getMd5Checksum(): include = False if request.accession: if request.accession not in obj.getSourceAccessions(): include = False if request.assembly_id: if request.assembly_id != obj.getAssemblyId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
[ "def", "referenceSetsGenerator", "(", "self", ",", "request", ")", ":", "results", "=", "[", "]", "for", "obj", "in", "self", ".", "getDataRepository", "(", ")", ".", "getReferenceSets", "(", ")", ":", "include", "=", "True", "if", "request", ".", "md5checksum", ":", "if", "request", ".", "md5checksum", "!=", "obj", ".", "getMd5Checksum", "(", ")", ":", "include", "=", "False", "if", "request", ".", "accession", ":", "if", "request", ".", "accession", "not", "in", "obj", ".", "getSourceAccessions", "(", ")", ":", "include", "=", "False", "if", "request", ".", "assembly_id", ":", "if", "request", ".", "assembly_id", "!=", "obj", ".", "getAssemblyId", "(", ")", ":", "include", "=", "False", "if", "include", ":", "results", ".", "append", "(", "obj", ")", "return", "self", ".", "_objectListGenerator", "(", "request", ",", "results", ")" ]
Returns a generator over the (referenceSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "referenceSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L225-L244
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.referencesGenerator
def referencesGenerator(self, request): """ Returns a generator over the (reference, nextPageToken) pairs defined by the specified request. """ referenceSet = self.getDataRepository().getReferenceSet( request.reference_set_id) results = [] for obj in referenceSet.getReferences(): include = True if request.md5checksum: if request.md5checksum != obj.getMd5Checksum(): include = False if request.accession: if request.accession not in obj.getSourceAccessions(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
python
def referencesGenerator(self, request): """ Returns a generator over the (reference, nextPageToken) pairs defined by the specified request. """ referenceSet = self.getDataRepository().getReferenceSet( request.reference_set_id) results = [] for obj in referenceSet.getReferences(): include = True if request.md5checksum: if request.md5checksum != obj.getMd5Checksum(): include = False if request.accession: if request.accession not in obj.getSourceAccessions(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
[ "def", "referencesGenerator", "(", "self", ",", "request", ")", ":", "referenceSet", "=", "self", ".", "getDataRepository", "(", ")", ".", "getReferenceSet", "(", "request", ".", "reference_set_id", ")", "results", "=", "[", "]", "for", "obj", "in", "referenceSet", ".", "getReferences", "(", ")", ":", "include", "=", "True", "if", "request", ".", "md5checksum", ":", "if", "request", ".", "md5checksum", "!=", "obj", ".", "getMd5Checksum", "(", ")", ":", "include", "=", "False", "if", "request", ".", "accession", ":", "if", "request", ".", "accession", "not", "in", "obj", ".", "getSourceAccessions", "(", ")", ":", "include", "=", "False", "if", "include", ":", "results", ".", "append", "(", "obj", ")", "return", "self", ".", "_objectListGenerator", "(", "request", ",", "results", ")" ]
Returns a generator over the (reference, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "reference", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L246-L264
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.variantSetsGenerator
def variantSetsGenerator(self, request): """ Returns a generator over the (variantSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumVariantSets(), dataset.getVariantSetByIndex)
python
def variantSetsGenerator(self, request): """ Returns a generator over the (variantSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumVariantSets(), dataset.getVariantSetByIndex)
[ "def", "variantSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "dataset", ".", "getNumVariantSets", "(", ")", ",", "dataset", ".", "getVariantSetByIndex", ")" ]
Returns a generator over the (variantSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "variantSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L266-L274
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.variantAnnotationSetsGenerator
def variantAnnotationSetsGenerator(self, request): """ Returns a generator over the (variantAnnotationSet, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId.parse( request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(request.variant_set_id) return self._topLevelObjectGenerator( request, variantSet.getNumVariantAnnotationSets(), variantSet.getVariantAnnotationSetByIndex)
python
def variantAnnotationSetsGenerator(self, request): """ Returns a generator over the (variantAnnotationSet, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId.parse( request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(request.variant_set_id) return self._topLevelObjectGenerator( request, variantSet.getNumVariantAnnotationSets(), variantSet.getVariantAnnotationSetByIndex)
[ "def", "variantAnnotationSetsGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "datamodel", ".", "VariantSetCompoundId", ".", "parse", "(", "request", ".", "variant_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "request", ".", "variant_set_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "variantSet", ".", "getNumVariantAnnotationSets", "(", ")", ",", "variantSet", ".", "getVariantAnnotationSetByIndex", ")" ]
Returns a generator over the (variantAnnotationSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "variantAnnotationSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L276-L287
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.readsGenerator
def readsGenerator(self, request): """ Returns a generator over the (read, nextPageToken) pairs defined by the specified request """ if not request.reference_id: raise exceptions.UnmappedReadsNotSupported() if len(request.read_group_ids) < 1: raise exceptions.BadRequestException( "At least one readGroupId must be specified") elif len(request.read_group_ids) == 1: return self._readsGeneratorSingle(request) else: return self._readsGeneratorMultiple(request)
python
def readsGenerator(self, request): """ Returns a generator over the (read, nextPageToken) pairs defined by the specified request """ if not request.reference_id: raise exceptions.UnmappedReadsNotSupported() if len(request.read_group_ids) < 1: raise exceptions.BadRequestException( "At least one readGroupId must be specified") elif len(request.read_group_ids) == 1: return self._readsGeneratorSingle(request) else: return self._readsGeneratorMultiple(request)
[ "def", "readsGenerator", "(", "self", ",", "request", ")", ":", "if", "not", "request", ".", "reference_id", ":", "raise", "exceptions", ".", "UnmappedReadsNotSupported", "(", ")", "if", "len", "(", "request", ".", "read_group_ids", ")", "<", "1", ":", "raise", "exceptions", ".", "BadRequestException", "(", "\"At least one readGroupId must be specified\"", ")", "elif", "len", "(", "request", ".", "read_group_ids", ")", "==", "1", ":", "return", "self", ".", "_readsGeneratorSingle", "(", "request", ")", "else", ":", "return", "self", ".", "_readsGeneratorMultiple", "(", "request", ")" ]
Returns a generator over the (read, nextPageToken) pairs defined by the specified request
[ "Returns", "a", "generator", "over", "the", "(", "read", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L289-L302
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.variantsGenerator
def variantsGenerator(self, request): """ Returns a generator over the (variant, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId \ .parse(request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) intervalIterator = paging.VariantsIntervalIterator( request, variantSet) return intervalIterator
python
def variantsGenerator(self, request): """ Returns a generator over the (variant, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId \ .parse(request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) intervalIterator = paging.VariantsIntervalIterator( request, variantSet) return intervalIterator
[ "def", "variantsGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "datamodel", ".", "VariantSetCompoundId", ".", "parse", "(", "request", ".", "variant_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "intervalIterator", "=", "paging", ".", "VariantsIntervalIterator", "(", "request", ",", "variantSet", ")", "return", "intervalIterator" ]
Returns a generator over the (variant, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "variant", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L338-L349
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.variantAnnotationsGenerator
def variantAnnotationsGenerator(self, request): """ Returns a generator over the (variantAnnotaitons, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantAnnotationSetCompoundId.parse( request.variant_annotation_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) variantAnnotationSet = variantSet.getVariantAnnotationSet( request.variant_annotation_set_id) iterator = paging.VariantAnnotationsIntervalIterator( request, variantAnnotationSet) return iterator
python
def variantAnnotationsGenerator(self, request): """ Returns a generator over the (variantAnnotaitons, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantAnnotationSetCompoundId.parse( request.variant_annotation_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) variantAnnotationSet = variantSet.getVariantAnnotationSet( request.variant_annotation_set_id) iterator = paging.VariantAnnotationsIntervalIterator( request, variantAnnotationSet) return iterator
[ "def", "variantAnnotationsGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "datamodel", ".", "VariantAnnotationSetCompoundId", ".", "parse", "(", "request", ".", "variant_annotation_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "variantAnnotationSet", "=", "variantSet", ".", "getVariantAnnotationSet", "(", "request", ".", "variant_annotation_set_id", ")", "iterator", "=", "paging", ".", "VariantAnnotationsIntervalIterator", "(", "request", ",", "variantAnnotationSet", ")", "return", "iterator" ]
Returns a generator over the (variantAnnotaitons, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "variantAnnotaitons", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L351-L364
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.featuresGenerator
def featuresGenerator(self, request): """ Returns a generator over the (features, nextPageToken) pairs defined by the (JSON string) request. """ compoundId = None parentId = None if request.feature_set_id != "": compoundId = datamodel.FeatureSetCompoundId.parse( request.feature_set_id) if request.parent_id != "": compoundParentId = datamodel.FeatureCompoundId.parse( request.parent_id) parentId = compoundParentId.featureId # A client can optionally specify JUST the (compound) parentID, # and the server needs to derive the dataset & featureSet # from this (compound) parentID. if compoundId is None: compoundId = compoundParentId else: # check that the dataset and featureSet of the parent # compound ID is the same as that of the featureSetId mismatchCheck = ( compoundParentId.dataset_id != compoundId.dataset_id or compoundParentId.feature_set_id != compoundId.feature_set_id) if mismatchCheck: raise exceptions.ParentIncompatibleWithFeatureSet() if compoundId is None: raise exceptions.FeatureSetNotSpecifiedException() dataset = self.getDataRepository().getDataset( compoundId.dataset_id) featureSet = dataset.getFeatureSet(compoundId.feature_set_id) iterator = paging.FeaturesIterator( request, featureSet, parentId) return iterator
python
def featuresGenerator(self, request): """ Returns a generator over the (features, nextPageToken) pairs defined by the (JSON string) request. """ compoundId = None parentId = None if request.feature_set_id != "": compoundId = datamodel.FeatureSetCompoundId.parse( request.feature_set_id) if request.parent_id != "": compoundParentId = datamodel.FeatureCompoundId.parse( request.parent_id) parentId = compoundParentId.featureId # A client can optionally specify JUST the (compound) parentID, # and the server needs to derive the dataset & featureSet # from this (compound) parentID. if compoundId is None: compoundId = compoundParentId else: # check that the dataset and featureSet of the parent # compound ID is the same as that of the featureSetId mismatchCheck = ( compoundParentId.dataset_id != compoundId.dataset_id or compoundParentId.feature_set_id != compoundId.feature_set_id) if mismatchCheck: raise exceptions.ParentIncompatibleWithFeatureSet() if compoundId is None: raise exceptions.FeatureSetNotSpecifiedException() dataset = self.getDataRepository().getDataset( compoundId.dataset_id) featureSet = dataset.getFeatureSet(compoundId.feature_set_id) iterator = paging.FeaturesIterator( request, featureSet, parentId) return iterator
[ "def", "featuresGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "None", "parentId", "=", "None", "if", "request", ".", "feature_set_id", "!=", "\"\"", ":", "compoundId", "=", "datamodel", ".", "FeatureSetCompoundId", ".", "parse", "(", "request", ".", "feature_set_id", ")", "if", "request", ".", "parent_id", "!=", "\"\"", ":", "compoundParentId", "=", "datamodel", ".", "FeatureCompoundId", ".", "parse", "(", "request", ".", "parent_id", ")", "parentId", "=", "compoundParentId", ".", "featureId", "# A client can optionally specify JUST the (compound) parentID,", "# and the server needs to derive the dataset & featureSet", "# from this (compound) parentID.", "if", "compoundId", "is", "None", ":", "compoundId", "=", "compoundParentId", "else", ":", "# check that the dataset and featureSet of the parent", "# compound ID is the same as that of the featureSetId", "mismatchCheck", "=", "(", "compoundParentId", ".", "dataset_id", "!=", "compoundId", ".", "dataset_id", "or", "compoundParentId", ".", "feature_set_id", "!=", "compoundId", ".", "feature_set_id", ")", "if", "mismatchCheck", ":", "raise", "exceptions", ".", "ParentIncompatibleWithFeatureSet", "(", ")", "if", "compoundId", "is", "None", ":", "raise", "exceptions", ".", "FeatureSetNotSpecifiedException", "(", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "featureSet", "=", "dataset", ".", "getFeatureSet", "(", "compoundId", ".", "feature_set_id", ")", "iterator", "=", "paging", ".", "FeaturesIterator", "(", "request", ",", "featureSet", ",", "parentId", ")", "return", "iterator" ]
Returns a generator over the (features, nextPageToken) pairs defined by the (JSON string) request.
[ "Returns", "a", "generator", "over", "the", "(", "features", "nextPageToken", ")", "pairs", "defined", "by", "the", "(", "JSON", "string", ")", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L366-L403
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.continuousGenerator
def continuousGenerator(self, request): """ Returns a generator over the (continuous, nextPageToken) pairs defined by the (JSON string) request. """ compoundId = None if request.continuous_set_id != "": compoundId = datamodel.ContinuousSetCompoundId.parse( request.continuous_set_id) if compoundId is None: raise exceptions.ContinuousSetNotSpecifiedException() dataset = self.getDataRepository().getDataset( compoundId.dataset_id) continuousSet = dataset.getContinuousSet(request.continuous_set_id) iterator = paging.ContinuousIterator(request, continuousSet) return iterator
python
def continuousGenerator(self, request): """ Returns a generator over the (continuous, nextPageToken) pairs defined by the (JSON string) request. """ compoundId = None if request.continuous_set_id != "": compoundId = datamodel.ContinuousSetCompoundId.parse( request.continuous_set_id) if compoundId is None: raise exceptions.ContinuousSetNotSpecifiedException() dataset = self.getDataRepository().getDataset( compoundId.dataset_id) continuousSet = dataset.getContinuousSet(request.continuous_set_id) iterator = paging.ContinuousIterator(request, continuousSet) return iterator
[ "def", "continuousGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "None", "if", "request", ".", "continuous_set_id", "!=", "\"\"", ":", "compoundId", "=", "datamodel", ".", "ContinuousSetCompoundId", ".", "parse", "(", "request", ".", "continuous_set_id", ")", "if", "compoundId", "is", "None", ":", "raise", "exceptions", ".", "ContinuousSetNotSpecifiedException", "(", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "continuousSet", "=", "dataset", ".", "getContinuousSet", "(", "request", ".", "continuous_set_id", ")", "iterator", "=", "paging", ".", "ContinuousIterator", "(", "request", ",", "continuousSet", ")", "return", "iterator" ]
Returns a generator over the (continuous, nextPageToken) pairs defined by the (JSON string) request.
[ "Returns", "a", "generator", "over", "the", "(", "continuous", "nextPageToken", ")", "pairs", "defined", "by", "the", "(", "JSON", "string", ")", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L405-L421
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.phenotypesGenerator
def phenotypesGenerator(self, request): """ Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request """ # TODO make paging work using SPARQL? compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse( request.phenotype_association_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) phenotypeAssociationSet = dataset.getPhenotypeAssociationSet( compoundId.phenotypeAssociationSetId) associations = phenotypeAssociationSet.getAssociations(request) phenotypes = [association.phenotype for association in associations] return self._protocolListGenerator( request, phenotypes)
python
def phenotypesGenerator(self, request): """ Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request """ # TODO make paging work using SPARQL? compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse( request.phenotype_association_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) phenotypeAssociationSet = dataset.getPhenotypeAssociationSet( compoundId.phenotypeAssociationSetId) associations = phenotypeAssociationSet.getAssociations(request) phenotypes = [association.phenotype for association in associations] return self._protocolListGenerator( request, phenotypes)
[ "def", "phenotypesGenerator", "(", "self", ",", "request", ")", ":", "# TODO make paging work using SPARQL?", "compoundId", "=", "datamodel", ".", "PhenotypeAssociationSetCompoundId", ".", "parse", "(", "request", ".", "phenotype_association_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "phenotypeAssociationSet", "=", "dataset", ".", "getPhenotypeAssociationSet", "(", "compoundId", ".", "phenotypeAssociationSetId", ")", "associations", "=", "phenotypeAssociationSet", ".", "getAssociations", "(", "request", ")", "phenotypes", "=", "[", "association", ".", "phenotype", "for", "association", "in", "associations", "]", "return", "self", ".", "_protocolListGenerator", "(", "request", ",", "phenotypes", ")" ]
Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request
[ "Returns", "a", "generator", "over", "the", "(", "phenotypes", "nextPageToken", ")", "pairs", "defined", "by", "the", "(", "JSON", "string", ")", "request" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L423-L437
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.genotypesPhenotypesGenerator
def genotypesPhenotypesGenerator(self, request): """ Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request """ # TODO make paging work using SPARQL? compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse( request.phenotype_association_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) phenotypeAssociationSet = dataset.getPhenotypeAssociationSet( compoundId.phenotypeAssociationSetId) featureSets = dataset.getFeatureSets() annotationList = phenotypeAssociationSet.getAssociations( request, featureSets) return self._protocolListGenerator(request, annotationList)
python
def genotypesPhenotypesGenerator(self, request): """ Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request """ # TODO make paging work using SPARQL? compoundId = datamodel.PhenotypeAssociationSetCompoundId.parse( request.phenotype_association_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) phenotypeAssociationSet = dataset.getPhenotypeAssociationSet( compoundId.phenotypeAssociationSetId) featureSets = dataset.getFeatureSets() annotationList = phenotypeAssociationSet.getAssociations( request, featureSets) return self._protocolListGenerator(request, annotationList)
[ "def", "genotypesPhenotypesGenerator", "(", "self", ",", "request", ")", ":", "# TODO make paging work using SPARQL?", "compoundId", "=", "datamodel", ".", "PhenotypeAssociationSetCompoundId", ".", "parse", "(", "request", ".", "phenotype_association_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "phenotypeAssociationSet", "=", "dataset", ".", "getPhenotypeAssociationSet", "(", "compoundId", ".", "phenotypeAssociationSetId", ")", "featureSets", "=", "dataset", ".", "getFeatureSets", "(", ")", "annotationList", "=", "phenotypeAssociationSet", ".", "getAssociations", "(", "request", ",", "featureSets", ")", "return", "self", ".", "_protocolListGenerator", "(", "request", ",", "annotationList", ")" ]
Returns a generator over the (phenotypes, nextPageToken) pairs defined by the (JSON string) request
[ "Returns", "a", "generator", "over", "the", "(", "phenotypes", "nextPageToken", ")", "pairs", "defined", "by", "the", "(", "JSON", "string", ")", "request" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L439-L453
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.callSetsGenerator
def callSetsGenerator(self, request): """ Returns a generator over the (callSet, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId.parse( request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) results = [] for obj in variantSet.getCallSets(): include = True if request.name: if request.name != obj.getLocalId(): include = False if request.biosample_id: if request.biosample_id != obj.getBiosampleId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
python
def callSetsGenerator(self, request): """ Returns a generator over the (callSet, nextPageToken) pairs defined by the specified request. """ compoundId = datamodel.VariantSetCompoundId.parse( request.variant_set_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) results = [] for obj in variantSet.getCallSets(): include = True if request.name: if request.name != obj.getLocalId(): include = False if request.biosample_id: if request.biosample_id != obj.getBiosampleId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
[ "def", "callSetsGenerator", "(", "self", ",", "request", ")", ":", "compoundId", "=", "datamodel", ".", "VariantSetCompoundId", ".", "parse", "(", "request", ".", "variant_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "results", "=", "[", "]", "for", "obj", "in", "variantSet", ".", "getCallSets", "(", ")", ":", "include", "=", "True", "if", "request", ".", "name", ":", "if", "request", ".", "name", "!=", "obj", ".", "getLocalId", "(", ")", ":", "include", "=", "False", "if", "request", ".", "biosample_id", ":", "if", "request", ".", "biosample_id", "!=", "obj", ".", "getBiosampleId", "(", ")", ":", "include", "=", "False", "if", "include", ":", "results", ".", "append", "(", "obj", ")", "return", "self", ".", "_objectListGenerator", "(", "request", ",", "results", ")" ]
Returns a generator over the (callSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "callSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L455-L475
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.featureSetsGenerator
def featureSetsGenerator(self, request): """ Returns a generator over the (featureSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumFeatureSets(), dataset.getFeatureSetByIndex)
python
def featureSetsGenerator(self, request): """ Returns a generator over the (featureSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumFeatureSets(), dataset.getFeatureSetByIndex)
[ "def", "featureSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "dataset", ".", "getNumFeatureSets", "(", ")", ",", "dataset", ".", "getFeatureSetByIndex", ")" ]
Returns a generator over the (featureSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "featureSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L477-L485
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.continuousSetsGenerator
def continuousSetsGenerator(self, request): """ Returns a generator over the (continuousSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumContinuousSets(), dataset.getContinuousSetByIndex)
python
def continuousSetsGenerator(self, request): """ Returns a generator over the (continuousSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumContinuousSets(), dataset.getContinuousSetByIndex)
[ "def", "continuousSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "dataset", ".", "getNumContinuousSets", "(", ")", ",", "dataset", ".", "getContinuousSetByIndex", ")" ]
Returns a generator over the (continuousSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "continuousSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L487-L495
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.rnaQuantificationSetsGenerator
def rnaQuantificationSetsGenerator(self, request): """ Returns a generator over the (rnaQuantificationSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumRnaQuantificationSets(), dataset.getRnaQuantificationSetByIndex)
python
def rnaQuantificationSetsGenerator(self, request): """ Returns a generator over the (rnaQuantificationSet, nextPageToken) pairs defined by the specified request. """ dataset = self.getDataRepository().getDataset(request.dataset_id) return self._topLevelObjectGenerator( request, dataset.getNumRnaQuantificationSets(), dataset.getRnaQuantificationSetByIndex)
[ "def", "rnaQuantificationSetsGenerator", "(", "self", ",", "request", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "request", ".", "dataset_id", ")", "return", "self", ".", "_topLevelObjectGenerator", "(", "request", ",", "dataset", ".", "getNumRnaQuantificationSets", "(", ")", ",", "dataset", ".", "getRnaQuantificationSetByIndex", ")" ]
Returns a generator over the (rnaQuantificationSet, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "rnaQuantificationSet", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L497-L505
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.rnaQuantificationsGenerator
def rnaQuantificationsGenerator(self, request): """ Returns a generator over the (rnaQuantification, nextPageToken) pairs defined by the specified request. """ if len(request.rna_quantification_set_id) < 1: raise exceptions.BadRequestException( "Rna Quantification Set Id must be specified") else: compoundId = datamodel.RnaQuantificationSetCompoundId.parse( request.rna_quantification_set_id) dataset = self.getDataRepository().getDataset( compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) results = [] for obj in rnaQuantSet.getRnaQuantifications(): include = True if request.biosample_id: if request.biosample_id != obj.getBiosampleId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
python
def rnaQuantificationsGenerator(self, request): """ Returns a generator over the (rnaQuantification, nextPageToken) pairs defined by the specified request. """ if len(request.rna_quantification_set_id) < 1: raise exceptions.BadRequestException( "Rna Quantification Set Id must be specified") else: compoundId = datamodel.RnaQuantificationSetCompoundId.parse( request.rna_quantification_set_id) dataset = self.getDataRepository().getDataset( compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) results = [] for obj in rnaQuantSet.getRnaQuantifications(): include = True if request.biosample_id: if request.biosample_id != obj.getBiosampleId(): include = False if include: results.append(obj) return self._objectListGenerator(request, results)
[ "def", "rnaQuantificationsGenerator", "(", "self", ",", "request", ")", ":", "if", "len", "(", "request", ".", "rna_quantification_set_id", ")", "<", "1", ":", "raise", "exceptions", ".", "BadRequestException", "(", "\"Rna Quantification Set Id must be specified\"", ")", "else", ":", "compoundId", "=", "datamodel", ".", "RnaQuantificationSetCompoundId", ".", "parse", "(", "request", ".", "rna_quantification_set_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "rnaQuantSet", "=", "dataset", ".", "getRnaQuantificationSet", "(", "compoundId", ".", "rna_quantification_set_id", ")", "results", "=", "[", "]", "for", "obj", "in", "rnaQuantSet", ".", "getRnaQuantifications", "(", ")", ":", "include", "=", "True", "if", "request", ".", "biosample_id", ":", "if", "request", ".", "biosample_id", "!=", "obj", ".", "getBiosampleId", "(", ")", ":", "include", "=", "False", "if", "include", ":", "results", ".", "append", "(", "obj", ")", "return", "self", ".", "_objectListGenerator", "(", "request", ",", "results", ")" ]
Returns a generator over the (rnaQuantification, nextPageToken) pairs defined by the specified request.
[ "Returns", "a", "generator", "over", "the", "(", "rnaQuantification", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L507-L530
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.expressionLevelsGenerator
def expressionLevelsGenerator(self, request): """ Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification """ rnaQuantificationId = request.rna_quantification_id compoundId = datamodel.RnaQuantificationCompoundId.parse( request.rna_quantification_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuant = rnaQuantSet.getRnaQuantification(rnaQuantificationId) rnaQuantificationId = rnaQuant.getLocalId() iterator = paging.ExpressionLevelsIterator( request, rnaQuant) return iterator
python
def expressionLevelsGenerator(self, request): """ Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification """ rnaQuantificationId = request.rna_quantification_id compoundId = datamodel.RnaQuantificationCompoundId.parse( request.rna_quantification_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuant = rnaQuantSet.getRnaQuantification(rnaQuantificationId) rnaQuantificationId = rnaQuant.getLocalId() iterator = paging.ExpressionLevelsIterator( request, rnaQuant) return iterator
[ "def", "expressionLevelsGenerator", "(", "self", ",", "request", ")", ":", "rnaQuantificationId", "=", "request", ".", "rna_quantification_id", "compoundId", "=", "datamodel", ".", "RnaQuantificationCompoundId", ".", "parse", "(", "request", ".", "rna_quantification_id", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "rnaQuantSet", "=", "dataset", ".", "getRnaQuantificationSet", "(", "compoundId", ".", "rna_quantification_set_id", ")", "rnaQuant", "=", "rnaQuantSet", ".", "getRnaQuantification", "(", "rnaQuantificationId", ")", "rnaQuantificationId", "=", "rnaQuant", ".", "getLocalId", "(", ")", "iterator", "=", "paging", ".", "ExpressionLevelsIterator", "(", "request", ",", "rnaQuant", ")", "return", "iterator" ]
Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification
[ "Returns", "a", "generator", "over", "the", "(", "expressionLevel", "nextPageToken", ")", "pairs", "defined", "by", "the", "specified", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L532-L549
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetRequest
def runGetRequest(self, obj): """ Runs a get request by converting the specified datamodel object into its protocol representation. """ protocolElement = obj.toProtocolElement() jsonString = protocol.toJson(protocolElement) return jsonString
python
def runGetRequest(self, obj): """ Runs a get request by converting the specified datamodel object into its protocol representation. """ protocolElement = obj.toProtocolElement() jsonString = protocol.toJson(protocolElement) return jsonString
[ "def", "runGetRequest", "(", "self", ",", "obj", ")", ":", "protocolElement", "=", "obj", ".", "toProtocolElement", "(", ")", "jsonString", "=", "protocol", ".", "toJson", "(", "protocolElement", ")", "return", "jsonString" ]
Runs a get request by converting the specified datamodel object into its protocol representation.
[ "Runs", "a", "get", "request", "by", "converting", "the", "specified", "datamodel", "object", "into", "its", "protocol", "representation", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L568-L575
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchRequest
def runSearchRequest( self, requestStr, requestClass, responseClass, objectGenerator): """ Runs the specified request. The request is a string containing a JSON representation of an instance of the specified requestClass. We return a string representation of an instance of the specified responseClass in JSON format. Objects are filled into the page list using the specified object generator, which must return (object, nextPageToken) pairs, and be able to resume iteration from any point using the nextPageToken attribute of the request object. """ self.startProfile() try: request = protocol.fromJson(requestStr, requestClass) except protocol.json_format.ParseError: raise exceptions.InvalidJsonException(requestStr) # TODO How do we detect when the page size is not set? if not request.page_size: request.page_size = self._defaultPageSize if request.page_size < 0: raise exceptions.BadPageSizeException(request.page_size) responseBuilder = response_builder.SearchResponseBuilder( responseClass, request.page_size, self._maxResponseLength) nextPageToken = None for obj, nextPageToken in objectGenerator(request): responseBuilder.addValue(obj) if responseBuilder.isFull(): break responseBuilder.setNextPageToken(nextPageToken) responseString = responseBuilder.getSerializedResponse() self.endProfile() return responseString
python
def runSearchRequest( self, requestStr, requestClass, responseClass, objectGenerator): """ Runs the specified request. The request is a string containing a JSON representation of an instance of the specified requestClass. We return a string representation of an instance of the specified responseClass in JSON format. Objects are filled into the page list using the specified object generator, which must return (object, nextPageToken) pairs, and be able to resume iteration from any point using the nextPageToken attribute of the request object. """ self.startProfile() try: request = protocol.fromJson(requestStr, requestClass) except protocol.json_format.ParseError: raise exceptions.InvalidJsonException(requestStr) # TODO How do we detect when the page size is not set? if not request.page_size: request.page_size = self._defaultPageSize if request.page_size < 0: raise exceptions.BadPageSizeException(request.page_size) responseBuilder = response_builder.SearchResponseBuilder( responseClass, request.page_size, self._maxResponseLength) nextPageToken = None for obj, nextPageToken in objectGenerator(request): responseBuilder.addValue(obj) if responseBuilder.isFull(): break responseBuilder.setNextPageToken(nextPageToken) responseString = responseBuilder.getSerializedResponse() self.endProfile() return responseString
[ "def", "runSearchRequest", "(", "self", ",", "requestStr", ",", "requestClass", ",", "responseClass", ",", "objectGenerator", ")", ":", "self", ".", "startProfile", "(", ")", "try", ":", "request", "=", "protocol", ".", "fromJson", "(", "requestStr", ",", "requestClass", ")", "except", "protocol", ".", "json_format", ".", "ParseError", ":", "raise", "exceptions", ".", "InvalidJsonException", "(", "requestStr", ")", "# TODO How do we detect when the page size is not set?", "if", "not", "request", ".", "page_size", ":", "request", ".", "page_size", "=", "self", ".", "_defaultPageSize", "if", "request", ".", "page_size", "<", "0", ":", "raise", "exceptions", ".", "BadPageSizeException", "(", "request", ".", "page_size", ")", "responseBuilder", "=", "response_builder", ".", "SearchResponseBuilder", "(", "responseClass", ",", "request", ".", "page_size", ",", "self", ".", "_maxResponseLength", ")", "nextPageToken", "=", "None", "for", "obj", ",", "nextPageToken", "in", "objectGenerator", "(", "request", ")", ":", "responseBuilder", ".", "addValue", "(", "obj", ")", "if", "responseBuilder", ".", "isFull", "(", ")", ":", "break", "responseBuilder", ".", "setNextPageToken", "(", "nextPageToken", ")", "responseString", "=", "responseBuilder", ".", "getSerializedResponse", "(", ")", "self", ".", "endProfile", "(", ")", "return", "responseString" ]
Runs the specified request. The request is a string containing a JSON representation of an instance of the specified requestClass. We return a string representation of an instance of the specified responseClass in JSON format. Objects are filled into the page list using the specified object generator, which must return (object, nextPageToken) pairs, and be able to resume iteration from any point using the nextPageToken attribute of the request object.
[ "Runs", "the", "specified", "request", ".", "The", "request", "is", "a", "string", "containing", "a", "JSON", "representation", "of", "an", "instance", "of", "the", "specified", "requestClass", ".", "We", "return", "a", "string", "representation", "of", "an", "instance", "of", "the", "specified", "responseClass", "in", "JSON", "format", ".", "Objects", "are", "filled", "into", "the", "page", "list", "using", "the", "specified", "object", "generator", "which", "must", "return", "(", "object", "nextPageToken", ")", "pairs", "and", "be", "able", "to", "resume", "iteration", "from", "any", "point", "using", "the", "nextPageToken", "attribute", "of", "the", "request", "object", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L577-L608
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runListReferenceBases
def runListReferenceBases(self, requestJson): """ Runs a listReferenceBases request for the specified ID and request arguments. """ # In the case when an empty post request is made to the endpoint # we instantiate an empty ListReferenceBasesRequest. if not requestJson: request = protocol.ListReferenceBasesRequest() else: try: request = protocol.fromJson( requestJson, protocol.ListReferenceBasesRequest) except protocol.json_format.ParseError: raise exceptions.InvalidJsonException(requestJson) compoundId = datamodel.ReferenceCompoundId.parse(request.reference_id) referenceSet = self.getDataRepository().getReferenceSet( compoundId.reference_set_id) reference = referenceSet.getReference(request.reference_id) start = request.start end = request.end if end == 0: # assume meant "get all" end = reference.getLength() if request.page_token: pageTokenStr = request.page_token start = paging._parsePageToken(pageTokenStr, 1)[0] chunkSize = self._maxResponseLength nextPageToken = None if start + chunkSize < end: end = start + chunkSize nextPageToken = str(start + chunkSize) sequence = reference.getBases(start, end) # build response response = protocol.ListReferenceBasesResponse() response.offset = start response.sequence = sequence if nextPageToken: response.next_page_token = nextPageToken return protocol.toJson(response)
python
def runListReferenceBases(self, requestJson): """ Runs a listReferenceBases request for the specified ID and request arguments. """ # In the case when an empty post request is made to the endpoint # we instantiate an empty ListReferenceBasesRequest. if not requestJson: request = protocol.ListReferenceBasesRequest() else: try: request = protocol.fromJson( requestJson, protocol.ListReferenceBasesRequest) except protocol.json_format.ParseError: raise exceptions.InvalidJsonException(requestJson) compoundId = datamodel.ReferenceCompoundId.parse(request.reference_id) referenceSet = self.getDataRepository().getReferenceSet( compoundId.reference_set_id) reference = referenceSet.getReference(request.reference_id) start = request.start end = request.end if end == 0: # assume meant "get all" end = reference.getLength() if request.page_token: pageTokenStr = request.page_token start = paging._parsePageToken(pageTokenStr, 1)[0] chunkSize = self._maxResponseLength nextPageToken = None if start + chunkSize < end: end = start + chunkSize nextPageToken = str(start + chunkSize) sequence = reference.getBases(start, end) # build response response = protocol.ListReferenceBasesResponse() response.offset = start response.sequence = sequence if nextPageToken: response.next_page_token = nextPageToken return protocol.toJson(response)
[ "def", "runListReferenceBases", "(", "self", ",", "requestJson", ")", ":", "# In the case when an empty post request is made to the endpoint", "# we instantiate an empty ListReferenceBasesRequest.", "if", "not", "requestJson", ":", "request", "=", "protocol", ".", "ListReferenceBasesRequest", "(", ")", "else", ":", "try", ":", "request", "=", "protocol", ".", "fromJson", "(", "requestJson", ",", "protocol", ".", "ListReferenceBasesRequest", ")", "except", "protocol", ".", "json_format", ".", "ParseError", ":", "raise", "exceptions", ".", "InvalidJsonException", "(", "requestJson", ")", "compoundId", "=", "datamodel", ".", "ReferenceCompoundId", ".", "parse", "(", "request", ".", "reference_id", ")", "referenceSet", "=", "self", ".", "getDataRepository", "(", ")", ".", "getReferenceSet", "(", "compoundId", ".", "reference_set_id", ")", "reference", "=", "referenceSet", ".", "getReference", "(", "request", ".", "reference_id", ")", "start", "=", "request", ".", "start", "end", "=", "request", ".", "end", "if", "end", "==", "0", ":", "# assume meant \"get all\"", "end", "=", "reference", ".", "getLength", "(", ")", "if", "request", ".", "page_token", ":", "pageTokenStr", "=", "request", ".", "page_token", "start", "=", "paging", ".", "_parsePageToken", "(", "pageTokenStr", ",", "1", ")", "[", "0", "]", "chunkSize", "=", "self", ".", "_maxResponseLength", "nextPageToken", "=", "None", "if", "start", "+", "chunkSize", "<", "end", ":", "end", "=", "start", "+", "chunkSize", "nextPageToken", "=", "str", "(", "start", "+", "chunkSize", ")", "sequence", "=", "reference", ".", "getBases", "(", "start", ",", "end", ")", "# build response", "response", "=", "protocol", ".", "ListReferenceBasesResponse", "(", ")", "response", ".", "offset", "=", "start", "response", ".", "sequence", "=", "sequence", "if", "nextPageToken", ":", "response", ".", "next_page_token", "=", "nextPageToken", "return", "protocol", ".", "toJson", "(", "response", ")" ]
Runs a listReferenceBases request for the specified ID and request arguments.
[ "Runs", "a", "listReferenceBases", "request", "for", "the", "specified", "ID", "and", "request", "arguments", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L610-L651
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetCallSet
def runGetCallSet(self, id_): """ Returns a callset with the given id """ compoundId = datamodel.CallSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) callSet = variantSet.getCallSet(id_) return self.runGetRequest(callSet)
python
def runGetCallSet(self, id_): """ Returns a callset with the given id """ compoundId = datamodel.CallSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) callSet = variantSet.getCallSet(id_) return self.runGetRequest(callSet)
[ "def", "runGetCallSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "CallSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "callSet", "=", "variantSet", ".", "getCallSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "callSet", ")" ]
Returns a callset with the given id
[ "Returns", "a", "callset", "with", "the", "given", "id" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L655-L663
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetInfo
def runGetInfo(self, request): """ Returns information about the service including protocol version. """ return protocol.toJson(protocol.GetInfoResponse( protocol_version=protocol.version))
python
def runGetInfo(self, request): """ Returns information about the service including protocol version. """ return protocol.toJson(protocol.GetInfoResponse( protocol_version=protocol.version))
[ "def", "runGetInfo", "(", "self", ",", "request", ")", ":", "return", "protocol", ".", "toJson", "(", "protocol", ".", "GetInfoResponse", "(", "protocol_version", "=", "protocol", ".", "version", ")", ")" ]
Returns information about the service including protocol version.
[ "Returns", "information", "about", "the", "service", "including", "protocol", "version", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L665-L670
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runAddAnnouncement
def runAddAnnouncement(self, flaskrequest): """ Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request. """ announcement = {} # We want to parse the request ourselves to collect a little more # data about it. try: requestData = protocol.fromJson( flaskrequest.get_data(), protocol.AnnouncePeerRequest) announcement['hostname'] = flaskrequest.host_url announcement['remote_addr'] = flaskrequest.remote_addr announcement['user_agent'] = flaskrequest.headers.get('User-Agent') except AttributeError: # Sometimes in testing we will send protocol requests instead # of flask requests and so the hostname and user agent won't # be present. try: requestData = protocol.fromJson( flaskrequest, protocol.AnnouncePeerRequest) except Exception as e: raise exceptions.InvalidJsonException(e) except Exception as e: raise exceptions.InvalidJsonException(e) # Validate the url before accepting the announcement peer = datamodel.peers.Peer(requestData.peer.url) peer.setAttributesJson(protocol.toJson( requestData.peer.attributes)) announcement['url'] = peer.getUrl() announcement['attributes'] = peer.getAttributes() try: self.getDataRepository().insertAnnouncement(announcement) except: raise exceptions.BadRequestException(announcement['url']) return protocol.toJson( protocol.AnnouncePeerResponse(success=True))
python
def runAddAnnouncement(self, flaskrequest): """ Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request. """ announcement = {} # We want to parse the request ourselves to collect a little more # data about it. try: requestData = protocol.fromJson( flaskrequest.get_data(), protocol.AnnouncePeerRequest) announcement['hostname'] = flaskrequest.host_url announcement['remote_addr'] = flaskrequest.remote_addr announcement['user_agent'] = flaskrequest.headers.get('User-Agent') except AttributeError: # Sometimes in testing we will send protocol requests instead # of flask requests and so the hostname and user agent won't # be present. try: requestData = protocol.fromJson( flaskrequest, protocol.AnnouncePeerRequest) except Exception as e: raise exceptions.InvalidJsonException(e) except Exception as e: raise exceptions.InvalidJsonException(e) # Validate the url before accepting the announcement peer = datamodel.peers.Peer(requestData.peer.url) peer.setAttributesJson(protocol.toJson( requestData.peer.attributes)) announcement['url'] = peer.getUrl() announcement['attributes'] = peer.getAttributes() try: self.getDataRepository().insertAnnouncement(announcement) except: raise exceptions.BadRequestException(announcement['url']) return protocol.toJson( protocol.AnnouncePeerResponse(success=True))
[ "def", "runAddAnnouncement", "(", "self", ",", "flaskrequest", ")", ":", "announcement", "=", "{", "}", "# We want to parse the request ourselves to collect a little more", "# data about it.", "try", ":", "requestData", "=", "protocol", ".", "fromJson", "(", "flaskrequest", ".", "get_data", "(", ")", ",", "protocol", ".", "AnnouncePeerRequest", ")", "announcement", "[", "'hostname'", "]", "=", "flaskrequest", ".", "host_url", "announcement", "[", "'remote_addr'", "]", "=", "flaskrequest", ".", "remote_addr", "announcement", "[", "'user_agent'", "]", "=", "flaskrequest", ".", "headers", ".", "get", "(", "'User-Agent'", ")", "except", "AttributeError", ":", "# Sometimes in testing we will send protocol requests instead", "# of flask requests and so the hostname and user agent won't", "# be present.", "try", ":", "requestData", "=", "protocol", ".", "fromJson", "(", "flaskrequest", ",", "protocol", ".", "AnnouncePeerRequest", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "InvalidJsonException", "(", "e", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "InvalidJsonException", "(", "e", ")", "# Validate the url before accepting the announcement", "peer", "=", "datamodel", ".", "peers", ".", "Peer", "(", "requestData", ".", "peer", ".", "url", ")", "peer", ".", "setAttributesJson", "(", "protocol", ".", "toJson", "(", "requestData", ".", "peer", ".", "attributes", ")", ")", "announcement", "[", "'url'", "]", "=", "peer", ".", "getUrl", "(", ")", "announcement", "[", "'attributes'", "]", "=", "peer", ".", "getAttributes", "(", ")", "try", ":", "self", ".", "getDataRepository", "(", ")", ".", "insertAnnouncement", "(", "announcement", ")", "except", ":", "raise", "exceptions", ".", "BadRequestException", "(", "announcement", "[", "'url'", "]", ")", "return", "protocol", ".", "toJson", "(", "protocol", ".", "AnnouncePeerResponse", "(", "success", "=", "True", ")", ")" ]
Takes a flask request from the frontend and attempts to parse into an AnnouncePeerRequest. If successful, it will log the announcement to the `announcement` table with some other metadata gathered from the request.
[ "Takes", "a", "flask", "request", "from", "the", "frontend", "and", "attempts", "to", "parse", "into", "an", "AnnouncePeerRequest", ".", "If", "successful", "it", "will", "log", "the", "announcement", "to", "the", "announcement", "table", "with", "some", "other", "metadata", "gathered", "from", "the", "request", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L672-L711
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runListPeers
def runListPeers(self, request): """ Takes a ListPeersRequest and returns a ListPeersResponse using a page_token and page_size if provided. """ return self.runSearchRequest( request, protocol.ListPeersRequest, protocol.ListPeersResponse, self.peersGenerator)
python
def runListPeers(self, request): """ Takes a ListPeersRequest and returns a ListPeersResponse using a page_token and page_size if provided. """ return self.runSearchRequest( request, protocol.ListPeersRequest, protocol.ListPeersResponse, self.peersGenerator)
[ "def", "runListPeers", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "ListPeersRequest", ",", "protocol", ".", "ListPeersResponse", ",", "self", ".", "peersGenerator", ")" ]
Takes a ListPeersRequest and returns a ListPeersResponse using a page_token and page_size if provided.
[ "Takes", "a", "ListPeersRequest", "and", "returns", "a", "ListPeersResponse", "using", "a", "page_token", "and", "page_size", "if", "provided", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L713-L722
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetVariant
def runGetVariant(self, id_): """ Returns a variant with the given id """ compoundId = datamodel.VariantCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) gaVariant = variantSet.getVariant(compoundId) # TODO variant is a special case here, as it's returning a # protocol element rather than a datamodel object. We should # fix this for consistency. jsonString = protocol.toJson(gaVariant) return jsonString
python
def runGetVariant(self, id_): """ Returns a variant with the given id """ compoundId = datamodel.VariantCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) gaVariant = variantSet.getVariant(compoundId) # TODO variant is a special case here, as it's returning a # protocol element rather than a datamodel object. We should # fix this for consistency. jsonString = protocol.toJson(gaVariant) return jsonString
[ "def", "runGetVariant", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "VariantCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "gaVariant", "=", "variantSet", ".", "getVariant", "(", "compoundId", ")", "# TODO variant is a special case here, as it's returning a", "# protocol element rather than a datamodel object. We should", "# fix this for consistency.", "jsonString", "=", "protocol", ".", "toJson", "(", "gaVariant", ")", "return", "jsonString" ]
Returns a variant with the given id
[ "Returns", "a", "variant", "with", "the", "given", "id" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L724-L736
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetBiosample
def runGetBiosample(self, id_): """ Runs a getBiosample request for the specified ID. """ compoundId = datamodel.BiosampleCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) biosample = dataset.getBiosample(id_) return self.runGetRequest(biosample)
python
def runGetBiosample(self, id_): """ Runs a getBiosample request for the specified ID. """ compoundId = datamodel.BiosampleCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) biosample = dataset.getBiosample(id_) return self.runGetRequest(biosample)
[ "def", "runGetBiosample", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "BiosampleCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "biosample", "=", "dataset", ".", "getBiosample", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "biosample", ")" ]
Runs a getBiosample request for the specified ID.
[ "Runs", "a", "getBiosample", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L738-L745
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetIndividual
def runGetIndividual(self, id_): """ Runs a getIndividual request for the specified ID. """ compoundId = datamodel.BiosampleCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) individual = dataset.getIndividual(id_) return self.runGetRequest(individual)
python
def runGetIndividual(self, id_): """ Runs a getIndividual request for the specified ID. """ compoundId = datamodel.BiosampleCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) individual = dataset.getIndividual(id_) return self.runGetRequest(individual)
[ "def", "runGetIndividual", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "BiosampleCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "individual", "=", "dataset", ".", "getIndividual", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "individual", ")" ]
Runs a getIndividual request for the specified ID.
[ "Runs", "a", "getIndividual", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L747-L754
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetFeature
def runGetFeature(self, id_): """ Returns JSON string of the feature object corresponding to the feature compoundID passed in. """ compoundId = datamodel.FeatureCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) featureSet = dataset.getFeatureSet(compoundId.feature_set_id) gaFeature = featureSet.getFeature(compoundId) jsonString = protocol.toJson(gaFeature) return jsonString
python
def runGetFeature(self, id_): """ Returns JSON string of the feature object corresponding to the feature compoundID passed in. """ compoundId = datamodel.FeatureCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) featureSet = dataset.getFeatureSet(compoundId.feature_set_id) gaFeature = featureSet.getFeature(compoundId) jsonString = protocol.toJson(gaFeature) return jsonString
[ "def", "runGetFeature", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "FeatureCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "featureSet", "=", "dataset", ".", "getFeatureSet", "(", "compoundId", ".", "feature_set_id", ")", "gaFeature", "=", "featureSet", ".", "getFeature", "(", "compoundId", ")", "jsonString", "=", "protocol", ".", "toJson", "(", "gaFeature", ")", "return", "jsonString" ]
Returns JSON string of the feature object corresponding to the feature compoundID passed in.
[ "Returns", "JSON", "string", "of", "the", "feature", "object", "corresponding", "to", "the", "feature", "compoundID", "passed", "in", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L756-L766
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetReadGroupSet
def runGetReadGroupSet(self, id_): """ Returns a readGroupSet with the given id_ """ compoundId = datamodel.ReadGroupSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) readGroupSet = dataset.getReadGroupSet(id_) return self.runGetRequest(readGroupSet)
python
def runGetReadGroupSet(self, id_): """ Returns a readGroupSet with the given id_ """ compoundId = datamodel.ReadGroupSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) readGroupSet = dataset.getReadGroupSet(id_) return self.runGetRequest(readGroupSet)
[ "def", "runGetReadGroupSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ReadGroupSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "readGroupSet", "=", "dataset", ".", "getReadGroupSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "readGroupSet", ")" ]
Returns a readGroupSet with the given id_
[ "Returns", "a", "readGroupSet", "with", "the", "given", "id_" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L768-L775
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetReadGroup
def runGetReadGroup(self, id_): """ Returns a read group with the given id_ """ compoundId = datamodel.ReadGroupCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) readGroupSet = dataset.getReadGroupSet(compoundId.read_group_set_id) readGroup = readGroupSet.getReadGroup(id_) return self.runGetRequest(readGroup)
python
def runGetReadGroup(self, id_): """ Returns a read group with the given id_ """ compoundId = datamodel.ReadGroupCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) readGroupSet = dataset.getReadGroupSet(compoundId.read_group_set_id) readGroup = readGroupSet.getReadGroup(id_) return self.runGetRequest(readGroup)
[ "def", "runGetReadGroup", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ReadGroupCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "readGroupSet", "=", "dataset", ".", "getReadGroupSet", "(", "compoundId", ".", "read_group_set_id", ")", "readGroup", "=", "readGroupSet", ".", "getReadGroup", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "readGroup", ")" ]
Returns a read group with the given id_
[ "Returns", "a", "read", "group", "with", "the", "given", "id_" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L777-L785
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetReference
def runGetReference(self, id_): """ Runs a getReference request for the specified ID. """ compoundId = datamodel.ReferenceCompoundId.parse(id_) referenceSet = self.getDataRepository().getReferenceSet( compoundId.reference_set_id) reference = referenceSet.getReference(id_) return self.runGetRequest(reference)
python
def runGetReference(self, id_): """ Runs a getReference request for the specified ID. """ compoundId = datamodel.ReferenceCompoundId.parse(id_) referenceSet = self.getDataRepository().getReferenceSet( compoundId.reference_set_id) reference = referenceSet.getReference(id_) return self.runGetRequest(reference)
[ "def", "runGetReference", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ReferenceCompoundId", ".", "parse", "(", "id_", ")", "referenceSet", "=", "self", ".", "getDataRepository", "(", ")", ".", "getReferenceSet", "(", "compoundId", ".", "reference_set_id", ")", "reference", "=", "referenceSet", ".", "getReference", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "reference", ")" ]
Runs a getReference request for the specified ID.
[ "Runs", "a", "getReference", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L787-L795
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetReferenceSet
def runGetReferenceSet(self, id_): """ Runs a getReferenceSet request for the specified ID. """ referenceSet = self.getDataRepository().getReferenceSet(id_) return self.runGetRequest(referenceSet)
python
def runGetReferenceSet(self, id_): """ Runs a getReferenceSet request for the specified ID. """ referenceSet = self.getDataRepository().getReferenceSet(id_) return self.runGetRequest(referenceSet)
[ "def", "runGetReferenceSet", "(", "self", ",", "id_", ")", ":", "referenceSet", "=", "self", ".", "getDataRepository", "(", ")", ".", "getReferenceSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "referenceSet", ")" ]
Runs a getReferenceSet request for the specified ID.
[ "Runs", "a", "getReferenceSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L797-L802
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetVariantSet
def runGetVariantSet(self, id_): """ Runs a getVariantSet request for the specified ID. """ compoundId = datamodel.VariantSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(id_) return self.runGetRequest(variantSet)
python
def runGetVariantSet(self, id_): """ Runs a getVariantSet request for the specified ID. """ compoundId = datamodel.VariantSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(id_) return self.runGetRequest(variantSet)
[ "def", "runGetVariantSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "VariantSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "variantSet", ")" ]
Runs a getVariantSet request for the specified ID.
[ "Runs", "a", "getVariantSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L804-L811
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetFeatureSet
def runGetFeatureSet(self, id_): """ Runs a getFeatureSet request for the specified ID. """ compoundId = datamodel.FeatureSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) featureSet = dataset.getFeatureSet(id_) return self.runGetRequest(featureSet)
python
def runGetFeatureSet(self, id_): """ Runs a getFeatureSet request for the specified ID. """ compoundId = datamodel.FeatureSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) featureSet = dataset.getFeatureSet(id_) return self.runGetRequest(featureSet)
[ "def", "runGetFeatureSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "FeatureSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "featureSet", "=", "dataset", ".", "getFeatureSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "featureSet", ")" ]
Runs a getFeatureSet request for the specified ID.
[ "Runs", "a", "getFeatureSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L813-L820
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetContinuousSet
def runGetContinuousSet(self, id_): """ Runs a getContinuousSet request for the specified ID. """ compoundId = datamodel.ContinuousSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) continuousSet = dataset.getContinuousSet(id_) return self.runGetRequest(continuousSet)
python
def runGetContinuousSet(self, id_): """ Runs a getContinuousSet request for the specified ID. """ compoundId = datamodel.ContinuousSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) continuousSet = dataset.getContinuousSet(id_) return self.runGetRequest(continuousSet)
[ "def", "runGetContinuousSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ContinuousSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "continuousSet", "=", "dataset", ".", "getContinuousSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "continuousSet", ")" ]
Runs a getContinuousSet request for the specified ID.
[ "Runs", "a", "getContinuousSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L822-L829
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetDataset
def runGetDataset(self, id_): """ Runs a getDataset request for the specified ID. """ dataset = self.getDataRepository().getDataset(id_) return self.runGetRequest(dataset)
python
def runGetDataset(self, id_): """ Runs a getDataset request for the specified ID. """ dataset = self.getDataRepository().getDataset(id_) return self.runGetRequest(dataset)
[ "def", "runGetDataset", "(", "self", ",", "id_", ")", ":", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "dataset", ")" ]
Runs a getDataset request for the specified ID.
[ "Runs", "a", "getDataset", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L831-L836
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetVariantAnnotationSet
def runGetVariantAnnotationSet(self, id_): """ Runs a getVariantSet request for the specified ID. """ compoundId = datamodel.VariantAnnotationSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) variantAnnotationSet = variantSet.getVariantAnnotationSet(id_) return self.runGetRequest(variantAnnotationSet)
python
def runGetVariantAnnotationSet(self, id_): """ Runs a getVariantSet request for the specified ID. """ compoundId = datamodel.VariantAnnotationSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) variantSet = dataset.getVariantSet(compoundId.variant_set_id) variantAnnotationSet = variantSet.getVariantAnnotationSet(id_) return self.runGetRequest(variantAnnotationSet)
[ "def", "runGetVariantAnnotationSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "VariantAnnotationSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "variantSet", "=", "dataset", ".", "getVariantSet", "(", "compoundId", ".", "variant_set_id", ")", "variantAnnotationSet", "=", "variantSet", ".", "getVariantAnnotationSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "variantAnnotationSet", ")" ]
Runs a getVariantSet request for the specified ID.
[ "Runs", "a", "getVariantSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L838-L846
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetRnaQuantification
def runGetRnaQuantification(self, id_): """ Runs a getRnaQuantification request for the specified ID. """ compoundId = datamodel.RnaQuantificationCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuantification = rnaQuantificationSet.getRnaQuantification(id_) return self.runGetRequest(rnaQuantification)
python
def runGetRnaQuantification(self, id_): """ Runs a getRnaQuantification request for the specified ID. """ compoundId = datamodel.RnaQuantificationCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuantification = rnaQuantificationSet.getRnaQuantification(id_) return self.runGetRequest(rnaQuantification)
[ "def", "runGetRnaQuantification", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "RnaQuantificationCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "rnaQuantificationSet", "=", "dataset", ".", "getRnaQuantificationSet", "(", "compoundId", ".", "rna_quantification_set_id", ")", "rnaQuantification", "=", "rnaQuantificationSet", ".", "getRnaQuantification", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "rnaQuantification", ")" ]
Runs a getRnaQuantification request for the specified ID.
[ "Runs", "a", "getRnaQuantification", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L848-L857
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetRnaQuantificationSet
def runGetRnaQuantificationSet(self, id_): """ Runs a getRnaQuantificationSet request for the specified ID. """ compoundId = datamodel.RnaQuantificationSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet(id_) return self.runGetRequest(rnaQuantificationSet)
python
def runGetRnaQuantificationSet(self, id_): """ Runs a getRnaQuantificationSet request for the specified ID. """ compoundId = datamodel.RnaQuantificationSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet(id_) return self.runGetRequest(rnaQuantificationSet)
[ "def", "runGetRnaQuantificationSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "RnaQuantificationSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "rnaQuantificationSet", "=", "dataset", ".", "getRnaQuantificationSet", "(", "id_", ")", "return", "self", ".", "runGetRequest", "(", "rnaQuantificationSet", ")" ]
Runs a getRnaQuantificationSet request for the specified ID.
[ "Runs", "a", "getRnaQuantificationSet", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L859-L866
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runGetExpressionLevel
def runGetExpressionLevel(self, id_): """ Runs a getExpressionLevel request for the specified ID. """ compoundId = datamodel.ExpressionLevelCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuantification = rnaQuantificationSet.getRnaQuantification( compoundId.rna_quantification_id) expressionLevel = rnaQuantification.getExpressionLevel(compoundId) return self.runGetRequest(expressionLevel)
python
def runGetExpressionLevel(self, id_): """ Runs a getExpressionLevel request for the specified ID. """ compoundId = datamodel.ExpressionLevelCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantificationSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuantification = rnaQuantificationSet.getRnaQuantification( compoundId.rna_quantification_id) expressionLevel = rnaQuantification.getExpressionLevel(compoundId) return self.runGetRequest(expressionLevel)
[ "def", "runGetExpressionLevel", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ExpressionLevelCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataRepository", "(", ")", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "rnaQuantificationSet", "=", "dataset", ".", "getRnaQuantificationSet", "(", "compoundId", ".", "rna_quantification_set_id", ")", "rnaQuantification", "=", "rnaQuantificationSet", ".", "getRnaQuantification", "(", "compoundId", ".", "rna_quantification_id", ")", "expressionLevel", "=", "rnaQuantification", ".", "getExpressionLevel", "(", "compoundId", ")", "return", "self", ".", "runGetRequest", "(", "expressionLevel", ")" ]
Runs a getExpressionLevel request for the specified ID.
[ "Runs", "a", "getExpressionLevel", "request", "for", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L868-L879
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchReadGroupSets
def runSearchReadGroupSets(self, request): """ Runs the specified SearchReadGroupSetsRequest. """ return self.runSearchRequest( request, protocol.SearchReadGroupSetsRequest, protocol.SearchReadGroupSetsResponse, self.readGroupSetsGenerator)
python
def runSearchReadGroupSets(self, request): """ Runs the specified SearchReadGroupSetsRequest. """ return self.runSearchRequest( request, protocol.SearchReadGroupSetsRequest, protocol.SearchReadGroupSetsResponse, self.readGroupSetsGenerator)
[ "def", "runSearchReadGroupSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchReadGroupSetsRequest", ",", "protocol", ".", "SearchReadGroupSetsResponse", ",", "self", ".", "readGroupSetsGenerator", ")" ]
Runs the specified SearchReadGroupSetsRequest.
[ "Runs", "the", "specified", "SearchReadGroupSetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L883-L890
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchIndividuals
def runSearchIndividuals(self, request): """ Runs the specified search SearchIndividualsRequest. """ return self.runSearchRequest( request, protocol.SearchIndividualsRequest, protocol.SearchIndividualsResponse, self.individualsGenerator)
python
def runSearchIndividuals(self, request): """ Runs the specified search SearchIndividualsRequest. """ return self.runSearchRequest( request, protocol.SearchIndividualsRequest, protocol.SearchIndividualsResponse, self.individualsGenerator)
[ "def", "runSearchIndividuals", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchIndividualsRequest", ",", "protocol", ".", "SearchIndividualsResponse", ",", "self", ".", "individualsGenerator", ")" ]
Runs the specified search SearchIndividualsRequest.
[ "Runs", "the", "specified", "search", "SearchIndividualsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L892-L899
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchBiosamples
def runSearchBiosamples(self, request): """ Runs the specified SearchBiosamplesRequest. """ return self.runSearchRequest( request, protocol.SearchBiosamplesRequest, protocol.SearchBiosamplesResponse, self.biosamplesGenerator)
python
def runSearchBiosamples(self, request): """ Runs the specified SearchBiosamplesRequest. """ return self.runSearchRequest( request, protocol.SearchBiosamplesRequest, protocol.SearchBiosamplesResponse, self.biosamplesGenerator)
[ "def", "runSearchBiosamples", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchBiosamplesRequest", ",", "protocol", ".", "SearchBiosamplesResponse", ",", "self", ".", "biosamplesGenerator", ")" ]
Runs the specified SearchBiosamplesRequest.
[ "Runs", "the", "specified", "SearchBiosamplesRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L901-L908
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchReads
def runSearchReads(self, request): """ Runs the specified SearchReadsRequest. """ return self.runSearchRequest( request, protocol.SearchReadsRequest, protocol.SearchReadsResponse, self.readsGenerator)
python
def runSearchReads(self, request): """ Runs the specified SearchReadsRequest. """ return self.runSearchRequest( request, protocol.SearchReadsRequest, protocol.SearchReadsResponse, self.readsGenerator)
[ "def", "runSearchReads", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchReadsRequest", ",", "protocol", ".", "SearchReadsResponse", ",", "self", ".", "readsGenerator", ")" ]
Runs the specified SearchReadsRequest.
[ "Runs", "the", "specified", "SearchReadsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L910-L917
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchReferenceSets
def runSearchReferenceSets(self, request): """ Runs the specified SearchReferenceSetsRequest. """ return self.runSearchRequest( request, protocol.SearchReferenceSetsRequest, protocol.SearchReferenceSetsResponse, self.referenceSetsGenerator)
python
def runSearchReferenceSets(self, request): """ Runs the specified SearchReferenceSetsRequest. """ return self.runSearchRequest( request, protocol.SearchReferenceSetsRequest, protocol.SearchReferenceSetsResponse, self.referenceSetsGenerator)
[ "def", "runSearchReferenceSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchReferenceSetsRequest", ",", "protocol", ".", "SearchReferenceSetsResponse", ",", "self", ".", "referenceSetsGenerator", ")" ]
Runs the specified SearchReferenceSetsRequest.
[ "Runs", "the", "specified", "SearchReferenceSetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L919-L926
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchReferences
def runSearchReferences(self, request): """ Runs the specified SearchReferenceRequest. """ return self.runSearchRequest( request, protocol.SearchReferencesRequest, protocol.SearchReferencesResponse, self.referencesGenerator)
python
def runSearchReferences(self, request): """ Runs the specified SearchReferenceRequest. """ return self.runSearchRequest( request, protocol.SearchReferencesRequest, protocol.SearchReferencesResponse, self.referencesGenerator)
[ "def", "runSearchReferences", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchReferencesRequest", ",", "protocol", ".", "SearchReferencesResponse", ",", "self", ".", "referencesGenerator", ")" ]
Runs the specified SearchReferenceRequest.
[ "Runs", "the", "specified", "SearchReferenceRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L928-L935
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchVariantSets
def runSearchVariantSets(self, request): """ Runs the specified SearchVariantSetsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantSetsRequest, protocol.SearchVariantSetsResponse, self.variantSetsGenerator)
python
def runSearchVariantSets(self, request): """ Runs the specified SearchVariantSetsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantSetsRequest, protocol.SearchVariantSetsResponse, self.variantSetsGenerator)
[ "def", "runSearchVariantSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchVariantSetsRequest", ",", "protocol", ".", "SearchVariantSetsResponse", ",", "self", ".", "variantSetsGenerator", ")" ]
Runs the specified SearchVariantSetsRequest.
[ "Runs", "the", "specified", "SearchVariantSetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L937-L944
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchVariantAnnotationSets
def runSearchVariantAnnotationSets(self, request): """ Runs the specified SearchVariantAnnotationSetsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantAnnotationSetsRequest, protocol.SearchVariantAnnotationSetsResponse, self.variantAnnotationSetsGenerator)
python
def runSearchVariantAnnotationSets(self, request): """ Runs the specified SearchVariantAnnotationSetsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantAnnotationSetsRequest, protocol.SearchVariantAnnotationSetsResponse, self.variantAnnotationSetsGenerator)
[ "def", "runSearchVariantAnnotationSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchVariantAnnotationSetsRequest", ",", "protocol", ".", "SearchVariantAnnotationSetsResponse", ",", "self", ".", "variantAnnotationSetsGenerator", ")" ]
Runs the specified SearchVariantAnnotationSetsRequest.
[ "Runs", "the", "specified", "SearchVariantAnnotationSetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L946-L953
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchVariants
def runSearchVariants(self, request): """ Runs the specified SearchVariantRequest. """ return self.runSearchRequest( request, protocol.SearchVariantsRequest, protocol.SearchVariantsResponse, self.variantsGenerator)
python
def runSearchVariants(self, request): """ Runs the specified SearchVariantRequest. """ return self.runSearchRequest( request, protocol.SearchVariantsRequest, protocol.SearchVariantsResponse, self.variantsGenerator)
[ "def", "runSearchVariants", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchVariantsRequest", ",", "protocol", ".", "SearchVariantsResponse", ",", "self", ".", "variantsGenerator", ")" ]
Runs the specified SearchVariantRequest.
[ "Runs", "the", "specified", "SearchVariantRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L955-L962
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchVariantAnnotations
def runSearchVariantAnnotations(self, request): """ Runs the specified SearchVariantAnnotationsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantAnnotationsRequest, protocol.SearchVariantAnnotationsResponse, self.variantAnnotationsGenerator)
python
def runSearchVariantAnnotations(self, request): """ Runs the specified SearchVariantAnnotationsRequest. """ return self.runSearchRequest( request, protocol.SearchVariantAnnotationsRequest, protocol.SearchVariantAnnotationsResponse, self.variantAnnotationsGenerator)
[ "def", "runSearchVariantAnnotations", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchVariantAnnotationsRequest", ",", "protocol", ".", "SearchVariantAnnotationsResponse", ",", "self", ".", "variantAnnotationsGenerator", ")" ]
Runs the specified SearchVariantAnnotationsRequest.
[ "Runs", "the", "specified", "SearchVariantAnnotationsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L964-L971
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchCallSets
def runSearchCallSets(self, request): """ Runs the specified SearchCallSetsRequest. """ return self.runSearchRequest( request, protocol.SearchCallSetsRequest, protocol.SearchCallSetsResponse, self.callSetsGenerator)
python
def runSearchCallSets(self, request): """ Runs the specified SearchCallSetsRequest. """ return self.runSearchRequest( request, protocol.SearchCallSetsRequest, protocol.SearchCallSetsResponse, self.callSetsGenerator)
[ "def", "runSearchCallSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchCallSetsRequest", ",", "protocol", ".", "SearchCallSetsResponse", ",", "self", ".", "callSetsGenerator", ")" ]
Runs the specified SearchCallSetsRequest.
[ "Runs", "the", "specified", "SearchCallSetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L973-L980
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchDatasets
def runSearchDatasets(self, request): """ Runs the specified SearchDatasetsRequest. """ return self.runSearchRequest( request, protocol.SearchDatasetsRequest, protocol.SearchDatasetsResponse, self.datasetsGenerator)
python
def runSearchDatasets(self, request): """ Runs the specified SearchDatasetsRequest. """ return self.runSearchRequest( request, protocol.SearchDatasetsRequest, protocol.SearchDatasetsResponse, self.datasetsGenerator)
[ "def", "runSearchDatasets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchDatasetsRequest", ",", "protocol", ".", "SearchDatasetsResponse", ",", "self", ".", "datasetsGenerator", ")" ]
Runs the specified SearchDatasetsRequest.
[ "Runs", "the", "specified", "SearchDatasetsRequest", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L982-L989
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchFeatureSets
def runSearchFeatureSets(self, request): """ Returns a SearchFeatureSetsResponse for the specified SearchFeatureSetsRequest object. """ return self.runSearchRequest( request, protocol.SearchFeatureSetsRequest, protocol.SearchFeatureSetsResponse, self.featureSetsGenerator)
python
def runSearchFeatureSets(self, request): """ Returns a SearchFeatureSetsResponse for the specified SearchFeatureSetsRequest object. """ return self.runSearchRequest( request, protocol.SearchFeatureSetsRequest, protocol.SearchFeatureSetsResponse, self.featureSetsGenerator)
[ "def", "runSearchFeatureSets", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchFeatureSetsRequest", ",", "protocol", ".", "SearchFeatureSetsResponse", ",", "self", ".", "featureSetsGenerator", ")" ]
Returns a SearchFeatureSetsResponse for the specified SearchFeatureSetsRequest object.
[ "Returns", "a", "SearchFeatureSetsResponse", "for", "the", "specified", "SearchFeatureSetsRequest", "object", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L991-L999
ga4gh/ga4gh-server
ga4gh/server/backend.py
Backend.runSearchFeatures
def runSearchFeatures(self, request): """ Returns a SearchFeaturesResponse for the specified SearchFeaturesRequest object. :param request: JSON string representing searchFeaturesRequest :return: JSON string representing searchFeatureResponse """ return self.runSearchRequest( request, protocol.SearchFeaturesRequest, protocol.SearchFeaturesResponse, self.featuresGenerator)
python
def runSearchFeatures(self, request): """ Returns a SearchFeaturesResponse for the specified SearchFeaturesRequest object. :param request: JSON string representing searchFeaturesRequest :return: JSON string representing searchFeatureResponse """ return self.runSearchRequest( request, protocol.SearchFeaturesRequest, protocol.SearchFeaturesResponse, self.featuresGenerator)
[ "def", "runSearchFeatures", "(", "self", ",", "request", ")", ":", "return", "self", ".", "runSearchRequest", "(", "request", ",", "protocol", ".", "SearchFeaturesRequest", ",", "protocol", ".", "SearchFeaturesResponse", ",", "self", ".", "featuresGenerator", ")" ]
Returns a SearchFeaturesResponse for the specified SearchFeaturesRequest object. :param request: JSON string representing searchFeaturesRequest :return: JSON string representing searchFeatureResponse
[ "Returns", "a", "SearchFeaturesResponse", "for", "the", "specified", "SearchFeaturesRequest", "object", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/backend.py#L1001-L1012