repository_name            stringlengths  7–55
func_path_in_repository    stringlengths  4–223
func_name                  stringlengths  1–134
whole_func_string          stringlengths  75–104k
language                   stringclasses  1 value
func_code_string           stringlengths  75–104k
func_code_tokens           listlengths    19–28.4k
func_documentation_string  stringlengths  1–46.9k
func_documentation_tokens  listlengths    1–1.97k
split_name                 stringclasses  1 value
func_code_url              stringlengths  87–315
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
SimulatedVariantAnnotationSet.generateVariantAnnotation
def generateVariantAnnotation(self, variant):
    """
    Generate a random variant annotation based on a given variant.
    This generator should be seeded with a value that is unique to the
    variant so that the same annotation will always be produced
    regardless of the order it is generated in.
    """
    # To make this reproducible, make a seed based on this
    # specific variant.
    seed = self._randomSeed + variant.start + variant.end
    randomNumberGenerator = random.Random()
    randomNumberGenerator.seed(seed)
    ann = protocol.VariantAnnotation()
    ann.variant_annotation_set_id = str(self.getCompoundId())
    ann.variant_id = variant.id
    ann.created = datetime.datetime.now().isoformat() + "Z"
    # make a transcript effect for each alternate base element
    # multiplied by a random integer (1,5)
    for base in variant.alternate_bases:
        ann.transcript_effects.add().CopyFrom(
            self.generateTranscriptEffect(
                variant, ann, base, randomNumberGenerator))
    ann.id = self.getVariantAnnotationId(variant, ann)
    return ann
python
[ "def", "generateVariantAnnotation", "(", "self", ",", "variant", ")", ":", "# To make this reproducible, make a seed based on this", "# specific variant.", "seed", "=", "self", ".", "_randomSeed", "+", "variant", ".", "start", "+", "variant", ".", "end", "randomNumberGenerator", "=", "random", ".", "Random", "(", ")", "randomNumberGenerator", ".", "seed", "(", "seed", ")", "ann", "=", "protocol", ".", "VariantAnnotation", "(", ")", "ann", ".", "variant_annotation_set_id", "=", "str", "(", "self", ".", "getCompoundId", "(", ")", ")", "ann", ".", "variant_id", "=", "variant", ".", "id", "ann", ".", "created", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "isoformat", "(", ")", "+", "\"Z\"", "# make a transcript effect for each alternate base element", "# multiplied by a random integer (1,5)", "for", "base", "in", "variant", ".", "alternate_bases", ":", "ann", ".", "transcript_effects", ".", "add", "(", ")", ".", "CopyFrom", "(", "self", ".", "generateTranscriptEffect", "(", "variant", ",", "ann", ",", "base", ",", "randomNumberGenerator", ")", ")", "ann", ".", "id", "=", "self", ".", "getVariantAnnotationId", "(", "variant", ",", "ann", ")", "return", "ann" ]
Generate a random variant annotation based on a given variant. This generator should be seeded with a value that is unique to the variant so that the same annotation will always be produced regardless of the order it is generated in.
[ "Generate", "a", "random", "variant", "annotation", "based", "on", "a", "given", "variant", ".", "This", "generator", "should", "be", "seeded", "with", "a", "value", "that", "is", "unique", "to", "the", "variant", "so", "that", "the", "same", "annotation", "will", "always", "be", "produced", "regardless", "of", "the", "order", "it", "is", "generated", "in", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L985-L1008
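The reproducibility trick above is worth isolating: seeding a private random.Random instance with a value derived from the variant's coordinates guarantees identical output regardless of iteration order. A minimal standard-library sketch of the same scheme (the seed values and coordinates are illustrative, not taken from the server):

import random

def annotation_seed(base_seed, start, end):
    # Same scheme as the method above: combine a set-level seed
    # with coordinates unique to the variant.
    return base_seed + start + end

rng1 = random.Random()
rng1.seed(annotation_seed(42, 1000, 1001))
rng2 = random.Random()
rng2.seed(annotation_seed(42, 1000, 1001))
assert rng1.randint(1, 5) == rng2.randint(1, 5)  # reproducible draws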
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.populateFromRow
def populateFromRow(self, annotationSetRecord):
    """
    Populates this VariantAnnotationSet from the specified DB row.
    """
    self._annotationType = annotationSetRecord.annotationtype
    self._analysis = protocol.fromJson(
        annotationSetRecord.analysis, protocol.Analysis)
    self._creationTime = annotationSetRecord.created
    self._updatedTime = annotationSetRecord.updated
    self.setAttributesJson(annotationSetRecord.attributes)
python
[ "def", "populateFromRow", "(", "self", ",", "annotationSetRecord", ")", ":", "self", ".", "_annotationType", "=", "annotationSetRecord", ".", "annotationtype", "self", ".", "_analysis", "=", "protocol", ".", "fromJson", "(", "annotationSetRecord", ".", "analysis", ",", "protocol", ".", "Analysis", ")", "self", ".", "_creationTime", "=", "annotationSetRecord", ".", "created", "self", ".", "_updatedTime", "=", "annotationSetRecord", ".", "updated", "self", ".", "setAttributesJson", "(", "annotationSetRecord", ".", "attributes", ")" ]
Populates this VariantAnnotationSet from the specified DB row.
[ "Populates", "this", "VariantAnnotationSet", "from", "the", "specified", "DB", "row", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1109-L1118
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet._getAnnotationAnalysis
def _getAnnotationAnalysis(self, varFile):
    """
    Assembles metadata within the VCF header into a GA4GH Analysis
    object.

    :return: protocol.Analysis
    """
    header = varFile.header
    analysis = protocol.Analysis()
    formats = header.formats.items()
    infos = header.info.items()
    filters = header.filters.items()
    for prefix, content in [("FORMAT", formats), ("INFO", infos),
                            ("FILTER", filters)]:
        for contentKey, value in content:
            key = "{0}.{1}".format(prefix, value.name)
            if key not in analysis.attributes.attr:
                analysis.attributes.attr[key].Clear()
            if value.description is not None:
                analysis.attributes.attr[
                    key].values.add().string_value = value.description
    analysis.created = self._creationTime
    analysis.updated = self._updatedTime
    for r in header.records:
        # Don't add a key to info if there's nothing in the value
        if r.value is not None:
            if r.key not in analysis.attributes.attr:
                analysis.attributes.attr[r.key].Clear()
            analysis.attributes.attr[r.key] \
                .values.add().string_value = str(r.value)
        if r.key == "created" or r.key == "fileDate":
            # TODO handle more date formats
            try:
                if '-' in r.value:
                    fmtStr = "%Y-%m-%d"
                else:
                    fmtStr = "%Y%m%d"
                analysis.created = datetime.datetime.strptime(
                    r.value, fmtStr).isoformat() + "Z"
            except ValueError:
                # is there a logger we should tell?
                # print("INFO: Could not parse variant annotation time")
                pass  # analysis.create_date_time remains datetime.now()
        if r.key == "software":
            analysis.software.append(r.value)
        if r.key == "name":
            analysis.name = r.value
        if r.key == "description":
            analysis.description = r.value
    analysis.id = str(datamodel.VariantAnnotationSetAnalysisCompoundId(
        self._compoundId, "analysis"))
    return analysis
python
[ "def", "_getAnnotationAnalysis", "(", "self", ",", "varFile", ")", ":", "header", "=", "varFile", ".", "header", "analysis", "=", "protocol", ".", "Analysis", "(", ")", "formats", "=", "header", ".", "formats", ".", "items", "(", ")", "infos", "=", "header", ".", "info", ".", "items", "(", ")", "filters", "=", "header", ".", "filters", ".", "items", "(", ")", "for", "prefix", ",", "content", "in", "[", "(", "\"FORMAT\"", ",", "formats", ")", ",", "(", "\"INFO\"", ",", "infos", ")", ",", "(", "\"FILTER\"", ",", "filters", ")", "]", ":", "for", "contentKey", ",", "value", "in", "content", ":", "key", "=", "\"{0}.{1}\"", ".", "format", "(", "prefix", ",", "value", ".", "name", ")", "if", "key", "not", "in", "analysis", ".", "attributes", ".", "attr", ":", "analysis", ".", "attributes", ".", "attr", "[", "key", "]", ".", "Clear", "(", ")", "if", "value", ".", "description", "is", "not", "None", ":", "analysis", ".", "attributes", ".", "attr", "[", "key", "]", ".", "values", ".", "add", "(", ")", ".", "string_value", "=", "value", ".", "description", "analysis", ".", "created", "=", "self", ".", "_creationTime", "analysis", ".", "updated", "=", "self", ".", "_updatedTime", "for", "r", "in", "header", ".", "records", ":", "# Don't add a key to info if there's nothing in the value", "if", "r", ".", "value", "is", "not", "None", ":", "if", "r", ".", "key", "not", "in", "analysis", ".", "attributes", ".", "attr", ":", "analysis", ".", "attributes", ".", "attr", "[", "r", ".", "key", "]", ".", "Clear", "(", ")", "analysis", ".", "attributes", ".", "attr", "[", "r", ".", "key", "]", ".", "values", ".", "add", "(", ")", ".", "string_value", "=", "str", "(", "r", ".", "value", ")", "if", "r", ".", "key", "==", "\"created\"", "or", "r", ".", "key", "==", "\"fileDate\"", ":", "# TODO handle more date formats", "try", ":", "if", "'-'", "in", "r", ".", "value", ":", "fmtStr", "=", "\"%Y-%m-%d\"", "else", ":", "fmtStr", "=", "\"%Y%m%d\"", "analysis", ".", "created", "=", "datetime", ".", "datetime", ".", "strptime", "(", "r", ".", "value", ",", "fmtStr", ")", ".", "isoformat", "(", ")", "+", "\"Z\"", "except", "ValueError", ":", "# is there a logger we should tell?", "# print(\"INFO: Could not parse variant annotation time\")", "pass", "# analysis.create_date_time remains datetime.now()", "if", "r", ".", "key", "==", "\"software\"", ":", "analysis", ".", "software", ".", "append", "(", "r", ".", "value", ")", "if", "r", ".", "key", "==", "\"name\"", ":", "analysis", ".", "name", "=", "r", ".", "value", "if", "r", ".", "key", "==", "\"description\"", ":", "analysis", ".", "description", "=", "r", ".", "value", "analysis", ".", "id", "=", "str", "(", "datamodel", ".", "VariantAnnotationSetAnalysisCompoundId", "(", "self", ".", "_compoundId", ",", "\"analysis\"", ")", ")", "return", "analysis" ]
Assembles metadata within the VCF header into a GA4GH Analysis object. :return: protocol.Analysis
[ "Assembles", "metadata", "within", "the", "VCF", "header", "into", "a", "GA4GH", "Analysis", "object", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1127-L1177
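The fileDate handling above sniffs the format by checking for a dash before calling strptime. A standalone sketch of that branch, using only the standard library (the sample dates are illustrative):

import datetime

def parse_vcf_file_date(value):
    # VCF headers write dates either as 2016-03-01 or 20160301.
    fmtStr = "%Y-%m-%d" if '-' in value else "%Y%m%d"
    return datetime.datetime.strptime(value, fmtStr).isoformat() + "Z"

print(parse_vcf_file_date("2016-03-01"))  # 2016-03-01T00:00:00Z
print(parse_vcf_file_date("20160301"))    # 2016-03-01T00:00:00Z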
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.getVariantAnnotations
def getVariantAnnotations(self, referenceName, startPosition, endPosition):
    """
    Generator for iterating through variant annotations in this
    variant annotation set.

    :param referenceName:
    :param startPosition:
    :param endPosition:
    :return: generator of protocol.VariantAnnotation
    """
    variantIter = self._variantSet.getPysamVariants(
        referenceName, startPosition, endPosition)
    for record in variantIter:
        yield self.convertVariantAnnotation(record)
python
[ "def", "getVariantAnnotations", "(", "self", ",", "referenceName", ",", "startPosition", ",", "endPosition", ")", ":", "variantIter", "=", "self", ".", "_variantSet", ".", "getPysamVariants", "(", "referenceName", ",", "startPosition", ",", "endPosition", ")", "for", "record", "in", "variantIter", ":", "yield", "self", ".", "convertVariantAnnotation", "(", "record", ")" ]
Generator for iterating through variant annotations in this variant annotation set. :param referenceName: :param startPosition: :param endPosition: :return: generator of protocol.VariantAnnotation
[ "Generator", "for", "iterating", "through", "variant", "annotations", "in", "this", "variant", "annotation", "set", ".", ":", "param", "referenceName", ":", ":", "param", "startPosition", ":", ":", "param", "endPosition", ":", ":", "return", ":", "generator", "of", "protocol", ".", "VariantAnnotation" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1179-L1191
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertLocation
def convertLocation(self, pos):
    """
    Accepts a position string (start/length) and returns
    a GA4GH AlleleLocation with populated fields.

    :param pos:
    :return: protocol.AlleleLocation
    """
    if isUnspecified(pos):
        return None
    coordLen = pos.split('/')
    if len(coordLen) > 1:
        allLoc = self._createGaAlleleLocation()
        allLoc.start = int(coordLen[0]) - 1
        return allLoc
    return None
python
[ "def", "convertLocation", "(", "self", ",", "pos", ")", ":", "if", "isUnspecified", "(", "pos", ")", ":", "return", "None", "coordLen", "=", "pos", ".", "split", "(", "'/'", ")", "if", "len", "(", "coordLen", ")", ">", "1", ":", "allLoc", "=", "self", ".", "_createGaAlleleLocation", "(", ")", "allLoc", ".", "start", "=", "int", "(", "coordLen", "[", "0", "]", ")", "-", "1", "return", "allLoc", "return", "None" ]
Accepts a position string (start/length) and returns a GA4GH AlleleLocation with populated fields. :param pos: :return: protocol.AlleleLocation
[ "Accepts", "a", "position", "string", "(", "start", "/", "length", ")", "and", "returns", "a", "GA4GH", "AlleleLocation", "with", "populated", "fields", ".", ":", "param", "pos", ":", ":", "return", ":", "protocol", ".", "AlleleLocation" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1193-L1207
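The position strings handled above are VEP-style "position/length" fields such as "25/1024". A minimal sketch of the same 1-based-to-0-based conversion, independent of the GA4GH protocol classes (the helper name is hypothetical):

def parse_start_length(pos):
    # "25/1024" -> 0-based start 24; anything without a '/' is rejected.
    parts = pos.split('/')
    if len(parts) > 1:
        return int(parts[0]) - 1
    return None

assert parse_start_length("25/1024") == 24
assert parse_start_length("25") is None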
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertLocationHgvsC
def convertLocationHgvsC(self, hgvsc):
    """
    Accepts an annotation in HGVS notation and returns
    an AlleleLocation with populated fields.

    :param hgvsc:
    :return:
    """
    if isUnspecified(hgvsc):
        return None
    match = re.match(r".*c.(\d+)(\D+)>(\D+)", hgvsc)
    if match:
        pos = int(match.group(1))
        if pos > 0:
            allLoc = self._createGaAlleleLocation()
            allLoc.start = pos - 1
            allLoc.reference_sequence = match.group(2)
            allLoc.alternate_sequence = match.group(3)
            return allLoc
    return None
python
[ "def", "convertLocationHgvsC", "(", "self", ",", "hgvsc", ")", ":", "if", "isUnspecified", "(", "hgvsc", ")", ":", "return", "None", "match", "=", "re", ".", "match", "(", "\".*c.(\\d+)(\\D+)>(\\D+)\"", ",", "hgvsc", ")", "if", "match", ":", "pos", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "if", "pos", ">", "0", ":", "allLoc", "=", "self", ".", "_createGaAlleleLocation", "(", ")", "allLoc", ".", "start", "=", "pos", "-", "1", "allLoc", ".", "reference_sequence", "=", "match", ".", "group", "(", "2", ")", "allLoc", ".", "alternate_sequence", "=", "match", ".", "group", "(", "3", ")", "return", "allLoc", "return", "None" ]
Accepts an annotation in HGVS notation and returns an AlleleLocation with populated fields. :param hgvsc: :return:
[ "Accepts", "an", "annotation", "in", "HGVS", "notation", "and", "returns", "an", "AlleleLocation", "with", "populated", "fields", ".", ":", "param", "hgvsc", ":", ":", "return", ":" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1209-L1227
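Applied to a typical HGVS coding notation, the regular expression above captures the 1-based position and the reference/alternate bases. A quick check with the same pattern (the example notation is illustrative):

import re

match = re.match(r".*c.(\d+)(\D+)>(\D+)", "ENST00000335137.3:c.76A>T")
assert match is not None
pos, ref, alt = int(match.group(1)), match.group(2), match.group(3)
assert (pos - 1, ref, alt) == (75, "A", "T")  # 0-based start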
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertLocationHgvsP
def convertLocationHgvsP(self, hgvsp):
    """
    Accepts an annotation in HGVS notation and returns
    an AlleleLocation with populated fields.

    :param hgvsp:
    :return: protocol.AlleleLocation
    """
    if isUnspecified(hgvsp):
        return None
    match = re.match(r".*p.(\D+)(\d+)(\D+)", hgvsp, flags=re.UNICODE)
    if match is not None:
        allLoc = self._createGaAlleleLocation()
        allLoc.reference_sequence = match.group(1)
        allLoc.start = int(match.group(2)) - 1
        allLoc.alternate_sequence = match.group(3)
        return allLoc
    return None
python
[ "def", "convertLocationHgvsP", "(", "self", ",", "hgvsp", ")", ":", "if", "isUnspecified", "(", "hgvsp", ")", ":", "return", "None", "match", "=", "re", ".", "match", "(", "\".*p.(\\D+)(\\d+)(\\D+)\"", ",", "hgvsp", ",", "flags", "=", "re", ".", "UNICODE", ")", "if", "match", "is", "not", "None", ":", "allLoc", "=", "self", ".", "_createGaAlleleLocation", "(", ")", "allLoc", ".", "reference_sequence", "=", "match", ".", "group", "(", "1", ")", "allLoc", ".", "start", "=", "int", "(", "match", ".", "group", "(", "2", ")", ")", "-", "1", "allLoc", ".", "alternate_sequence", "=", "match", ".", "group", "(", "3", ")", "return", "allLoc", "return", "None" ]
Accepts an annotation in HGVS notation and returns an AlleleLocation with populated fields. :param hgvsp: :return: protocol.AlleleLocation
[ "Accepts", "an", "annotation", "in", "HGVS", "notation", "and", "returns", "an", "AlleleLocation", "with", "populated", "fields", ".", ":", "param", "hgvsp", ":", ":", "return", ":", "protocol", ".", "AlleleLocation" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1229-L1245
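The protein-level variant of the pattern captures residue names around a 1-based position. The same check as above, on an illustrative p. notation:

import re

match = re.match(r".*p.(\D+)(\d+)(\D+)", "ENSP00000334393.3:p.Leu26Val")
assert match is not None
ref, pos, alt = match.group(1), int(match.group(2)), match.group(3)
assert (ref, pos - 1, alt) == ("Leu", 25, "Val")  # 0-based start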
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.addLocations
def addLocations(self, effect, protPos, cdnaPos):
    """
    Adds locations to a GA4GH transcript effect object
    by parsing HGVS annotation fields in concert with
    the supplied position values.

    :param effect: protocol.TranscriptEffect
    :param protPos: String representing protein position from VCF
    :param cdnaPos: String representing coding DNA location
    :return: effect protocol.TranscriptEffect
    """
    self.addCDSLocation(effect, cdnaPos)
    self.addCDNALocation(effect, cdnaPos)
    self.addProteinLocation(effect, protPos)
    return effect
python
[ "def", "addLocations", "(", "self", ",", "effect", ",", "protPos", ",", "cdnaPos", ")", ":", "self", ".", "addCDSLocation", "(", "effect", ",", "cdnaPos", ")", "self", ".", "addCDNALocation", "(", "effect", ",", "cdnaPos", ")", "self", ".", "addProteinLocation", "(", "effect", ",", "protPos", ")", "return", "effect" ]
Adds locations to a GA4GH transcript effect object by parsing HGVS annotation fields in concert with the supplied position values. :param effect: protocol.TranscriptEffect :param protPos: String representing protein position from VCF :param cdnaPos: String representing coding DNA location :return: effect protocol.TranscriptEffect
[ "Adds", "locations", "to", "a", "GA4GH", "transcript", "effect", "object", "by", "parsing", "HGVS", "annotation", "fields", "in", "concert", "with", "and", "supplied", "position", "values", ".", ":", "param", "effect", ":", "protocol", ".", "TranscriptEffect", ":", "param", "protPos", ":", "String", "representing", "protein", "position", "from", "VCF", ":", "param", "cdnaPos", ":", "String", "representing", "coding", "DNA", "location", ":", "return", ":", "effect", "protocol", ".", "TranscriptEffect" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1282-L1295
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertTranscriptEffect
def convertTranscriptEffect(self, annStr, hgvsG):
    """
    Takes the ANN string of a SnpEff generated VCF, splits it
    and returns a populated GA4GH transcript effect object.

    :param annStr: String
    :param hgvsG: String
    :return: effect protocol.TranscriptEffect()
    """
    effect = self._createGaTranscriptEffect()
    effect.hgvs_annotation.CopyFrom(protocol.HGVSAnnotation())
    annDict = dict()
    if self._annotationType == ANNOTATIONS_SNPEFF:
        annDict = dict(zip(self.SNPEFF_FIELDS, annStr.split("|")))
    elif self._annotationType == ANNOTATIONS_VEP_V82:
        annDict = dict(zip(self.VEP_FIELDS, annStr.split("|")))
    else:
        annDict = dict(zip(self.CSQ_FIELDS, annStr.split("|")))
    annDict["hgvs_annotation.genomic"] = hgvsG if hgvsG else u''
    for key, val in annDict.items():
        try:
            protocol.deepSetAttr(effect, key, val)
        except AttributeError:
            if val and key not in self.EXCLUDED_FIELDS:
                protocol.setAttribute(
                    effect.attributes.attr[key].values, val)
    effect.effects.extend(self.convertSeqOntology(annDict.get('effects')))
    self.addLocations(
        effect, annDict.get('protPos'), annDict.get('cdnaPos'))
    effect.id = self.getTranscriptEffectId(effect)
    return effect
python
[ "def", "convertTranscriptEffect", "(", "self", ",", "annStr", ",", "hgvsG", ")", ":", "effect", "=", "self", ".", "_createGaTranscriptEffect", "(", ")", "effect", ".", "hgvs_annotation", ".", "CopyFrom", "(", "protocol", ".", "HGVSAnnotation", "(", ")", ")", "annDict", "=", "dict", "(", ")", "if", "self", ".", "_annotationType", "==", "ANNOTATIONS_SNPEFF", ":", "annDict", "=", "dict", "(", "zip", "(", "self", ".", "SNPEFF_FIELDS", ",", "annStr", ".", "split", "(", "\"|\"", ")", ")", ")", "elif", "self", ".", "_annotationType", "==", "ANNOTATIONS_VEP_V82", ":", "annDict", "=", "dict", "(", "zip", "(", "self", ".", "VEP_FIELDS", ",", "annStr", ".", "split", "(", "\"|\"", ")", ")", ")", "else", ":", "annDict", "=", "dict", "(", "zip", "(", "self", ".", "CSQ_FIELDS", ",", "annStr", ".", "split", "(", "\"|\"", ")", ")", ")", "annDict", "[", "\"hgvs_annotation.genomic\"", "]", "=", "hgvsG", "if", "hgvsG", "else", "u''", "for", "key", ",", "val", "in", "annDict", ".", "items", "(", ")", ":", "try", ":", "protocol", ".", "deepSetAttr", "(", "effect", ",", "key", ",", "val", ")", "except", "AttributeError", ":", "if", "val", "and", "key", "not", "in", "self", ".", "EXCLUDED_FIELDS", ":", "protocol", ".", "setAttribute", "(", "effect", ".", "attributes", ".", "attr", "[", "key", "]", ".", "values", ",", "val", ")", "effect", ".", "effects", ".", "extend", "(", "self", ".", "convertSeqOntology", "(", "annDict", ".", "get", "(", "'effects'", ")", ")", ")", "self", ".", "addLocations", "(", "effect", ",", "annDict", ".", "get", "(", "'protPos'", ")", ",", "annDict", ".", "get", "(", "'cdnaPos'", ")", ")", "effect", ".", "id", "=", "self", ".", "getTranscriptEffectId", "(", "effect", ")", "return", "effect" ]
Takes the ANN string of a SnpEff generated VCF, splits it and returns a populated GA4GH transcript effect object. :param annStr: String :param hgvsG: String :return: effect protocol.TranscriptEffect()
[ "Takes", "the", "ANN", "string", "of", "a", "SnpEff", "generated", "VCF", "splits", "it", "and", "returns", "a", "populated", "GA4GH", "transcript", "effect", "object", ".", ":", "param", "annStr", ":", "String", ":", "param", "hgvsG", ":", "String", ":", "return", ":", "effect", "protocol", ".", "TranscriptEffect", "()" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1297-L1326
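The field mapping above is simply a zip of a known column-name list against the pipe-separated ANN/CSQ payload. A toy version with a made-up three-field schema (the field names are illustrative, not the server's actual SNPEFF_FIELDS):

fields = ["alt", "effects", "impact"]    # hypothetical schema
ann = "T|missense_variant|MODERATE"
annDict = dict(zip(fields, ann.split("|")))
assert annDict == {"alt": "T", "effects": "missense_variant",
                   "impact": "MODERATE"}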
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertSeqOntology
def convertSeqOntology(self, seqOntStr):
    """
    Splits a string of sequence ontology effects and creates
    an ontology term record for each, which are built into
    an array of return soTerms.

    :param seqOntStr:
    :return: [protocol.OntologyTerm]
    """
    return [
        self._ontology.getGaTermByName(soName)
        for soName in seqOntStr.split('&')]
python
[ "def", "convertSeqOntology", "(", "self", ",", "seqOntStr", ")", ":", "return", "[", "self", ".", "_ontology", ".", "getGaTermByName", "(", "soName", ")", "for", "soName", "in", "seqOntStr", ".", "split", "(", "'&'", ")", "]" ]
Splits a string of sequence ontology effects and creates an ontology term record for each, which are built into an array of return soTerms. :param seqOntStr: :return: [protocol.OntologyTerm]
[ "Splits", "a", "string", "of", "sequence", "ontology", "effects", "and", "creates", "an", "ontology", "term", "record", "for", "each", "which", "are", "built", "into", "an", "array", "of", "return", "soTerms", ".", ":", "param", "seqOntStr", ":", ":", "return", ":", "[", "protocol", ".", "OntologyTerm", "]" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1328-L1338
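SnpEff and VEP join multiple sequence-ontology terms with '&', so the split above yields one term lookup per effect. The lookup itself needs the ontology object; a sketch with a stand-in dictionary (the term names are real SO terms, the mapping is illustrative):

so_terms = {"missense_variant": "SO:0001583",
            "splice_region_variant": "SO:0001630"}  # stand-in ontology

def convert_seq_ontology(seqOntStr):
    return [so_terms[name] for name in seqOntStr.split('&')]

assert convert_seq_ontology("missense_variant&splice_region_variant") == \
    ["SO:0001583", "SO:0001630"]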
ga4gh/ga4gh-server
ga4gh/server/datamodel/variants.py
HtslibVariantAnnotationSet.convertVariantAnnotation
def convertVariantAnnotation(self, record):
    """
    Converts the specified pysam variant record into a GA4GH variant
    annotation object using the specified function to convert the
    transcripts.
    """
    variant = self._variantSet.convertVariant(record, [])
    annotation = self._createGaVariantAnnotation()
    annotation.variant_id = variant.id
    gDots = record.info.get(b'HGVS.g')
    # Convert annotations from INFO field into TranscriptEffect
    transcriptEffects = []
    annotations = record.info.get(b'ANN') or record.info.get(b'CSQ')
    for i, ann in enumerate(annotations):
        hgvsG = gDots[i % len(variant.alternate_bases)] if gDots else None
        transcriptEffects.append(self.convertTranscriptEffect(ann, hgvsG))
    annotation.transcript_effects.extend(transcriptEffects)
    annotation.id = self.getVariantAnnotationId(variant, annotation)
    return variant, annotation
python
[ "def", "convertVariantAnnotation", "(", "self", ",", "record", ")", ":", "variant", "=", "self", ".", "_variantSet", ".", "convertVariant", "(", "record", ",", "[", "]", ")", "annotation", "=", "self", ".", "_createGaVariantAnnotation", "(", ")", "annotation", ".", "variant_id", "=", "variant", ".", "id", "gDots", "=", "record", ".", "info", ".", "get", "(", "b'HGVS.g'", ")", "# Convert annotations from INFO field into TranscriptEffect", "transcriptEffects", "=", "[", "]", "annotations", "=", "record", ".", "info", ".", "get", "(", "b'ANN'", ")", "or", "record", ".", "info", ".", "get", "(", "b'CSQ'", ")", "for", "i", ",", "ann", "in", "enumerate", "(", "annotations", ")", ":", "hgvsG", "=", "gDots", "[", "i", "%", "len", "(", "variant", ".", "alternate_bases", ")", "]", "if", "gDots", "else", "None", "transcriptEffects", ".", "append", "(", "self", ".", "convertTranscriptEffect", "(", "ann", ",", "hgvsG", ")", ")", "annotation", ".", "transcript_effects", ".", "extend", "(", "transcriptEffects", ")", "annotation", ".", "id", "=", "self", ".", "getVariantAnnotationId", "(", "variant", ",", "annotation", ")", "return", "variant", ",", "annotation" ]
Converts the specified pysam variant record into a GA4GH variant annotation object using the specified function to convert the transcripts.
[ "Converts", "the", "specfied", "pysam", "variant", "record", "into", "a", "GA4GH", "variant", "annotation", "object", "using", "the", "specified", "function", "to", "convert", "the", "transcripts", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/variants.py#L1340-L1358
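The index arithmetic above pairs each transcript effect with the HGVS.g string for its alternate allele: ANN entries cycle through the alternates, so i % len(alternate_bases) recovers the right one. A toy illustration (all values are made up):

alts = ["T", "G"]                        # two alternate alleles
g_dots = ["g.100A>T", "g.100A>G"]        # one HGVS.g per alternate
anns = ["eff1", "eff2", "eff3", "eff4"]  # effects cycling over alternates

paired = [(ann, g_dots[i % len(alts)]) for i, ann in enumerate(anns)]
assert paired[0][1] == "g.100A>T" and paired[1][1] == "g.100A>G"
assert paired[2][1] == "g.100A>T"  # wraps back to the first alternate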
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Feature._attributeStr
def _attributeStr(self, name):
    """
    Return name=value for a single attribute
    """
    return "{}={}".format(
        _encodeAttr(name),
        ",".join([_encodeAttr(v) for v in self.attributes[name]]))
python
[ "def", "_attributeStr", "(", "self", ",", "name", ")", ":", "return", "\"{}={}\"", ".", "format", "(", "_encodeAttr", "(", "name", ")", ",", "\",\"", ".", "join", "(", "[", "_encodeAttr", "(", "v", ")", "for", "v", "in", "self", ".", "attributes", "[", "name", "]", "]", ")", ")" ]
Return name=value for a single attribute
[ "Return", "name", "=", "value", "for", "a", "single", "attribute" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L98-L104
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Feature._attributeStrs
def _attributeStrs(self):
    """
    Return name=value, semi-colon-separated string for attributes,
    including url-style quoting
    """
    return ";".join([self._attributeStr(name)
                     for name in self.attributes.iterkeys()])
python
[ "def", "_attributeStrs", "(", "self", ")", ":", "return", "\";\"", ".", "join", "(", "[", "self", ".", "_attributeStr", "(", "name", ")", "for", "name", "in", "self", ".", "attributes", ".", "iterkeys", "(", ")", "]", ")" ]
Return name=value, semi-colon-separated string for attributes, including url-style quoting
[ "Return", "name", "=", "value", "semi", "-", "colon", "-", "separated", "string", "for", "attributes", "including", "url", "-", "style", "quoting" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L106-L112
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Feature.featureName
def featureName(self):
    """
    ID attribute from GFF3 or None if record doesn't have it.
    Called "Name" rather than "Id" within GA4GH, as there is no
    guarantee of either uniqueness or existence.
    """
    featId = self.attributes.get("ID")
    if featId is not None:
        featId = featId[0]
    return featId
python
[ "def", "featureName", "(", "self", ")", ":", "featId", "=", "self", ".", "attributes", ".", "get", "(", "\"ID\"", ")", "if", "featId", "is", "not", "None", ":", "featId", "=", "featId", "[", "0", "]", "return", "featId" ]
ID attribute from GFF3 or None if record doesn't have it. Called "Name" rather than "Id" within GA4GH, as there is no guarantee of either uniqueness or existence.
[ "ID", "attribute", "from", "GFF3", "or", "None", "if", "record", "doesn", "t", "have", "it", ".", "Called", "Name", "rather", "than", "Id", "within", "GA4GH", "as", "there", "is", "no", "guarantee", "of", "either", "uniqueness", "or", "existence", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L126-L135
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set._linkFeature
def _linkFeature(self, feature):
    """
    Link a feature with its parents.
    """
    parentNames = feature.attributes.get("Parent")
    if parentNames is None:
        self.roots.add(feature)
    else:
        for parentName in parentNames:
            self._linkToParent(feature, parentName)
python
[ "def", "_linkFeature", "(", "self", ",", "feature", ")", ":", "parentNames", "=", "feature", ".", "attributes", ".", "get", "(", "\"Parent\"", ")", "if", "parentNames", "is", "None", ":", "self", ".", "roots", ".", "add", "(", "feature", ")", "else", ":", "for", "parentName", "in", "parentNames", ":", "self", ".", "_linkToParent", "(", "feature", ",", "parentName", ")" ]
Link a feature with its parents.
[ "Link", "a", "feature", "with", "its", "parents", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L165-L174
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set._linkToParent
def _linkToParent(self, feature, parentName):
    """
    Link a feature to its named parent, recording the feature as a
    child of each part of that parent.
    """
    parentParts = self.byFeatureName.get(parentName)
    if parentParts is None:
        raise GFF3Exception(
            "Parent feature does not exist: {}".format(parentName),
            self.fileName)
    # a parent may be disjoint
    for parentPart in parentParts:
        feature.parents.add(parentPart)
        parentPart.children.add(feature)
python
[ "def", "_linkToParent", "(", "self", ",", "feature", ",", "parentName", ")", ":", "parentParts", "=", "self", ".", "byFeatureName", ".", "get", "(", "parentName", ")", "if", "parentParts", "is", "None", ":", "raise", "GFF3Exception", "(", "\"Parent feature does not exist: {}\"", ".", "format", "(", "parentName", ")", ",", "self", ".", "fileName", ")", "# parent maybe disjoint", "for", "parentPart", "in", "parentParts", ":", "feature", ".", "parents", ".", "add", "(", "parentPart", ")", "parentPart", ".", "children", ".", "add", "(", "feature", ")" ]
Link a feature to its named parent, recording the feature as a child of each part of that parent.
[ "Link", "a", "feature", "with", "its", "children" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L176-L188
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set.linkChildFeaturesToParents
def linkChildFeaturesToParents(self):
    """
    finish loading the set, constructing the tree
    """
    # features may be disjoint
    for featureParts in self.byFeatureName.itervalues():
        for feature in featureParts:
            self._linkFeature(feature)
python
[ "def", "linkChildFeaturesToParents", "(", "self", ")", ":", "# features maybe disjoint", "for", "featureParts", "in", "self", ".", "byFeatureName", ".", "itervalues", "(", ")", ":", "for", "feature", "in", "featureParts", ":", "self", ".", "_linkFeature", "(", "feature", ")" ]
finish loading the set, constructing the tree
[ "finish", "loading", "the", "set", "constructing", "the", "tree" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L190-L197
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set._recSortKey
def _recSortKey(r):
    """
    Sort order for Features, by genomic coordinate, disambiguated
    by feature type (alphabetically).
    """
    return r.seqname, r.start, -r.end, r.type
python
[ "def", "_recSortKey", "(", "r", ")", ":", "return", "r", ".", "seqname", ",", "r", ".", "start", ",", "-", "r", ".", "end", ",", "r", ".", "type" ]
Sort order for Features, by genomic coordinate, disambiguated by feature type (alphabetically).
[ "Sort", "order", "for", "Features", "by", "genomic", "coordinate", "disambiguated", "by", "feature", "type", "(", "alphabetically", ")", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L200-L205
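Note the -r.end term in the key: at the same start coordinate, a larger end sorts first, so enclosing features precede the features they contain. A quick demonstration with bare tuples standing in for records (the coordinates are illustrative):

# (seqname, start, end, type) — larger end sorts first at equal starts
recs = [("chr1", 100, 200, "exon"), ("chr1", 100, 500, "gene")]
recs.sort(key=lambda r: (r[0], r[1], -r[2], r[3]))
assert recs[0][3] == "gene"  # the longer, enclosing feature comes first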
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set._writeRec
def _writeRec(self, fh, rec):
    """
    Writes a single record to a file provided by the filehandle fh.
    """
    fh.write(str(rec) + "\n")
    for child in sorted(rec.children, key=self._recSortKey):
        self._writeRec(fh, child)
python
[ "def", "_writeRec", "(", "self", ",", "fh", ",", "rec", ")", ":", "fh", ".", "write", "(", "str", "(", "rec", ")", "+", "\"\\n\"", ")", "for", "child", "in", "sorted", "(", "rec", ".", "children", ",", "key", "=", "self", ".", "_recSortKey", ")", ":", "self", ".", "_writeRec", "(", "fh", ",", "child", ")" ]
Writes a single record to a file provided by the filehandle fh.
[ "Writes", "a", "single", "record", "to", "a", "file", "provided", "by", "the", "filehandle", "fh", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L207-L213
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Set.write
def write(self, fh):
    """
    Write set to a GFF3 format file.

    :param file fh: file handle for file to write to
    """
    fh.write(GFF3_HEADER + "\n")
    for root in sorted(self.roots, key=self._recSortKey):
        self._writeRec(fh, root)
python
[ "def", "write", "(", "self", ",", "fh", ")", ":", "fh", ".", "write", "(", "GFF3_HEADER", "+", "\"\\n\"", ")", "for", "root", "in", "sorted", "(", "self", ".", "roots", ",", "key", "=", "self", ".", "_recSortKey", ")", ":", "self", ".", "_writeRec", "(", "fh", ",", "root", ")" ]
Write set to a GFF3 format file. :param file fh: file handle for file to write to
[ "Write", "set", "to", "a", "GFF3", "format", "file", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L215-L223
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Parser._open
def _open(self):
    """
    open input file, optionally with decompression
    """
    if self.fileName.endswith(".gz"):
        return gzip.open(self.fileName)
    elif self.fileName.endswith(".bz2"):
        return bz2.BZ2File(self.fileName)
    else:
        return open(self.fileName)
python
[ "def", "_open", "(", "self", ")", ":", "if", "self", ".", "fileName", ".", "endswith", "(", "\".gz\"", ")", ":", "return", "gzip", ".", "open", "(", "self", ".", "fileName", ")", "elif", "self", ".", "fileName", ".", "endswith", "(", "\".bz2\"", ")", ":", "return", "bz2", ".", "BZ2File", "(", "self", ".", "fileName", ")", "else", ":", "return", "open", "(", "self", ".", "fileName", ")" ]
open input file, optionally with decompression
[ "open", "input", "file", "optionally", "with", "decompression" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L240-L249
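The same extension-based dispatch works standalone, since gzip and bz2 file objects share the plain file interface and callers can iterate lines without caring about compression. A sketch in Python 3 spelling, with text mode made explicit (the repo itself targets Python 2):

import bz2
import gzip

def open_maybe_compressed(fileName):
    # Choose a decompressing opener based on the file extension.
    if fileName.endswith(".gz"):
        return gzip.open(fileName, "rt")
    elif fileName.endswith(".bz2"):
        return bz2.open(fileName, "rt")
    return open(fileName)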
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Parser._parseAttrVal
def _parseAttrVal(self, attrStr):
    """
    Returns a tuple of (attr, values); a list of values is returned
    to handle multi-value attributes.
    """
    m = self.SPLIT_ATTR_RE.match(attrStr)
    if m is None:
        raise GFF3Exception(
            "can't parse attribute/value: '" + attrStr + "'",
            self.fileName, self.lineNumber)
    name = urllib.unquote(m.group(1))
    val = m.group(2)
    # Split by comma to separate then unquote.
    # Commas in values must be url encoded.
    return name, [urllib.unquote(v) for v in val.split(',')]
python
[ "def", "_parseAttrVal", "(", "self", ",", "attrStr", ")", ":", "m", "=", "self", ".", "SPLIT_ATTR_RE", ".", "match", "(", "attrStr", ")", "if", "m", "is", "None", ":", "raise", "GFF3Exception", "(", "\"can't parse attribute/value: '\"", "+", "attrStr", "+", "\"'\"", ",", "self", ".", "fileName", ",", "self", ".", "lineNumber", ")", "name", "=", "urllib", ".", "unquote", "(", "m", ".", "group", "(", "1", ")", ")", "val", "=", "m", ".", "group", "(", "2", ")", "# Split by comma to separate then unquote.", "# Commas in values must be url encoded.", "return", "name", ",", "[", "urllib", ".", "unquote", "(", "v", ")", "for", "v", "in", "val", ".", "split", "(", "','", ")", "]" ]
Returns a tuple of (attr, values); a list of values is returned to handle multi-value attributes.
[ "Returns", "tuple", "of", "tuple", "of", "(", "attr", "value", ")", "multiple", "are", "returned", "to", "handle", "multi", "-", "value", "attributes", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L254-L268
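The decode order matters here: GFF3 requires commas inside values to be percent-encoded, so the raw value is split on ',' first and each piece unquoted afterwards. A Python 3 sketch of the same order of operations (urllib.parse.unquote is the Python 3 spelling of the urllib.unquote used above):

from urllib.parse import unquote

raw = "Note=a%2Cb,c"  # one attribute, two values: "a,b" and "c"
name, val = raw.split("=", 1)
values = [unquote(v) for v in val.split(",")]
assert (name, values) == ("Note", ["a,b", "c"])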
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Parser._parseAttrs
def _parseAttrs(self, attrsStr):
    """
    Parse the attributes and values
    """
    attributes = dict()
    for attrStr in self.SPLIT_ATTR_COL_RE.split(attrsStr):
        name, vals = self._parseAttrVal(attrStr)
        if name in attributes:
            raise GFF3Exception(
                "duplicated attribute name: {}".format(name),
                self.fileName, self.lineNumber)
        attributes[name] = vals
    return attributes
python
[ "def", "_parseAttrs", "(", "self", ",", "attrsStr", ")", ":", "attributes", "=", "dict", "(", ")", "for", "attrStr", "in", "self", ".", "SPLIT_ATTR_COL_RE", ".", "split", "(", "attrsStr", ")", ":", "name", ",", "vals", "=", "self", ".", "_parseAttrVal", "(", "attrStr", ")", "if", "name", "in", "attributes", ":", "raise", "GFF3Exception", "(", "\"duplicated attribute name: {}\"", ".", "format", "(", "name", ")", ",", "self", ".", "fileName", ",", "self", ".", "lineNumber", ")", "attributes", "[", "name", "]", "=", "vals", "return", "attributes" ]
Parse the attributes and values
[ "Parse", "the", "attributes", "and", "values" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L272-L284
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Parser._parseRecord
def _parseRecord(self, gff3Set, line):
    """
    Parse one record.
    """
    row = line.split("\t")
    if len(row) != self.GFF3_NUM_COLS:
        raise GFF3Exception(
            "Wrong number of columns, expected {}, got {}".format(
                self.GFF3_NUM_COLS, len(row)),
            self.fileName, self.lineNumber)
    feature = Feature(
        urllib.unquote(row[0]), urllib.unquote(row[1]),
        urllib.unquote(row[2]), int(row[3]), int(row[4]),
        row[5], row[6], row[7], self._parseAttrs(row[8]))
    gff3Set.add(feature)
python
[ "def", "_parseRecord", "(", "self", ",", "gff3Set", ",", "line", ")", ":", "row", "=", "line", ".", "split", "(", "\"\\t\"", ")", "if", "len", "(", "row", ")", "!=", "self", ".", "GFF3_NUM_COLS", ":", "raise", "GFF3Exception", "(", "\"Wrong number of columns, expected {}, got {}\"", ".", "format", "(", "self", ".", "GFF3_NUM_COLS", ",", "len", "(", "row", ")", ")", ",", "self", ".", "fileName", ",", "self", ".", "lineNumber", ")", "feature", "=", "Feature", "(", "urllib", ".", "unquote", "(", "row", "[", "0", "]", ")", ",", "urllib", ".", "unquote", "(", "row", "[", "1", "]", ")", ",", "urllib", ".", "unquote", "(", "row", "[", "2", "]", ")", ",", "int", "(", "row", "[", "3", "]", ")", ",", "int", "(", "row", "[", "4", "]", ")", ",", "row", "[", "5", "]", ",", "row", "[", "6", "]", ",", "row", "[", "7", "]", ",", "self", ".", "_parseAttrs", "(", "row", "[", "8", "]", ")", ")", "gff3Set", ".", "add", "(", "feature", ")" ]
Parse one record.
[ "Parse", "one", "record", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L288-L305
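A GFF3 record is nine tab-separated columns: seqid, source, type, start, end, score, strand, phase, attributes. The split-and-validate pattern above looks like this on a concrete line (the record is illustrative):

line = ("chr1\thavana\tgene\t11869\t14409\t.\t+\t.\t"
        "ID=gene1;Name=DDX11L1")
row = line.split("\t")
assert len(row) == 9                     # GFF3_NUM_COLS
seqid, source, ftype = row[0], row[1], row[2]
start, end = int(row[3]), int(row[4])
assert (seqid, ftype, start, end) == ("chr1", "gene", 11869, 14409)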
ga4gh/ga4gh-server
ga4gh/server/gff3.py
Gff3Parser.parse
def parse(self):
    """
    Run the parse and return the resulting Gff3Set object.
    """
    fh = self._open()
    try:
        gff3Set = Gff3Set(self.fileName)
        for line in fh:
            self.lineNumber += 1
            self._parseLine(gff3Set, line[0:-1])
    finally:
        fh.close()
    gff3Set.linkChildFeaturesToParents()
    return gff3Set
python
[ "def", "parse", "(", "self", ")", ":", "fh", "=", "self", ".", "_open", "(", ")", "try", ":", "gff3Set", "=", "Gff3Set", "(", "self", ".", "fileName", ")", "for", "line", "in", "fh", ":", "self", ".", "lineNumber", "+=", "1", "self", ".", "_parseLine", "(", "gff3Set", ",", "line", "[", "0", ":", "-", "1", "]", ")", "finally", ":", "fh", ".", "close", "(", ")", "gff3Set", ".", "linkChildFeaturesToParents", "(", ")", "return", "gff3Set" ]
Run the parse and return the resulting Gff3Set object.
[ "Run", "the", "parse", "and", "return", "the", "resulting", "Gff3Set", "object", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/gff3.py#L327-L340
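End to end, the parser is constructed on a path, parse() builds and links the feature tree, and write() re-emits it sorted. A hedged usage sketch under Python 2 (the repo's target): the Gff3Parser constructor taking a file name is inferred from the fileName attribute used above, and "example.gff3" is a placeholder.

import sys
from ga4gh.server import gff3

parser = gff3.Gff3Parser("example.gff3")  # .gz / .bz2 also handled
gff3Set = parser.parse()                  # builds and links the tree
gff3Set.write(sys.stdout)                 # re-emit as sorted GFF3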
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.addDataset
def addDataset(self, dataset):
    """
    Adds the specified dataset to this data repository.
    """
    id_ = dataset.getId()
    self._datasetIdMap[id_] = dataset
    self._datasetNameMap[dataset.getLocalId()] = dataset
    self._datasetIds.append(id_)
python
[ "def", "addDataset", "(", "self", ",", "dataset", ")", ":", "id_", "=", "dataset", ".", "getId", "(", ")", "self", ".", "_datasetIdMap", "[", "id_", "]", "=", "dataset", "self", ".", "_datasetNameMap", "[", "dataset", ".", "getLocalId", "(", ")", "]", "=", "dataset", "self", ".", "_datasetIds", ".", "append", "(", "id_", ")" ]
Adds the specified dataset to this data repository.
[ "Adds", "the", "specified", "dataset", "to", "this", "data", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L50-L57
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.addReferenceSet
def addReferenceSet(self, referenceSet):
    """
    Adds the specified reference set to this data repository.
    """
    id_ = referenceSet.getId()
    self._referenceSetIdMap[id_] = referenceSet
    self._referenceSetNameMap[referenceSet.getLocalId()] = referenceSet
    self._referenceSetIds.append(id_)
python
[ "def", "addReferenceSet", "(", "self", ",", "referenceSet", ")", ":", "id_", "=", "referenceSet", ".", "getId", "(", ")", "self", ".", "_referenceSetIdMap", "[", "id_", "]", "=", "referenceSet", "self", ".", "_referenceSetNameMap", "[", "referenceSet", ".", "getLocalId", "(", ")", "]", "=", "referenceSet", "self", ".", "_referenceSetIds", ".", "append", "(", "id_", ")" ]
Adds the specified reference set to this data repository.
[ "Adds", "the", "specified", "reference", "set", "to", "this", "data", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L59-L66
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.addOntology
def addOntology(self, ontology):
    """
    Add an ontology map to this data repository.
    """
    self._ontologyNameMap[ontology.getName()] = ontology
    self._ontologyIdMap[ontology.getId()] = ontology
    self._ontologyIds.append(ontology.getId())
python
[ "def", "addOntology", "(", "self", ",", "ontology", ")", ":", "self", ".", "_ontologyNameMap", "[", "ontology", ".", "getName", "(", ")", "]", "=", "ontology", "self", ".", "_ontologyIdMap", "[", "ontology", ".", "getId", "(", ")", "]", "=", "ontology", "self", ".", "_ontologyIds", ".", "append", "(", "ontology", ".", "getId", "(", ")", ")" ]
Add an ontology map to this data repository.
[ "Add", "an", "ontology", "map", "to", "this", "data", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L68-L74
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getPeer
def getPeer(self, url):
    """
    Select the first peer in the datarepo with the given url simulating
    the behavior of selecting by URL. This is only used during testing.
    """
    peers = filter(lambda x: x.getUrl() == url, self.getPeers())
    if len(peers) == 0:
        raise exceptions.PeerNotFoundException(url)
    return peers[0]
python
[ "def", "getPeer", "(", "self", ",", "url", ")", ":", "peers", "=", "filter", "(", "lambda", "x", ":", "x", ".", "getUrl", "(", ")", "==", "url", ",", "self", ".", "getPeers", "(", ")", ")", "if", "len", "(", "peers", ")", "==", "0", ":", "raise", "exceptions", ".", "PeerNotFoundException", "(", "url", ")", "return", "peers", "[", "0", "]" ]
Select the first peer in the datarepo with the given url simulating the behavior of selecting by URL. This is only used during testing.
[ "Select", "the", "first", "peer", "in", "the", "datarepo", "with", "the", "given", "url", "simulating", "the", "behavior", "of", "selecting", "by", "URL", ".", "This", "is", "only", "used", "during", "testing", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L89-L97
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getDataset
def getDataset(self, id_): """ Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist. """ if id_ not in self._datasetIdMap: raise exceptions.DatasetNotFoundException(id_) return self._datasetIdMap[id_]
python
def getDataset(self, id_): """ Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist. """ if id_ not in self._datasetIdMap: raise exceptions.DatasetNotFoundException(id_) return self._datasetIdMap[id_]
[ "def", "getDataset", "(", "self", ",", "id_", ")", ":", "if", "id_", "not", "in", "self", ".", "_datasetIdMap", ":", "raise", "exceptions", ".", "DatasetNotFoundException", "(", "id_", ")", "return", "self", ".", "_datasetIdMap", "[", "id_", "]" ]
Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist.
[ "Returns", "a", "dataset", "with", "the", "specified", "ID", "or", "raises", "a", "DatasetNotFoundException", "if", "it", "does", "not", "exist", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L119-L126
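A minimal usage sketch for getDataset; `repo` is an already-opened repository and the ID string is illustrative, not a real compound ID:

from ga4gh.server import exceptions  # assumed import path

try:
    dataset = repo.getDataset("ZGF0YXNldC1pZA")  # hypothetical ID
except exceptions.DatasetNotFoundException:
    dataset = None  # the ID was not registered in this repository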
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getDatasetByName
def getDatasetByName(self, name): """ Returns the dataset with the specified name. """ if name not in self._datasetNameMap: raise exceptions.DatasetNameNotFoundException(name) return self._datasetNameMap[name]
python
def getDatasetByName(self, name): """ Returns the dataset with the specified name. """ if name not in self._datasetNameMap: raise exceptions.DatasetNameNotFoundException(name) return self._datasetNameMap[name]
[ "def", "getDatasetByName", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "_datasetNameMap", ":", "raise", "exceptions", ".", "DatasetNameNotFoundException", "(", "name", ")", "return", "self", ".", "_datasetNameMap", "[", "name", "]" ]
Returns the dataset with the specified name.
[ "Returns", "the", "dataset", "with", "the", "specified", "name", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L134-L140
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getOntology
def getOntology(self, id_): """ Returns the ontology with the specified ID. """ if id_ not in self._ontologyIdMap: raise exceptions.OntologyNotFoundException(id_) return self._ontologyIdMap[id_]
python
def getOntology(self, id_): """ Returns the ontology with the specified ID. """ if id_ not in self._ontologyIdMap: raise exceptions.OntologyNotFoundException(id_) return self._ontologyIdMap[id_]
[ "def", "getOntology", "(", "self", ",", "id_", ")", ":", "if", "id_", "not", "in", "self", ".", "_ontologyIdMap", ":", "raise", "exceptions", ".", "OntologyNotFoundException", "(", "id_", ")", "return", "self", ".", "_ontologyIdMap", "[", "id_", "]" ]
Returns the ontology with the specified ID.
[ "Returns", "the", "ontology", "with", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L154-L160
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getOntologyByName
def getOntologyByName(self, name): """ Returns an ontology by name """ if name not in self._ontologyNameMap: raise exceptions.OntologyNameNotFoundException(name) return self._ontologyNameMap[name]
python
def getOntologyByName(self, name): """ Returns an ontology by name """ if name not in self._ontologyNameMap: raise exceptions.OntologyNameNotFoundException(name) return self._ontologyNameMap[name]
[ "def", "getOntologyByName", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "_ontologyNameMap", ":", "raise", "exceptions", ".", "OntologyNameNotFoundException", "(", "name", ")", "return", "self", ".", "_ontologyNameMap", "[", "name", "]" ]
Returns an ontology by name
[ "Returns", "an", "ontology", "by", "name" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L162-L168
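getDataset/getDatasetByName and getOntology/getOntologyByName all follow one membership-check-then-index pattern over a dict; a generic sketch with hypothetical names:

def checked_lookup(mapping, key, not_found_exc):
    # Probe the dict first so a miss raises the domain-specific
    # exception instead of a bare KeyError.
    if key not in mapping:
        raise not_found_exc(key)
    return mapping[key]

# e.g. checked_lookup(self._ontologyNameMap, name,
#                     exceptions.OntologyNameNotFoundException)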
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getReferenceSet
def getReferenceSet(self, id_): """ Retuns the ReferenceSet with the specified ID, or raises a ReferenceSetNotFoundException if it does not exist. """ if id_ not in self._referenceSetIdMap: raise exceptions.ReferenceSetNotFoundException(id_) return self._referenceSetIdMap[id_]
python
def getReferenceSet(self, id_): """ Retuns the ReferenceSet with the specified ID, or raises a ReferenceSetNotFoundException if it does not exist. """ if id_ not in self._referenceSetIdMap: raise exceptions.ReferenceSetNotFoundException(id_) return self._referenceSetIdMap[id_]
[ "def", "getReferenceSet", "(", "self", ",", "id_", ")", ":", "if", "id_", "not", "in", "self", ".", "_referenceSetIdMap", ":", "raise", "exceptions", ".", "ReferenceSetNotFoundException", "(", "id_", ")", "return", "self", ".", "_referenceSetIdMap", "[", "id_", "]" ]
Returns the ReferenceSet with the specified ID, or raises a ReferenceSetNotFoundException if it does not exist.
[ "Returns", "the", "ReferenceSet", "with", "the", "specified", "ID", "or", "raises", "a", "ReferenceSetNotFoundException", "if", "it", "does", "not", "exist", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L176-L183
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getReferenceSetByName
def getReferenceSetByName(self, name): """ Returns the reference set with the specified name. """ if name not in self._referenceSetNameMap: raise exceptions.ReferenceSetNameNotFoundException(name) return self._referenceSetNameMap[name]
python
def getReferenceSetByName(self, name): """ Returns the reference set with the specified name. """ if name not in self._referenceSetNameMap: raise exceptions.ReferenceSetNameNotFoundException(name) return self._referenceSetNameMap[name]
[ "def", "getReferenceSetByName", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "_referenceSetNameMap", ":", "raise", "exceptions", ".", "ReferenceSetNameNotFoundException", "(", "name", ")", "return", "self", ".", "_referenceSetNameMap", "[", "name", "]" ]
Returns the reference set with the specified name.
[ "Returns", "the", "reference", "set", "with", "the", "specified", "name", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L191-L197
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getReadGroupSet
def getReadGroupSet(self, id_): """ Returns the readgroup set with the specified ID. """ compoundId = datamodel.ReadGroupSetCompoundId.parse(id_) dataset = self.getDataset(compoundId.dataset_id) return dataset.getReadGroupSet(id_)
python
def getReadGroupSet(self, id_): """ Returns the readgroup set with the specified ID. """ compoundId = datamodel.ReadGroupSetCompoundId.parse(id_) dataset = self.getDataset(compoundId.dataset_id) return dataset.getReadGroupSet(id_)
[ "def", "getReadGroupSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "ReadGroupSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "return", "dataset", ".", "getReadGroupSet", "(", "id_", ")" ]
Returns the readgroup set with the specified ID.
[ "Returns", "the", "readgroup", "set", "with", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L199-L205
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.getVariantSet
def getVariantSet(self, id_): """ Returns the readgroup set with the specified ID. """ compoundId = datamodel.VariantSetCompoundId.parse(id_) dataset = self.getDataset(compoundId.dataset_id) return dataset.getVariantSet(id_)
python
def getVariantSet(self, id_): """ Returns the readgroup set with the specified ID. """ compoundId = datamodel.VariantSetCompoundId.parse(id_) dataset = self.getDataset(compoundId.dataset_id) return dataset.getVariantSet(id_)
[ "def", "getVariantSet", "(", "self", ",", "id_", ")", ":", "compoundId", "=", "datamodel", ".", "VariantSetCompoundId", ".", "parse", "(", "id_", ")", "dataset", "=", "self", ".", "getDataset", "(", "compoundId", ".", "dataset_id", ")", "return", "dataset", ".", "getVariantSet", "(", "id_", ")" ]
Returns the variant set with the specified ID.
[ "Returns", "the", "variant", "set", "with", "the", "specified", "ID", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L207-L213
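getReadGroupSet and getVariantSet both decode the parent dataset out of the compound ID and delegate to the owning dataset; a usage sketch, where the ID value is hypothetical and `datamodel` is the module datarepo.py already imports:

from ga4gh.server import datamodel  # assumed import path

variant_set_id = "aGVsbG8td29ybGQ"  # a real compound ID would go here
compound = datamodel.VariantSetCompoundId.parse(variant_set_id)
# The compound ID carries its dataset component, so the repository can
# route the lookup without scanning every dataset.
dataset = repo.getDataset(compound.dataset_id)
variant_set = repo.getVariantSet(variant_set_id)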
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.printSummary
def printSummary(self): """ Prints a summary of this data repository to stdout. """ print("Ontologies:") for ontology in self.getOntologys(): print( "", ontology.getOntologyPrefix(), ontology.getName(), ontology.getDataUrl(), sep="\t") print("ReferenceSets:") for referenceSet in self.getReferenceSets(): print( "", referenceSet.getLocalId(), referenceSet.getId(), referenceSet.getDescription(), referenceSet.getDataUrl(), sep="\t") for reference in referenceSet.getReferences(): print( "\t", reference.getLocalId(), reference.getId(), sep="\t") print("Datasets:") for dataset in self.getDatasets(): print( "", dataset.getLocalId(), dataset.getId(), dataset.getDescription(), sep="\t") print("\tReadGroupSets:") for readGroupSet in dataset.getReadGroupSets(): print( "\t", readGroupSet.getLocalId(), readGroupSet.getReferenceSet().getLocalId(), readGroupSet.getId(), readGroupSet.getDataUrl(), sep="\t") for readGroup in readGroupSet.getReadGroups(): print( "\t\t", readGroup.getId(), readGroup.getLocalId(), sep="\t") print("\tVariantSets:") for variantSet in dataset.getVariantSets(): print( "\t", variantSet.getLocalId(), variantSet.getReferenceSet().getLocalId(), variantSet.getId(), sep="\t") if variantSet.getNumVariantAnnotationSets() > 0: print("\t\tVariantAnnotationSets:") for vas in variantSet.getVariantAnnotationSets(): print( "\t\t", vas.getLocalId(), vas.getAnnotationType(), vas.getOntology().getName(), sep="\t") print("\tFeatureSets:") for featureSet in dataset.getFeatureSets(): print( "\t", featureSet.getLocalId(), featureSet.getReferenceSet().getLocalId(), featureSet.getOntology().getName(), featureSet.getId(), sep="\t") print("\tContinuousSets:") for continuousSet in dataset.getContinuousSets(): print( "\t", continuousSet.getLocalId(), continuousSet.getReferenceSet().getLocalId(), continuousSet.getId(), sep="\t") print("\tPhenotypeAssociationSets:") for phenotypeAssociationSet in \ dataset.getPhenotypeAssociationSets(): print( "\t", phenotypeAssociationSet.getLocalId(), phenotypeAssociationSet.getParentContainer().getId(), sep="\t") # TODO - please improve this listing print("\tRnaQuantificationSets:") for rna_quantification_set in dataset.getRnaQuantificationSets(): print( "\t", rna_quantification_set.getLocalId(), rna_quantification_set.getId(), sep="\t") for quant in rna_quantification_set.getRnaQuantifications(): print( "\t\t", quant.getLocalId(), quant._description, ",".join(quant._readGroupIds), ",".join(quant._featureSetIds), sep="\t")
python
def printSummary(self): """ Prints a summary of this data repository to stdout. """ print("Ontologies:") for ontology in self.getOntologys(): print( "", ontology.getOntologyPrefix(), ontology.getName(), ontology.getDataUrl(), sep="\t") print("ReferenceSets:") for referenceSet in self.getReferenceSets(): print( "", referenceSet.getLocalId(), referenceSet.getId(), referenceSet.getDescription(), referenceSet.getDataUrl(), sep="\t") for reference in referenceSet.getReferences(): print( "\t", reference.getLocalId(), reference.getId(), sep="\t") print("Datasets:") for dataset in self.getDatasets(): print( "", dataset.getLocalId(), dataset.getId(), dataset.getDescription(), sep="\t") print("\tReadGroupSets:") for readGroupSet in dataset.getReadGroupSets(): print( "\t", readGroupSet.getLocalId(), readGroupSet.getReferenceSet().getLocalId(), readGroupSet.getId(), readGroupSet.getDataUrl(), sep="\t") for readGroup in readGroupSet.getReadGroups(): print( "\t\t", readGroup.getId(), readGroup.getLocalId(), sep="\t") print("\tVariantSets:") for variantSet in dataset.getVariantSets(): print( "\t", variantSet.getLocalId(), variantSet.getReferenceSet().getLocalId(), variantSet.getId(), sep="\t") if variantSet.getNumVariantAnnotationSets() > 0: print("\t\tVariantAnnotationSets:") for vas in variantSet.getVariantAnnotationSets(): print( "\t\t", vas.getLocalId(), vas.getAnnotationType(), vas.getOntology().getName(), sep="\t") print("\tFeatureSets:") for featureSet in dataset.getFeatureSets(): print( "\t", featureSet.getLocalId(), featureSet.getReferenceSet().getLocalId(), featureSet.getOntology().getName(), featureSet.getId(), sep="\t") print("\tContinuousSets:") for continuousSet in dataset.getContinuousSets(): print( "\t", continuousSet.getLocalId(), continuousSet.getReferenceSet().getLocalId(), continuousSet.getId(), sep="\t") print("\tPhenotypeAssociationSets:") for phenotypeAssociationSet in \ dataset.getPhenotypeAssociationSets(): print( "\t", phenotypeAssociationSet.getLocalId(), phenotypeAssociationSet.getParentContainer().getId(), sep="\t") # TODO - please improve this listing print("\tRnaQuantificationSets:") for rna_quantification_set in dataset.getRnaQuantificationSets(): print( "\t", rna_quantification_set.getLocalId(), rna_quantification_set.getId(), sep="\t") for quant in rna_quantification_set.getRnaQuantifications(): print( "\t\t", quant.getLocalId(), quant._description, ",".join(quant._readGroupIds), ",".join(quant._featureSetIds), sep="\t")
[ "def", "printSummary", "(", "self", ")", ":", "print", "(", "\"Ontologies:\"", ")", "for", "ontology", "in", "self", ".", "getOntologys", "(", ")", ":", "print", "(", "\"\"", ",", "ontology", ".", "getOntologyPrefix", "(", ")", ",", "ontology", ".", "getName", "(", ")", ",", "ontology", ".", "getDataUrl", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"ReferenceSets:\"", ")", "for", "referenceSet", "in", "self", ".", "getReferenceSets", "(", ")", ":", "print", "(", "\"\"", ",", "referenceSet", ".", "getLocalId", "(", ")", ",", "referenceSet", ".", "getId", "(", ")", ",", "referenceSet", ".", "getDescription", "(", ")", ",", "referenceSet", ".", "getDataUrl", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "for", "reference", "in", "referenceSet", ".", "getReferences", "(", ")", ":", "print", "(", "\"\\t\"", ",", "reference", ".", "getLocalId", "(", ")", ",", "reference", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"Datasets:\"", ")", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "print", "(", "\"\"", ",", "dataset", ".", "getLocalId", "(", ")", ",", "dataset", ".", "getId", "(", ")", ",", "dataset", ".", "getDescription", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"\\tReadGroupSets:\"", ")", "for", "readGroupSet", "in", "dataset", ".", "getReadGroupSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "readGroupSet", ".", "getLocalId", "(", ")", ",", "readGroupSet", ".", "getReferenceSet", "(", ")", ".", "getLocalId", "(", ")", ",", "readGroupSet", ".", "getId", "(", ")", ",", "readGroupSet", ".", "getDataUrl", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "for", "readGroup", "in", "readGroupSet", ".", "getReadGroups", "(", ")", ":", "print", "(", "\"\\t\\t\"", ",", "readGroup", ".", "getId", "(", ")", ",", "readGroup", ".", "getLocalId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"\\tVariantSets:\"", ")", "for", "variantSet", "in", "dataset", ".", "getVariantSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "variantSet", ".", "getLocalId", "(", ")", ",", "variantSet", ".", "getReferenceSet", "(", ")", ".", "getLocalId", "(", ")", ",", "variantSet", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "if", "variantSet", ".", "getNumVariantAnnotationSets", "(", ")", ">", "0", ":", "print", "(", "\"\\t\\tVariantAnnotationSets:\"", ")", "for", "vas", "in", "variantSet", ".", "getVariantAnnotationSets", "(", ")", ":", "print", "(", "\"\\t\\t\"", ",", "vas", ".", "getLocalId", "(", ")", ",", "vas", ".", "getAnnotationType", "(", ")", ",", "vas", ".", "getOntology", "(", ")", ".", "getName", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"\\tFeatureSets:\"", ")", "for", "featureSet", "in", "dataset", ".", "getFeatureSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "featureSet", ".", "getLocalId", "(", ")", ",", "featureSet", ".", "getReferenceSet", "(", ")", ".", "getLocalId", "(", ")", ",", "featureSet", ".", "getOntology", "(", ")", ".", "getName", "(", ")", ",", "featureSet", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"\\tContinuousSets:\"", ")", "for", "continuousSet", "in", "dataset", ".", "getContinuousSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "continuousSet", ".", "getLocalId", "(", ")", ",", "continuousSet", ".", "getReferenceSet", "(", ")", ".", "getLocalId", "(", ")", ",", "continuousSet", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "print", "(", "\"\\tPhenotypeAssociationSets:\"", ")", "for", "phenotypeAssociationSet", "in", 
"dataset", ".", "getPhenotypeAssociationSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "phenotypeAssociationSet", ".", "getLocalId", "(", ")", ",", "phenotypeAssociationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "# TODO - please improve this listing", "print", "(", "\"\\tRnaQuantificationSets:\"", ")", "for", "rna_quantification_set", "in", "dataset", ".", "getRnaQuantificationSets", "(", ")", ":", "print", "(", "\"\\t\"", ",", "rna_quantification_set", ".", "getLocalId", "(", ")", ",", "rna_quantification_set", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")", "for", "quant", "in", "rna_quantification_set", ".", "getRnaQuantifications", "(", ")", ":", "print", "(", "\"\\t\\t\"", ",", "quant", ".", "getLocalId", "(", ")", ",", "quant", ".", "_description", ",", "\",\"", ".", "join", "(", "quant", ".", "_readGroupIds", ")", ",", "\",\"", ".", "join", "(", "quant", ".", "_featureSetIds", ")", ",", "sep", "=", "\"\\t\"", ")" ]
Prints a summary of this data repository to stdout.
[ "Prints", "a", "summary", "of", "this", "data", "repository", "to", "stdout", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L215-L300
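printSummary writes straight to stdout with print(); to get the listing as a string (for a test, say), redirect sys.stdout. A Python 2 sketch, since that is what this codebase targets; `repo` is assumed to be an opened repository:

import sys
from StringIO import StringIO  # Python 2; use io.StringIO on Python 3

saved, sys.stdout = sys.stdout, StringIO()
try:
    repo.printSummary()
    summary = sys.stdout.getvalue()
finally:
    sys.stdout = saved  # always restore the real stdout
print(summary.splitlines()[0])  # "Ontologies:"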
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allReadGroups
def allReadGroups(self): """ Return an iterator over all read groups in the data repo """ for dataset in self.getDatasets(): for readGroupSet in dataset.getReadGroupSets(): for readGroup in readGroupSet.getReadGroups(): yield readGroup
python
def allReadGroups(self): """ Return an iterator over all read groups in the data repo """ for dataset in self.getDatasets(): for readGroupSet in dataset.getReadGroupSets(): for readGroup in readGroupSet.getReadGroups(): yield readGroup
[ "def", "allReadGroups", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "readGroupSet", "in", "dataset", ".", "getReadGroupSets", "(", ")", ":", "for", "readGroup", "in", "readGroupSet", ".", "getReadGroups", "(", ")", ":", "yield", "readGroup" ]
Return an iterator over all read groups in the data repo
[ "Return", "an", "iterator", "over", "all", "read", "groups", "in", "the", "data", "repo" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L334-L341
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allFeatures
def allFeatures(self): """ Return an iterator over all features in the data repo """ for dataset in self.getDatasets(): for featureSet in dataset.getFeatureSets(): for feature in featureSet.getFeatures(): yield feature
python
def allFeatures(self): """ Return an iterator over all features in the data repo """ for dataset in self.getDatasets(): for featureSet in dataset.getFeatureSets(): for feature in featureSet.getFeatures(): yield feature
[ "def", "allFeatures", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "featureSet", "in", "dataset", ".", "getFeatureSets", "(", ")", ":", "for", "feature", "in", "featureSet", ".", "getFeatures", "(", ")", ":", "yield", "feature" ]
Return an iterator over all features in the data repo
[ "Return", "an", "iterator", "over", "all", "features", "in", "the", "data", "repo" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L359-L366
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allCallSets
def allCallSets(self): """ Return an iterator over all call sets in the data repo """ for dataset in self.getDatasets(): for variantSet in dataset.getVariantSets(): for callSet in variantSet.getCallSets(): yield callSet
python
def allCallSets(self): """ Return an iterator over all call sets in the data repo """ for dataset in self.getDatasets(): for variantSet in dataset.getVariantSets(): for callSet in variantSet.getCallSets(): yield callSet
[ "def", "allCallSets", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "variantSet", "in", "dataset", ".", "getVariantSets", "(", ")", ":", "for", "callSet", "in", "variantSet", ".", "getCallSets", "(", ")", ":", "yield", "callSet" ]
Return an iterator over all call sets in the data repo
[ "Return", "an", "iterator", "over", "all", "call", "sets", "in", "the", "data", "repo" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L376-L383
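allReadGroups, allFeatures and allCallSets are generators, so traversal is lazy; itertools.islice takes a bounded sample without walking the whole repository. A sketch with `repo` assumed:

import itertools

# Materialise at most ten call sets; the rest of the repository is
# never touched because the generator is consumed lazily.
first_ten = list(itertools.islice(repo.allCallSets(), 10))
for call_set in first_ten:
    print(call_set.getLocalId())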
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allVariantAnnotationSets
def allVariantAnnotationSets(self): """ Return an iterator over all variant annotation sets in the data repo """ for dataset in self.getDatasets(): for variantSet in dataset.getVariantSets(): for vaSet in variantSet.getVariantAnnotationSets(): yield vaSet
python
def allVariantAnnotationSets(self): """ Return an iterator over all variant annotation sets in the data repo """ for dataset in self.getDatasets(): for variantSet in dataset.getVariantSets(): for vaSet in variantSet.getVariantAnnotationSets(): yield vaSet
[ "def", "allVariantAnnotationSets", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "variantSet", "in", "dataset", ".", "getVariantSets", "(", ")", ":", "for", "vaSet", "in", "variantSet", ".", "getVariantAnnotationSets", "(", ")", ":", "yield", "vaSet" ]
Return an iterator over all variant annotation sets in the data repo
[ "Return", "an", "iterator", "over", "all", "variant", "annotation", "sets", "in", "the", "data", "repo" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L385-L393
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allRnaQuantifications
def allRnaQuantifications(self): """ Return an iterator over all rna quantifications """ for dataset in self.getDatasets(): for rnaQuantificationSet in dataset.getRnaQuantificationSets(): for rnaQuantification in \ rnaQuantificationSet.getRnaQuantifications(): yield rnaQuantification
python
def allRnaQuantifications(self): """ Return an iterator over all rna quantifications """ for dataset in self.getDatasets(): for rnaQuantificationSet in dataset.getRnaQuantificationSets(): for rnaQuantification in \ rnaQuantificationSet.getRnaQuantifications(): yield rnaQuantification
[ "def", "allRnaQuantifications", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "rnaQuantificationSet", "in", "dataset", ".", "getRnaQuantificationSets", "(", ")", ":", "for", "rnaQuantification", "in", "rnaQuantificationSet", ".", "getRnaQuantifications", "(", ")", ":", "yield", "rnaQuantification" ]
Return an iterator over all rna quantifications
[ "Return", "an", "iterator", "over", "all", "rna", "quantifications" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L412-L420
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
AbstractDataRepository.allExpressionLevels
def allExpressionLevels(self): """ Return an iterator over all expression levels """ for dataset in self.getDatasets(): for rnaQuantificationSet in dataset.getRnaQuantificationSets(): for rnaQuantification in \ rnaQuantificationSet.getRnaQuantifications(): for expressionLevel in \ rnaQuantification.getExpressionLevels(): yield expressionLevel
python
def allExpressionLevels(self): """ Return an iterator over all expression levels """ for dataset in self.getDatasets(): for rnaQuantificationSet in dataset.getRnaQuantificationSets(): for rnaQuantification in \ rnaQuantificationSet.getRnaQuantifications(): for expressionLevel in \ rnaQuantification.getExpressionLevels(): yield expressionLevel
[ "def", "allExpressionLevels", "(", "self", ")", ":", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "for", "rnaQuantificationSet", "in", "dataset", ".", "getRnaQuantificationSets", "(", ")", ":", "for", "rnaQuantification", "in", "rnaQuantificationSet", ".", "getRnaQuantifications", "(", ")", ":", "for", "expressionLevel", "in", "rnaQuantification", ".", "getExpressionLevels", "(", ")", ":", "yield", "expressionLevel" ]
Return an iterator over all expression levels
[ "Return", "an", "iterator", "over", "all", "expression", "levels" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L422-L432
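Because the nested generators above yield one object at a time, aggregate statistics run in constant memory; a sketch with `repo` assumed:

# Count every expression level in the repo without building a list.
n_levels = sum(1 for _ in repo.allExpressionLevels())
print("expression levels:", n_levels)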
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.getPeer
def getPeer(self, url): """ Finds a peer by URL and return the first peer record with that URL. """ peers = list(models.Peer.select().where(models.Peer.url == url)) if len(peers) == 0: raise exceptions.PeerNotFoundException(url) return peers[0]
python
def getPeer(self, url): """ Finds a peer by URL and return the first peer record with that URL. """ peers = list(models.Peer.select().where(models.Peer.url == url)) if len(peers) == 0: raise exceptions.PeerNotFoundException(url) return peers[0]
[ "def", "getPeer", "(", "self", ",", "url", ")", ":", "peers", "=", "list", "(", "models", ".", "Peer", ".", "select", "(", ")", ".", "where", "(", "models", ".", "Peer", ".", "url", "==", "url", ")", ")", "if", "len", "(", "peers", ")", "==", "0", ":", "raise", "exceptions", ".", "PeerNotFoundException", "(", "url", ")", "return", "peers", "[", "0", "]" ]
Finds a peer by URL and returns the first peer record with that URL.
[ "Finds", "a", "peer", "by", "URL", "and", "returns", "the", "first", "peer", "record", "with", "that", "URL", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L528-L535
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.getPeers
def getPeers(self, offset=0, limit=1000): """ Get the list of peers using an SQL offset and limit. Returns a list of peer datamodel objects. """ select = models.Peer.select().order_by( models.Peer.url).limit(limit).offset(offset) return [peers.Peer(p.url, record=p) for p in select]
python
def getPeers(self, offset=0, limit=1000): """ Get the list of peers using an SQL offset and limit. Returns a list of peer datamodel objects. """ select = models.Peer.select().order_by( models.Peer.url).limit(limit).offset(offset) return [peers.Peer(p.url, record=p) for p in select]
[ "def", "getPeers", "(", "self", ",", "offset", "=", "0", ",", "limit", "=", "1000", ")", ":", "select", "=", "models", ".", "Peer", ".", "select", "(", ")", ".", "order_by", "(", "models", ".", "Peer", ".", "url", ")", ".", "limit", "(", "limit", ")", ".", "offset", "(", "offset", ")", "return", "[", "peers", ".", "Peer", "(", "p", ".", "url", ",", "record", "=", "p", ")", "for", "p", "in", "select", "]" ]
Get the list of peers using an SQL offset and limit. Returns a list of peer datamodel objects.
[ "Get", "the", "list", "of", "peers", "using", "an", "SQL", "offset", "and", "limit", ".", "Returns", "a", "list", "of", "peer", "datamodel", "objects", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L537-L544
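A paging sketch over SqlDataRepository.getPeers; `repo` is an opened SqlDataRepository and the page size is illustrative:

offset, page_size = 0, 100
while True:
    page = repo.getPeers(offset=offset, limit=page_size)
    if not page:
        break  # past the last row: the SELECT returned nothing
    for peer in page:
        print(peer.getUrl())
    offset += page_size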
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.tableToTsv
def tableToTsv(self, model): """ Takes a model class and attempts to create a table in TSV format that can be imported into a spreadsheet program. """ first = True for item in model.select(): if first: header = "".join( ["{}\t".format(x) for x in model._meta.fields.keys()]) print(header) first = False row = "".join( ["{}\t".format( getattr(item, key)) for key in model._meta.fields.keys()]) print(row)
python
def tableToTsv(self, model): """ Takes a model class and attempts to create a table in TSV format that can be imported into a spreadsheet program. """ first = True for item in model.select(): if first: header = "".join( ["{}\t".format(x) for x in model._meta.fields.keys()]) print(header) first = False row = "".join( ["{}\t".format( getattr(item, key)) for key in model._meta.fields.keys()]) print(row)
[ "def", "tableToTsv", "(", "self", ",", "model", ")", ":", "first", "=", "True", "for", "item", "in", "model", ".", "select", "(", ")", ":", "if", "first", ":", "header", "=", "\"\"", ".", "join", "(", "[", "\"{}\\t\"", ".", "format", "(", "x", ")", "for", "x", "in", "model", ".", "_meta", ".", "fields", ".", "keys", "(", ")", "]", ")", "print", "(", "header", ")", "first", "=", "False", "row", "=", "\"\"", ".", "join", "(", "[", "\"{}\\t\"", ".", "format", "(", "getattr", "(", "item", ",", "key", ")", ")", "for", "key", "in", "model", ".", "_meta", ".", "fields", ".", "keys", "(", ")", "]", ")", "print", "(", "row", ")" ]
Takes a model class and attempts to create a table in TSV format that can be imported into a spreadsheet program.
[ "Takes", "a", "model", "class", "and", "attempts", "to", "create", "a", "table", "in", "TSV", "format", "that", "can", "be", "imported", "into", "a", "spreadsheet", "program", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L546-L561
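tableToTsv also prints to stdout, so writing an actual .tsv file means redirecting it; note each emitted row ends with a trailing tab because the cells are formatted as "{}\t" and concatenated. The models import path below is an assumption, not confirmed by this dump:

import sys
from ga4gh.server.repo import models  # assumed location of the peewee models

with open("announcements.tsv", "w") as out:
    saved, sys.stdout = sys.stdout, out
    try:
        repo.tableToTsv(models.Announcement)
    finally:
        sys.stdout = saved  # restore the real stdout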
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.clearAnnouncements
def clearAnnouncements(self): """ Flushes the announcement table. """ try: q = models.Announcement.delete().where( models.Announcement.id > 0) q.execute() except Exception as e: raise exceptions.RepoManagerException(e)
python
def clearAnnouncements(self): """ Flushes the announcement table. """ try: q = models.Announcement.delete().where( models.Announcement.id > 0) q.execute() except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "clearAnnouncements", "(", "self", ")", ":", "try", ":", "q", "=", "models", ".", "Announcement", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Announcement", ".", "id", ">", "0", ")", "q", ".", "execute", "(", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Flushes the announcement table.
[ "Flushes", "the", "announcement", "table", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L569-L578
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertAnnouncement
def insertAnnouncement(self, announcement): """ Adds an announcement to the registry for later analysis. """ url = announcement.get('url', None) try: peers.Peer(url) except: raise exceptions.BadUrlException(url) try: # TODO get more details about the user agent models.Announcement.create( url=announcement.get('url'), attributes=json.dumps(announcement.get('attributes', {})), remote_addr=announcement.get('remote_addr', None), user_agent=announcement.get('user_agent', None)) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertAnnouncement(self, announcement): """ Adds an announcement to the registry for later analysis. """ url = announcement.get('url', None) try: peers.Peer(url) except: raise exceptions.BadUrlException(url) try: # TODO get more details about the user agent models.Announcement.create( url=announcement.get('url'), attributes=json.dumps(announcement.get('attributes', {})), remote_addr=announcement.get('remote_addr', None), user_agent=announcement.get('user_agent', None)) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertAnnouncement", "(", "self", ",", "announcement", ")", ":", "url", "=", "announcement", ".", "get", "(", "'url'", ",", "None", ")", "try", ":", "peers", ".", "Peer", "(", "url", ")", "except", ":", "raise", "exceptions", ".", "BadUrlException", "(", "url", ")", "try", ":", "# TODO get more details about the user agent", "models", ".", "Announcement", ".", "create", "(", "url", "=", "announcement", ".", "get", "(", "'url'", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "announcement", ".", "get", "(", "'attributes'", ",", "{", "}", ")", ")", ",", "remote_addr", "=", "announcement", ".", "get", "(", "'remote_addr'", ",", "None", ")", ",", "user_agent", "=", "announcement", ".", "get", "(", "'user_agent'", ",", "None", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Adds an announcement to the registry for later analysis.
[ "Adds", "an", "announcement", "to", "the", "registry", "for", "later", "analysis", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L580-L597
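insertAnnouncement takes a plain dict; only 'url' is validated (it must construct a peers.Peer), everything else is stored for later analysis. Illustrative values throughout:

repo.insertAnnouncement({
    'url': 'http://1kgenomes.ga4gh.org',            # must be a valid peer URL
    'attributes': {'client': 'example', 'run': 1},  # JSON-serialised on insert
    'remote_addr': '203.0.113.7',
    'user_agent': 'example-client/1.0',
})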
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.open
def open(self, mode=MODE_READ): """ Opens this repo in the specified mode. TODO: figure out the correct semantics of this and document the intended future behaviour as well as the current transitional behaviour. """ if mode not in [MODE_READ, MODE_WRITE]: error = "Open mode must be '{}' or '{}'".format( MODE_READ, MODE_WRITE) raise ValueError(error) self._openMode = mode if mode == MODE_READ: self.assertExists() if mode == MODE_READ: # This is part of the transitional behaviour where # we load the whole DB into memory to get access to # the data model. self.load()
python
def open(self, mode=MODE_READ): """ Opens this repo in the specified mode. TODO: figure out the correct semantics of this and document the intended future behaviour as well as the current transitional behaviour. """ if mode not in [MODE_READ, MODE_WRITE]: error = "Open mode must be '{}' or '{}'".format( MODE_READ, MODE_WRITE) raise ValueError(error) self._openMode = mode if mode == MODE_READ: self.assertExists() if mode == MODE_READ: # This is part of the transitional behaviour where # we load the whole DB into memory to get access to # the data model. self.load()
[ "def", "open", "(", "self", ",", "mode", "=", "MODE_READ", ")", ":", "if", "mode", "not", "in", "[", "MODE_READ", ",", "MODE_WRITE", "]", ":", "error", "=", "\"Open mode must be '{}' or '{}'\"", ".", "format", "(", "MODE_READ", ",", "MODE_WRITE", ")", "raise", "ValueError", "(", "error", ")", "self", ".", "_openMode", "=", "mode", "if", "mode", "==", "MODE_READ", ":", "self", ".", "assertExists", "(", ")", "if", "mode", "==", "MODE_READ", ":", "# This is part of the transitional behaviour where", "# we load the whole DB into memory to get access to", "# the data model.", "self", ".", "load", "(", ")" ]
Opens this repo in the specified mode. TODO: figure out the correct semantics of this and document the intended future behaviour as well as the current transitional behaviour.
[ "Opens", "this", "repo", "in", "the", "specified", "mode", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L599-L618
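A typical read-mode session; the constructor argument and the close() call are assumed from the surrounding SqlDataRepository code rather than shown in this dump:

from ga4gh.server.datarepo import SqlDataRepository, MODE_READ

repo = SqlDataRepository("registry.db")  # hypothetical registry file name
repo.open(mode=MODE_READ)  # read mode also loads the data model into memory
try:
    repo.printSummary()
finally:
    repo.close()  # assumed counterpart to open(), not shown in this dump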
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.verify
def verify(self): """ Verifies that the data in the repository is consistent. """ # TODO this should emit to a log that we can configure so we can # have verbosity levels. We should provide a way to configure # where we look at various chromosomes and so on. This will be # an important debug tool for administrators. for ontology in self.getOntologys(): print( "Verifying Ontology", ontology.getName(), "@", ontology.getDataUrl()) # TODO how do we verify this? Check some well-know SO terms? for referenceSet in self.getReferenceSets(): print( "Verifying ReferenceSet", referenceSet.getLocalId(), "@", referenceSet.getDataUrl()) for reference in referenceSet.getReferences(): length = min(reference.getLength(), 1000) bases = reference.getBases(0, length) assert len(bases) == length print( "\tReading", length, "bases from", reference.getLocalId()) for dataset in self.getDatasets(): print("Verifying Dataset", dataset.getLocalId()) for featureSet in dataset.getFeatureSets(): for referenceSet in self.getReferenceSets(): # TODO cycle through references? reference = referenceSet.getReferences()[0] print( "\tVerifying FeatureSet", featureSet.getLocalId(), "with reference", reference.getLocalId()) length = min(reference.getLength(), 1000) features = featureSet.getFeatures( reference.getLocalId(), 0, length, None, 3) for feature in features: print("\t{}".format(feature)) # for continuousSet in dataset.getContinuousSets(): # -- there is no getContinuous for readGroupSet in dataset.getReadGroupSets(): print( "\tVerifying ReadGroupSet", readGroupSet.getLocalId(), "@", readGroupSet.getDataUrl()) references = readGroupSet.getReferenceSet().getReferences() # TODO should we cycle through the references? Should probably # be an option. reference = references[0] max_alignments = 10 for readGroup in readGroupSet.getReadGroups(): alignments = readGroup.getReadAlignments(reference) for i, alignment in enumerate(alignments): if i == max_alignments: break print( "\t\tRead", i, "alignments from", readGroup.getLocalId()) for variantSet in dataset.getVariantSets(): print("\tVerifying VariantSet", variantSet.getLocalId()) max_variants = 10 max_annotations = 10 refMap = variantSet.getReferenceToDataUrlIndexMap() for referenceName, (dataUrl, indexFile) in refMap.items(): variants = variantSet.getVariants(referenceName, 0, 2**31) for i, variant in enumerate(variants): if i == max_variants: break print( "\t\tRead", i, "variants from reference", referenceName, "@", dataUrl) for annotationSet in variantSet.getVariantAnnotationSets(): print( "\t\tVerifying VariantAnnotationSet", annotationSet.getLocalId()) for referenceName in refMap.keys(): annotations = annotationSet.getVariantAnnotations( referenceName, 0, 2**31) for i, annotation in enumerate(annotations): if i == max_annotations: break print( "\t\t\tRead", i, "annotations from reference", referenceName) for phenotypeAssociationSet \ in dataset.getPhenotypeAssociationSets(): print("\t\tVerifying PhenotypeAssociationSet") print( "\t\t\t", phenotypeAssociationSet.getLocalId(), phenotypeAssociationSet.getParentContainer().getId(), sep="\t")
python
def verify(self): """ Verifies that the data in the repository is consistent. """ # TODO this should emit to a log that we can configure so we can # have verbosity levels. We should provide a way to configure # where we look at various chromosomes and so on. This will be # an important debug tool for administrators. for ontology in self.getOntologys(): print( "Verifying Ontology", ontology.getName(), "@", ontology.getDataUrl()) # TODO how do we verify this? Check some well-know SO terms? for referenceSet in self.getReferenceSets(): print( "Verifying ReferenceSet", referenceSet.getLocalId(), "@", referenceSet.getDataUrl()) for reference in referenceSet.getReferences(): length = min(reference.getLength(), 1000) bases = reference.getBases(0, length) assert len(bases) == length print( "\tReading", length, "bases from", reference.getLocalId()) for dataset in self.getDatasets(): print("Verifying Dataset", dataset.getLocalId()) for featureSet in dataset.getFeatureSets(): for referenceSet in self.getReferenceSets(): # TODO cycle through references? reference = referenceSet.getReferences()[0] print( "\tVerifying FeatureSet", featureSet.getLocalId(), "with reference", reference.getLocalId()) length = min(reference.getLength(), 1000) features = featureSet.getFeatures( reference.getLocalId(), 0, length, None, 3) for feature in features: print("\t{}".format(feature)) # for continuousSet in dataset.getContinuousSets(): # -- there is no getContinuous for readGroupSet in dataset.getReadGroupSets(): print( "\tVerifying ReadGroupSet", readGroupSet.getLocalId(), "@", readGroupSet.getDataUrl()) references = readGroupSet.getReferenceSet().getReferences() # TODO should we cycle through the references? Should probably # be an option. reference = references[0] max_alignments = 10 for readGroup in readGroupSet.getReadGroups(): alignments = readGroup.getReadAlignments(reference) for i, alignment in enumerate(alignments): if i == max_alignments: break print( "\t\tRead", i, "alignments from", readGroup.getLocalId()) for variantSet in dataset.getVariantSets(): print("\tVerifying VariantSet", variantSet.getLocalId()) max_variants = 10 max_annotations = 10 refMap = variantSet.getReferenceToDataUrlIndexMap() for referenceName, (dataUrl, indexFile) in refMap.items(): variants = variantSet.getVariants(referenceName, 0, 2**31) for i, variant in enumerate(variants): if i == max_variants: break print( "\t\tRead", i, "variants from reference", referenceName, "@", dataUrl) for annotationSet in variantSet.getVariantAnnotationSets(): print( "\t\tVerifying VariantAnnotationSet", annotationSet.getLocalId()) for referenceName in refMap.keys(): annotations = annotationSet.getVariantAnnotations( referenceName, 0, 2**31) for i, annotation in enumerate(annotations): if i == max_annotations: break print( "\t\t\tRead", i, "annotations from reference", referenceName) for phenotypeAssociationSet \ in dataset.getPhenotypeAssociationSets(): print("\t\tVerifying PhenotypeAssociationSet") print( "\t\t\t", phenotypeAssociationSet.getLocalId(), phenotypeAssociationSet.getParentContainer().getId(), sep="\t")
[ "def", "verify", "(", "self", ")", ":", "# TODO this should emit to a log that we can configure so we can", "# have verbosity levels. We should provide a way to configure", "# where we look at various chromosomes and so on. This will be", "# an important debug tool for administrators.", "for", "ontology", "in", "self", ".", "getOntologys", "(", ")", ":", "print", "(", "\"Verifying Ontology\"", ",", "ontology", ".", "getName", "(", ")", ",", "\"@\"", ",", "ontology", ".", "getDataUrl", "(", ")", ")", "# TODO how do we verify this? Check some well-know SO terms?", "for", "referenceSet", "in", "self", ".", "getReferenceSets", "(", ")", ":", "print", "(", "\"Verifying ReferenceSet\"", ",", "referenceSet", ".", "getLocalId", "(", ")", ",", "\"@\"", ",", "referenceSet", ".", "getDataUrl", "(", ")", ")", "for", "reference", "in", "referenceSet", ".", "getReferences", "(", ")", ":", "length", "=", "min", "(", "reference", ".", "getLength", "(", ")", ",", "1000", ")", "bases", "=", "reference", ".", "getBases", "(", "0", ",", "length", ")", "assert", "len", "(", "bases", ")", "==", "length", "print", "(", "\"\\tReading\"", ",", "length", ",", "\"bases from\"", ",", "reference", ".", "getLocalId", "(", ")", ")", "for", "dataset", "in", "self", ".", "getDatasets", "(", ")", ":", "print", "(", "\"Verifying Dataset\"", ",", "dataset", ".", "getLocalId", "(", ")", ")", "for", "featureSet", "in", "dataset", ".", "getFeatureSets", "(", ")", ":", "for", "referenceSet", "in", "self", ".", "getReferenceSets", "(", ")", ":", "# TODO cycle through references?", "reference", "=", "referenceSet", ".", "getReferences", "(", ")", "[", "0", "]", "print", "(", "\"\\tVerifying FeatureSet\"", ",", "featureSet", ".", "getLocalId", "(", ")", ",", "\"with reference\"", ",", "reference", ".", "getLocalId", "(", ")", ")", "length", "=", "min", "(", "reference", ".", "getLength", "(", ")", ",", "1000", ")", "features", "=", "featureSet", ".", "getFeatures", "(", "reference", ".", "getLocalId", "(", ")", ",", "0", ",", "length", ",", "None", ",", "3", ")", "for", "feature", "in", "features", ":", "print", "(", "\"\\t{}\"", ".", "format", "(", "feature", ")", ")", "# for continuousSet in dataset.getContinuousSets():", "# -- there is no getContinuous", "for", "readGroupSet", "in", "dataset", ".", "getReadGroupSets", "(", ")", ":", "print", "(", "\"\\tVerifying ReadGroupSet\"", ",", "readGroupSet", ".", "getLocalId", "(", ")", ",", "\"@\"", ",", "readGroupSet", ".", "getDataUrl", "(", ")", ")", "references", "=", "readGroupSet", ".", "getReferenceSet", "(", ")", ".", "getReferences", "(", ")", "# TODO should we cycle through the references? 
Should probably", "# be an option.", "reference", "=", "references", "[", "0", "]", "max_alignments", "=", "10", "for", "readGroup", "in", "readGroupSet", ".", "getReadGroups", "(", ")", ":", "alignments", "=", "readGroup", ".", "getReadAlignments", "(", "reference", ")", "for", "i", ",", "alignment", "in", "enumerate", "(", "alignments", ")", ":", "if", "i", "==", "max_alignments", ":", "break", "print", "(", "\"\\t\\tRead\"", ",", "i", ",", "\"alignments from\"", ",", "readGroup", ".", "getLocalId", "(", ")", ")", "for", "variantSet", "in", "dataset", ".", "getVariantSets", "(", ")", ":", "print", "(", "\"\\tVerifying VariantSet\"", ",", "variantSet", ".", "getLocalId", "(", ")", ")", "max_variants", "=", "10", "max_annotations", "=", "10", "refMap", "=", "variantSet", ".", "getReferenceToDataUrlIndexMap", "(", ")", "for", "referenceName", ",", "(", "dataUrl", ",", "indexFile", ")", "in", "refMap", ".", "items", "(", ")", ":", "variants", "=", "variantSet", ".", "getVariants", "(", "referenceName", ",", "0", ",", "2", "**", "31", ")", "for", "i", ",", "variant", "in", "enumerate", "(", "variants", ")", ":", "if", "i", "==", "max_variants", ":", "break", "print", "(", "\"\\t\\tRead\"", ",", "i", ",", "\"variants from reference\"", ",", "referenceName", ",", "\"@\"", ",", "dataUrl", ")", "for", "annotationSet", "in", "variantSet", ".", "getVariantAnnotationSets", "(", ")", ":", "print", "(", "\"\\t\\tVerifying VariantAnnotationSet\"", ",", "annotationSet", ".", "getLocalId", "(", ")", ")", "for", "referenceName", "in", "refMap", ".", "keys", "(", ")", ":", "annotations", "=", "annotationSet", ".", "getVariantAnnotations", "(", "referenceName", ",", "0", ",", "2", "**", "31", ")", "for", "i", ",", "annotation", "in", "enumerate", "(", "annotations", ")", ":", "if", "i", "==", "max_annotations", ":", "break", "print", "(", "\"\\t\\t\\tRead\"", ",", "i", ",", "\"annotations from reference\"", ",", "referenceName", ")", "for", "phenotypeAssociationSet", "in", "dataset", ".", "getPhenotypeAssociationSets", "(", ")", ":", "print", "(", "\"\\t\\tVerifying PhenotypeAssociationSet\"", ")", "print", "(", "\"\\t\\t\\t\"", ",", "phenotypeAssociationSet", ".", "getLocalId", "(", ")", ",", "phenotypeAssociationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "sep", "=", "\"\\t\"", ")" ]
Verifies that the data in the repository is consistent.
[ "Verifies", "that", "the", "data", "in", "the", "repository", "is", "consistent", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L635-L725
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertOntology
def insertOntology(self, ontology): """ Inserts the specified ontology into this repository. """ try: models.Ontology.create( id=ontology.getName(), name=ontology.getName(), dataurl=ontology.getDataUrl(), ontologyprefix=ontology.getOntologyPrefix()) except Exception: raise exceptions.DuplicateNameException( ontology.getName())
python
def insertOntology(self, ontology): """ Inserts the specified ontology into this repository. """ try: models.Ontology.create( id=ontology.getName(), name=ontology.getName(), dataurl=ontology.getDataUrl(), ontologyprefix=ontology.getOntologyPrefix()) except Exception: raise exceptions.DuplicateNameException( ontology.getName())
[ "def", "insertOntology", "(", "self", ",", "ontology", ")", ":", "try", ":", "models", ".", "Ontology", ".", "create", "(", "id", "=", "ontology", ".", "getName", "(", ")", ",", "name", "=", "ontology", ".", "getName", "(", ")", ",", "dataurl", "=", "ontology", ".", "getDataUrl", "(", ")", ",", "ontologyprefix", "=", "ontology", ".", "getOntologyPrefix", "(", ")", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "ontology", ".", "getName", "(", ")", ")" ]
Inserts the specified ontology into this repository.
[ "Inserts", "the", "specified", "ontology", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L753-L765
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeOntology
def removeOntology(self, ontology): """ Removes the specified ontology term map from this repository. """ q = models.Ontology.delete().where(id == ontology.getId()) q.execute()
python
def removeOntology(self, ontology): """ Removes the specified ontology term map from this repository. """ q = models.Ontology.delete().where(id == ontology.getId()) q.execute()
[ "def", "removeOntology", "(", "self", ",", "ontology", ")", ":", "q", "=", "models", ".", "Ontology", ".", "delete", "(", ")", ".", "where", "(", "id", "==", "ontology", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified ontology term map from this repository.
[ "Removes", "the", "specified", "ontology", "term", "map", "from", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L775-L780
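With the where clause corrected above (the original compared Python's built-in id to the ontology ID, so the DELETE matched nothing), the query follows the usual peewee shape; a generic sketch with an illustrative ID:

# Build a DELETE filtered on the model's own column, then execute;
# peewee's execute() returns the number of rows removed.
q = models.Ontology.delete().where(models.Ontology.id == "so-xl")
rows_deleted = q.execute()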
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertReference
def insertReference(self, reference): """ Inserts the specified reference into this repository. """ models.Reference.create( id=reference.getId(), referencesetid=reference.getParentContainer().getId(), name=reference.getLocalId(), length=reference.getLength(), isderived=reference.getIsDerived(), species=json.dumps(reference.getSpecies()), md5checksum=reference.getMd5Checksum(), sourceaccessions=json.dumps(reference.getSourceAccessions()), sourceuri=reference.getSourceUri())
python
def insertReference(self, reference): """ Inserts the specified reference into this repository. """ models.Reference.create( id=reference.getId(), referencesetid=reference.getParentContainer().getId(), name=reference.getLocalId(), length=reference.getLength(), isderived=reference.getIsDerived(), species=json.dumps(reference.getSpecies()), md5checksum=reference.getMd5Checksum(), sourceaccessions=json.dumps(reference.getSourceAccessions()), sourceuri=reference.getSourceUri())
[ "def", "insertReference", "(", "self", ",", "reference", ")", ":", "models", ".", "Reference", ".", "create", "(", "id", "=", "reference", ".", "getId", "(", ")", ",", "referencesetid", "=", "reference", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "reference", ".", "getLocalId", "(", ")", ",", "length", "=", "reference", ".", "getLength", "(", ")", ",", "isderived", "=", "reference", ".", "getIsDerived", "(", ")", ",", "species", "=", "json", ".", "dumps", "(", "reference", ".", "getSpecies", "(", ")", ")", ",", "md5checksum", "=", "reference", ".", "getMd5Checksum", "(", ")", ",", "sourceaccessions", "=", "json", ".", "dumps", "(", "reference", ".", "getSourceAccessions", "(", ")", ")", ",", "sourceuri", "=", "reference", ".", "getSourceUri", "(", ")", ")" ]
Inserts the specified reference into this repository.
[ "Inserts", "the", "specified", "reference", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L785-L798
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertReferenceSet
def insertReferenceSet(self, referenceSet): """ Inserts the specified referenceSet into this repository. """ try: models.Referenceset.create( id=referenceSet.getId(), name=referenceSet.getLocalId(), description=referenceSet.getDescription(), assemblyid=referenceSet.getAssemblyId(), isderived=referenceSet.getIsDerived(), species=json.dumps(referenceSet.getSpecies()), md5checksum=referenceSet.getMd5Checksum(), sourceaccessions=json.dumps( referenceSet.getSourceAccessions()), sourceuri=referenceSet.getSourceUri(), dataurl=referenceSet.getDataUrl()) for reference in referenceSet.getReferences(): self.insertReference(reference) except Exception: raise exceptions.DuplicateNameException( referenceSet.getLocalId())
python
def insertReferenceSet(self, referenceSet): """ Inserts the specified referenceSet into this repository. """ try: models.Referenceset.create( id=referenceSet.getId(), name=referenceSet.getLocalId(), description=referenceSet.getDescription(), assemblyid=referenceSet.getAssemblyId(), isderived=referenceSet.getIsDerived(), species=json.dumps(referenceSet.getSpecies()), md5checksum=referenceSet.getMd5Checksum(), sourceaccessions=json.dumps( referenceSet.getSourceAccessions()), sourceuri=referenceSet.getSourceUri(), dataurl=referenceSet.getDataUrl()) for reference in referenceSet.getReferences(): self.insertReference(reference) except Exception: raise exceptions.DuplicateNameException( referenceSet.getLocalId())
[ "def", "insertReferenceSet", "(", "self", ",", "referenceSet", ")", ":", "try", ":", "models", ".", "Referenceset", ".", "create", "(", "id", "=", "referenceSet", ".", "getId", "(", ")", ",", "name", "=", "referenceSet", ".", "getLocalId", "(", ")", ",", "description", "=", "referenceSet", ".", "getDescription", "(", ")", ",", "assemblyid", "=", "referenceSet", ".", "getAssemblyId", "(", ")", ",", "isderived", "=", "referenceSet", ".", "getIsDerived", "(", ")", ",", "species", "=", "json", ".", "dumps", "(", "referenceSet", ".", "getSpecies", "(", ")", ")", ",", "md5checksum", "=", "referenceSet", ".", "getMd5Checksum", "(", ")", ",", "sourceaccessions", "=", "json", ".", "dumps", "(", "referenceSet", ".", "getSourceAccessions", "(", ")", ")", ",", "sourceuri", "=", "referenceSet", ".", "getSourceUri", "(", ")", ",", "dataurl", "=", "referenceSet", ".", "getDataUrl", "(", ")", ")", "for", "reference", "in", "referenceSet", ".", "getReferences", "(", ")", ":", "self", ".", "insertReference", "(", "reference", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "referenceSet", ".", "getLocalId", "(", ")", ")" ]
Inserts the specified referenceSet into this repository.
[ "Inserts", "the", "specified", "referenceSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L813-L834
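insertReferenceSet wraps the parent insert and each child insertReference in one try block, so any failure, typically a name collision, surfaces as DuplicateNameException; a usage sketch with `repo` and `referenceSet` assumed:

from ga4gh.server import exceptions  # assumed import path

try:
    repo.insertReferenceSet(referenceSet)
except exceptions.DuplicateNameException:
    print("reference set %s is already registered" % referenceSet.getLocalId())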
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertDataset
def insertDataset(self, dataset): """ Inserts the specified dataset into this repository. """ try: models.Dataset.create( id=dataset.getId(), name=dataset.getLocalId(), description=dataset.getDescription(), attributes=json.dumps(dataset.getAttributes())) except Exception: raise exceptions.DuplicateNameException( dataset.getLocalId())
python
def insertDataset(self, dataset): """ Inserts the specified dataset into this repository. """ try: models.Dataset.create( id=dataset.getId(), name=dataset.getLocalId(), description=dataset.getDescription(), attributes=json.dumps(dataset.getAttributes())) except Exception: raise exceptions.DuplicateNameException( dataset.getLocalId())
[ "def", "insertDataset", "(", "self", ",", "dataset", ")", ":", "try", ":", "models", ".", "Dataset", ".", "create", "(", "id", "=", "dataset", ".", "getId", "(", ")", ",", "name", "=", "dataset", ".", "getLocalId", "(", ")", ",", "description", "=", "dataset", ".", "getDescription", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "dataset", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "dataset", ".", "getLocalId", "(", ")", ")" ]
Inserts the specified dataset into this repository.
[ "Inserts", "the", "specified", "dataset", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L848-L860
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeDataset
def removeDataset(self, dataset): """ Removes the specified dataset from this repository. This performs a cascading removal of all items within this dataset. """ for datasetRecord in models.Dataset.select().where( models.Dataset.id == dataset.getId()): datasetRecord.delete_instance(recursive=True)
python
def removeDataset(self, dataset): """ Removes the specified dataset from this repository. This performs a cascading removal of all items within this dataset. """ for datasetRecord in models.Dataset.select().where( models.Dataset.id == dataset.getId()): datasetRecord.delete_instance(recursive=True)
[ "def", "removeDataset", "(", "self", ",", "dataset", ")", ":", "for", "datasetRecord", "in", "models", ".", "Dataset", ".", "select", "(", ")", ".", "where", "(", "models", ".", "Dataset", ".", "id", "==", "dataset", ".", "getId", "(", ")", ")", ":", "datasetRecord", ".", "delete_instance", "(", "recursive", "=", "True", ")" ]
Removes the specified dataset from this repository. This performs a cascading removal of all items within this dataset.
[ "Removes", "the", "specified", "dataset", "from", "this", "repository", ".", "This", "performs", "a", "cascading", "removal", "of", "all", "items", "within", "this", "dataset", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L862-L869
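delete_instance(recursive=True) is what provides the cascade: peewee follows foreign keys pointing at the row being deleted and removes dependent rows first. A runnable sketch with two hypothetical stand-in models:

import peewee as pw

db = pw.SqliteDatabase(":memory:")

class Dataset(pw.Model):
    name = pw.CharField()

    class Meta:
        database = db

class VariantSet(pw.Model):
    dataset = pw.ForeignKeyField(Dataset, backref="variantsets")

    class Meta:
        database = db

db.create_tables([Dataset, VariantSet])
ds = Dataset.create(name="1kg")
VariantSet.create(dataset=ds)
ds.delete_instance(recursive=True)   # deletes the VariantSet row, then the Dataset row
print(VariantSet.select().count())   # 0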
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removePhenotypeAssociationSet
def removePhenotypeAssociationSet(self, phenotypeAssociationSet): """ Remove a phenotype association set from the repo """ q = models.Phenotypeassociationset.delete().where( models.Phenotypeassociationset.id == phenotypeAssociationSet.getId()) q.execute()
python
def removePhenotypeAssociationSet(self, phenotypeAssociationSet): """ Remove a phenotype association set from the repo """ q = models.Phenotypeassociationset.delete().where( models.Phenotypeassociationset.id == phenotypeAssociationSet.getId()) q.execute()
[ "def", "removePhenotypeAssociationSet", "(", "self", ",", "phenotypeAssociationSet", ")", ":", "q", "=", "models", ".", "Phenotypeassociationset", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Phenotypeassociationset", ".", "id", "==", "phenotypeAssociationSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Remove a phenotype association set from the repo
[ "Remove", "a", "phenotype", "association", "set", "from", "the", "repo" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L871-L878
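This is the non-recursive counterpart: a DELETE query built and executed in one shot. The same shape is reused by the removeFeatureSet, removeContinuousSet, removeBiosample and removeIndividual records below. A minimal runnable sketch (stand-in model, not the real schema):

import peewee as pw

db = pw.SqliteDatabase(":memory:")

class Phenotypeassociationset(pw.Model):
    # stand-in with just an id column
    id = pw.CharField(primary_key=True)

    class Meta:
        database = db

db.create_tables([Phenotypeassociationset])
Phenotypeassociationset.create(id="pas-1")
q = Phenotypeassociationset.delete().where(Phenotypeassociationset.id == "pas-1")
print(q.execute())  # number of rows deleted: 1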
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeFeatureSet
def removeFeatureSet(self, featureSet): """ Removes the specified featureSet from this repository. """ q = models.Featureset.delete().where( models.Featureset.id == featureSet.getId()) q.execute()
python
def removeFeatureSet(self, featureSet): """ Removes the specified featureSet from this repository. """ q = models.Featureset.delete().where( models.Featureset.id == featureSet.getId()) q.execute()
[ "def", "removeFeatureSet", "(", "self", ",", "featureSet", ")", ":", "q", "=", "models", ".", "Featureset", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Featureset", ".", "id", "==", "featureSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified featureSet from this repository.
[ "Removes", "the", "specified", "featureSet", "from", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L880-L886
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeContinuousSet
def removeContinuousSet(self, continuousSet): """ Removes the specified continuousSet from this repository. """ q = models.ContinuousSet.delete().where( models.ContinuousSet.id == continuousSet.getId()) q.execute()
python
def removeContinuousSet(self, continuousSet): """ Removes the specified continuousSet from this repository. """ q = models.ContinuousSet.delete().where( models.ContinuousSet.id == continuousSet.getId()) q.execute()
[ "def", "removeContinuousSet", "(", "self", ",", "continuousSet", ")", ":", "q", "=", "models", ".", "ContinuousSet", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "ContinuousSet", ".", "id", "==", "continuousSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified continuousSet from this repository.
[ "Removes", "the", "specified", "continuousSet", "from", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L888-L894
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertReadGroup
def insertReadGroup(self, readGroup): """ Inserts the specified readGroup into the DB. """ statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats())) experimentJson = json.dumps( protocol.toJsonDict(readGroup.getExperiment())) try: models.Readgroup.create( id=readGroup.getId(), readgroupsetid=readGroup.getParentContainer().getId(), name=readGroup.getLocalId(), predictedinsertedsize=readGroup.getPredictedInsertSize(), samplename=readGroup.getSampleName(), description=readGroup.getDescription(), stats=statsJson, experiment=experimentJson, biosampleid=readGroup.getBiosampleId(), attributes=json.dumps(readGroup.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertReadGroup(self, readGroup): """ Inserts the specified readGroup into the DB. """ statsJson = json.dumps(protocol.toJsonDict(readGroup.getStats())) experimentJson = json.dumps( protocol.toJsonDict(readGroup.getExperiment())) try: models.Readgroup.create( id=readGroup.getId(), readgroupsetid=readGroup.getParentContainer().getId(), name=readGroup.getLocalId(), predictedinsertedsize=readGroup.getPredictedInsertSize(), samplename=readGroup.getSampleName(), description=readGroup.getDescription(), stats=statsJson, experiment=experimentJson, biosampleid=readGroup.getBiosampleId(), attributes=json.dumps(readGroup.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertReadGroup", "(", "self", ",", "readGroup", ")", ":", "statsJson", "=", "json", ".", "dumps", "(", "protocol", ".", "toJsonDict", "(", "readGroup", ".", "getStats", "(", ")", ")", ")", "experimentJson", "=", "json", ".", "dumps", "(", "protocol", ".", "toJsonDict", "(", "readGroup", ".", "getExperiment", "(", ")", ")", ")", "try", ":", "models", ".", "Readgroup", ".", "create", "(", "id", "=", "readGroup", ".", "getId", "(", ")", ",", "readgroupsetid", "=", "readGroup", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "readGroup", ".", "getLocalId", "(", ")", ",", "predictedinsertedsize", "=", "readGroup", ".", "getPredictedInsertSize", "(", ")", ",", "samplename", "=", "readGroup", ".", "getSampleName", "(", ")", ",", "description", "=", "readGroup", ".", "getDescription", "(", ")", ",", "stats", "=", "statsJson", ",", "experiment", "=", "experimentJson", ",", "biosampleid", "=", "readGroup", ".", "getBiosampleId", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "readGroup", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified readGroup into the DB.
[ "Inserts", "the", "specified", "readGroup", "into", "the", "DB", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L907-L927
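getStats() and getExperiment() return protocol-buffer messages, so they are flattened to dicts (protocol.toJsonDict) and stored as JSON text columns. A sketch of the round-trip with a plain dict standing in for the message; the field names are invented for illustration:

import json

# stand-in for protocol.toJsonDict(readGroup.getStats()); keys are illustrative only
statsDict = {"alignedReadCount": 100, "unalignedReadCount": 7}
statsJson = json.dumps(statsDict)          # the value written to the stats column
assert json.loads(statsJson) == statsDict  # a reader decodes it the same way
print(statsJson)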
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeReadGroupSet
def removeReadGroupSet(self, readGroupSet): """ Removes the specified readGroupSet from this repository. This performs a cascading removal of all items within this readGroupSet. """ for readGroupSetRecord in models.Readgroupset.select().where( models.Readgroupset.id == readGroupSet.getId()): readGroupSetRecord.delete_instance(recursive=True)
python
def removeReadGroupSet(self, readGroupSet): """ Removes the specified readGroupSet from this repository. This performs a cascading removal of all items within this readGroupSet. """ for readGroupSetRecord in models.Readgroupset.select().where( models.Readgroupset.id == readGroupSet.getId()): readGroupSetRecord.delete_instance(recursive=True)
[ "def", "removeReadGroupSet", "(", "self", ",", "readGroupSet", ")", ":", "for", "readGroupSetRecord", "in", "models", ".", "Readgroupset", ".", "select", "(", ")", ".", "where", "(", "models", ".", "Readgroupset", ".", "id", "==", "readGroupSet", ".", "getId", "(", ")", ")", ":", "readGroupSetRecord", ".", "delete_instance", "(", "recursive", "=", "True", ")" ]
Removes the specified readGroupSet from this repository. This performs a cascading removal of all items within this readGroupSet.
[ "Removes", "the", "specified", "readGroupSet", "from", "this", "repository", ".", "This", "performs", "a", "cascading", "removal", "of", "all", "items", "within", "this", "readGroupSet", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L929-L936
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeVariantSet
def removeVariantSet(self, variantSet): """ Removes the specified variantSet from this repository. This performs a cascading removal of all items within this variantSet. """ for variantSetRecord in models.Variantset.select().where( models.Variantset.id == variantSet.getId()): variantSetRecord.delete_instance(recursive=True)
python
def removeVariantSet(self, variantSet): """ Removes the specified variantSet from this repository. This performs a cascading removal of all items within this variantSet. """ for variantSetRecord in models.Variantset.select().where( models.Variantset.id == variantSet.getId()): variantSetRecord.delete_instance(recursive=True)
[ "def", "removeVariantSet", "(", "self", ",", "variantSet", ")", ":", "for", "variantSetRecord", "in", "models", ".", "Variantset", ".", "select", "(", ")", ".", "where", "(", "models", ".", "Variantset", ".", "id", "==", "variantSet", ".", "getId", "(", ")", ")", ":", "variantSetRecord", ".", "delete_instance", "(", "recursive", "=", "True", ")" ]
Removes the specified variantSet from this repository. This performs a cascading removal of all items within this variantSet.
[ "Removes", "the", "specified", "variantSet", "from", "this", "repository", ".", "This", "performs", "a", "cascading", "removal", "of", "all", "items", "within", "this", "variantSet", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L938-L945
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeBiosample
def removeBiosample(self, biosample): """ Removes the specified biosample from this repository. """ q = models.Biosample.delete().where( models.Biosample.id == biosample.getId()) q.execute()
python
def removeBiosample(self, biosample): """ Removes the specified biosample from this repository. """ q = models.Biosample.delete().where( models.Biosample.id == biosample.getId()) q.execute()
[ "def", "removeBiosample", "(", "self", ",", "biosample", ")", ":", "q", "=", "models", ".", "Biosample", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Biosample", ".", "id", "==", "biosample", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified biosample from this repository.
[ "Removes", "the", "specified", "biosample", "from", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L947-L953
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeIndividual
def removeIndividual(self, individual): """ Removes the specified individual from this repository. """ q = models.Individual.delete().where( models.Individual.id == individual.getId()) q.execute()
python
def removeIndividual(self, individual): """ Removes the specified individual from this repository. """ q = models.Individual.delete().where( models.Individual.id == individual.getId()) q.execute()
[ "def", "removeIndividual", "(", "self", ",", "individual", ")", ":", "q", "=", "models", ".", "Individual", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Individual", ".", "id", "==", "individual", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified individual from this repository.
[ "Removes", "the", "specified", "individual", "from", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L955-L961
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertReadGroupSet
def insertReadGroupSet(self, readGroupSet): """ Inserts the specified readGroupSet into this repository. """ programsJson = json.dumps( [protocol.toJsonDict(program) for program in readGroupSet.getPrograms()]) statsJson = json.dumps(protocol.toJsonDict(readGroupSet.getStats())) try: models.Readgroupset.create( id=readGroupSet.getId(), datasetid=readGroupSet.getParentContainer().getId(), referencesetid=readGroupSet.getReferenceSet().getId(), name=readGroupSet.getLocalId(), programs=programsJson, stats=statsJson, dataurl=readGroupSet.getDataUrl(), indexfile=readGroupSet.getIndexFile(), attributes=json.dumps(readGroupSet.getAttributes())) for readGroup in readGroupSet.getReadGroups(): self.insertReadGroup(readGroup) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertReadGroupSet(self, readGroupSet): """ Inserts the specified readGroupSet into this repository. """ programsJson = json.dumps( [protocol.toJsonDict(program) for program in readGroupSet.getPrograms()]) statsJson = json.dumps(protocol.toJsonDict(readGroupSet.getStats())) try: models.Readgroupset.create( id=readGroupSet.getId(), datasetid=readGroupSet.getParentContainer().getId(), referencesetid=readGroupSet.getReferenceSet().getId(), name=readGroupSet.getLocalId(), programs=programsJson, stats=statsJson, dataurl=readGroupSet.getDataUrl(), indexfile=readGroupSet.getIndexFile(), attributes=json.dumps(readGroupSet.getAttributes())) for readGroup in readGroupSet.getReadGroups(): self.insertReadGroup(readGroup) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertReadGroupSet", "(", "self", ",", "readGroupSet", ")", ":", "programsJson", "=", "json", ".", "dumps", "(", "[", "protocol", ".", "toJsonDict", "(", "program", ")", "for", "program", "in", "readGroupSet", ".", "getPrograms", "(", ")", "]", ")", "statsJson", "=", "json", ".", "dumps", "(", "protocol", ".", "toJsonDict", "(", "readGroupSet", ".", "getStats", "(", ")", ")", ")", "try", ":", "models", ".", "Readgroupset", ".", "create", "(", "id", "=", "readGroupSet", ".", "getId", "(", ")", ",", "datasetid", "=", "readGroupSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "referencesetid", "=", "readGroupSet", ".", "getReferenceSet", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "readGroupSet", ".", "getLocalId", "(", ")", ",", "programs", "=", "programsJson", ",", "stats", "=", "statsJson", ",", "dataurl", "=", "readGroupSet", ".", "getDataUrl", "(", ")", ",", "indexfile", "=", "readGroupSet", ".", "getIndexFile", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "readGroupSet", ".", "getAttributes", "(", ")", ")", ")", "for", "readGroup", "in", "readGroupSet", ".", "getReadGroups", "(", ")", ":", "self", ".", "insertReadGroup", "(", "readGroup", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified readGroupSet into this repository.
[ "Inserts", "the", "specified", "readGroupSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L978-L1000
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeReferenceSet
def removeReferenceSet(self, referenceSet): """ Removes the specified referenceSet from this repository. This performs a cascading removal of all references within this referenceSet. However, it does not remove any of the ReadGroupSets or items that refer to this ReferenceSet. These must be deleted before the referenceSet can be removed. """ try: q = models.Reference.delete().where( models.Reference.referencesetid == referenceSet.getId()) q.execute() q = models.Referenceset.delete().where( models.Referenceset.id == referenceSet.getId()) q.execute() except Exception: msg = ("Unable to delete reference set. " "There are objects currently in the registry which are " "aligned against it. Remove these objects before removing " "the reference set.") raise exceptions.RepoManagerException(msg)
python
def removeReferenceSet(self, referenceSet): """ Removes the specified referenceSet from this repository. This performs a cascading removal of all references within this referenceSet. However, it does not remove any of the ReadGroupSets or items that refer to this ReferenceSet. These must be deleted before the referenceSet can be removed. """ try: q = models.Reference.delete().where( models.Reference.referencesetid == referenceSet.getId()) q.execute() q = models.Referenceset.delete().where( models.Referenceset.id == referenceSet.getId()) q.execute() except Exception: msg = ("Unable to delete reference set. " "There are objects currently in the registry which are " "aligned against it. Remove these objects before removing " "the reference set.") raise exceptions.RepoManagerException(msg)
[ "def", "removeReferenceSet", "(", "self", ",", "referenceSet", ")", ":", "try", ":", "q", "=", "models", ".", "Reference", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Reference", ".", "referencesetid", "==", "referenceSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")", "q", "=", "models", ".", "Referenceset", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Referenceset", ".", "id", "==", "referenceSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")", "except", "Exception", ":", "msg", "=", "(", "\"Unable to delete reference set. \"", "\"There are objects currently in the registry which are \"", "\"aligned against it. Remove these objects before removing \"", "\"the reference set.\"", ")", "raise", "exceptions", ".", "RepoManagerException", "(", "msg", ")" ]
Removes the specified referenceSet from this repository. This performs a cascading removal of all references within this referenceSet. However, it does not remove any of the ReadGroupSets or items that refer to this ReferenceSet. These must be deleted before the referenceSet can be removed.
[ "Removes", "the", "specified", "referenceSet", "from", "this", "repository", ".", "This", "performs", "a", "cascading", "removal", "of", "all", "references", "within", "this", "referenceSet", ".", "However", "it", "does", "not", "remove", "any", "of", "the", "ReadGroupSets", "or", "items", "that", "refer", "to", "this", "ReferenceSet", ".", "These", "must", "be", "deleted", "before", "the", "referenceSet", "can", "be", "removed", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1002-L1022
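Unlike the recursive removals above, removeReferenceSet deletes the Reference children explicitly and lets any remaining dependency (for example a ReadGroupSet aligned against the set) abort the delete, which is then reported with a helpful message. With SQLite foreign-key enforcement switched on, the failure mode looks like this (hypothetical stand-in models):

import peewee as pw

db = pw.SqliteDatabase(":memory:", pragmas={"foreign_keys": 1})

class Referenceset(pw.Model):
    id = pw.CharField(primary_key=True)

    class Meta:
        database = db

class Readgroupset(pw.Model):
    referenceset = pw.ForeignKeyField(Referenceset)

    class Meta:
        database = db

db.create_tables([Referenceset, Readgroupset])
rs = Referenceset.create(id="rs-1")
Readgroupset.create(referenceset=rs)
try:
    Referenceset.delete().where(Referenceset.id == "rs-1").execute()
except pw.IntegrityError:
    print("still referenced; remove the ReadGroupSet first")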
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertVariantAnnotationSet
def insertVariantAnnotationSet(self, variantAnnotationSet): """ Inserts the specified variantAnnotationSet into this repository. """ analysisJson = json.dumps( protocol.toJsonDict(variantAnnotationSet.getAnalysis())) try: models.Variantannotationset.create( id=variantAnnotationSet.getId(), variantsetid=variantAnnotationSet.getParentContainer().getId(), ontologyid=variantAnnotationSet.getOntology().getId(), name=variantAnnotationSet.getLocalId(), analysis=analysisJson, annotationtype=variantAnnotationSet.getAnnotationType(), created=variantAnnotationSet.getCreationTime(), updated=variantAnnotationSet.getUpdatedTime(), attributes=json.dumps(variantAnnotationSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertVariantAnnotationSet(self, variantAnnotationSet): """ Inserts the specified variantAnnotationSet into this repository. """ analysisJson = json.dumps( protocol.toJsonDict(variantAnnotationSet.getAnalysis())) try: models.Variantannotationset.create( id=variantAnnotationSet.getId(), variantsetid=variantAnnotationSet.getParentContainer().getId(), ontologyid=variantAnnotationSet.getOntology().getId(), name=variantAnnotationSet.getLocalId(), analysis=analysisJson, annotationtype=variantAnnotationSet.getAnnotationType(), created=variantAnnotationSet.getCreationTime(), updated=variantAnnotationSet.getUpdatedTime(), attributes=json.dumps(variantAnnotationSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertVariantAnnotationSet", "(", "self", ",", "variantAnnotationSet", ")", ":", "analysisJson", "=", "json", ".", "dumps", "(", "protocol", ".", "toJsonDict", "(", "variantAnnotationSet", ".", "getAnalysis", "(", ")", ")", ")", "try", ":", "models", ".", "Variantannotationset", ".", "create", "(", "id", "=", "variantAnnotationSet", ".", "getId", "(", ")", ",", "variantsetid", "=", "variantAnnotationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "ontologyid", "=", "variantAnnotationSet", ".", "getOntology", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "variantAnnotationSet", ".", "getLocalId", "(", ")", ",", "analysis", "=", "analysisJson", ",", "annotationtype", "=", "variantAnnotationSet", ".", "getAnnotationType", "(", ")", ",", "created", "=", "variantAnnotationSet", ".", "getCreationTime", "(", ")", ",", "updated", "=", "variantAnnotationSet", ".", "getUpdatedTime", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "variantAnnotationSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified variantAnnotationSet into this repository.
[ "Inserts", "the", "specified", "variantAnnotationSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1040-L1058
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertCallSet
def insertCallSet(self, callSet): """ Inserts the specified callSet into this repository. """ try: models.Callset.create( id=callSet.getId(), name=callSet.getLocalId(), variantsetid=callSet.getParentContainer().getId(), biosampleid=callSet.getBiosampleId(), attributes=json.dumps(callSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertCallSet(self, callSet): """ Inserts the specified callSet into this repository. """ try: models.Callset.create( id=callSet.getId(), name=callSet.getLocalId(), variantsetid=callSet.getParentContainer().getId(), biosampleid=callSet.getBiosampleId(), attributes=json.dumps(callSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertCallSet", "(", "self", ",", "callSet", ")", ":", "try", ":", "models", ".", "Callset", ".", "create", "(", "id", "=", "callSet", ".", "getId", "(", ")", ",", "name", "=", "callSet", ".", "getLocalId", "(", ")", ",", "variantsetid", "=", "callSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "biosampleid", "=", "callSet", ".", "getBiosampleId", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "callSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified callSet into this repository.
[ "Inserts", "the", "specified", "callSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1076-L1088
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertVariantSet
def insertVariantSet(self, variantSet): """ Inserts the specified variantSet into this repository. """ # We cheat a little here with the VariantSetMetadata, and encode these # within the table as a JSON dump. These should really be stored in # their own table metadataJson = json.dumps( [protocol.toJsonDict(metadata) for metadata in variantSet.getMetadata()]) urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap()) try: models.Variantset.create( id=variantSet.getId(), datasetid=variantSet.getParentContainer().getId(), referencesetid=variantSet.getReferenceSet().getId(), name=variantSet.getLocalId(), created=datetime.datetime.now(), updated=datetime.datetime.now(), metadata=metadataJson, dataurlindexmap=urlMapJson, attributes=json.dumps(variantSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e) for callSet in variantSet.getCallSets(): self.insertCallSet(callSet)
python
def insertVariantSet(self, variantSet): """ Inserts the specified variantSet into this repository. """ # We cheat a little here with the VariantSetMetadata, and encode these # within the table as a JSON dump. These should really be stored in # their own table metadataJson = json.dumps( [protocol.toJsonDict(metadata) for metadata in variantSet.getMetadata()]) urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap()) try: models.Variantset.create( id=variantSet.getId(), datasetid=variantSet.getParentContainer().getId(), referencesetid=variantSet.getReferenceSet().getId(), name=variantSet.getLocalId(), created=datetime.datetime.now(), updated=datetime.datetime.now(), metadata=metadataJson, dataurlindexmap=urlMapJson, attributes=json.dumps(variantSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e) for callSet in variantSet.getCallSets(): self.insertCallSet(callSet)
[ "def", "insertVariantSet", "(", "self", ",", "variantSet", ")", ":", "# We cheat a little here with the VariantSetMetadata, and encode these", "# within the table as a JSON dump. These should really be stored in", "# their own table", "metadataJson", "=", "json", ".", "dumps", "(", "[", "protocol", ".", "toJsonDict", "(", "metadata", ")", "for", "metadata", "in", "variantSet", ".", "getMetadata", "(", ")", "]", ")", "urlMapJson", "=", "json", ".", "dumps", "(", "variantSet", ".", "getReferenceToDataUrlIndexMap", "(", ")", ")", "try", ":", "models", ".", "Variantset", ".", "create", "(", "id", "=", "variantSet", ".", "getId", "(", ")", ",", "datasetid", "=", "variantSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "referencesetid", "=", "variantSet", ".", "getReferenceSet", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "variantSet", ".", "getLocalId", "(", ")", ",", "created", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "updated", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ",", "metadata", "=", "metadataJson", ",", "dataurlindexmap", "=", "urlMapJson", ",", "attributes", "=", "json", ".", "dumps", "(", "variantSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")", "for", "callSet", "in", "variantSet", ".", "getCallSets", "(", ")", ":", "self", ".", "insertCallSet", "(", "callSet", ")" ]
Inserts the specified variantSet into this repository.
[ "Inserts", "the", "specified", "variantSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1102-L1127
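The in-code comment is candid about the trade-off: VariantSetMetadata and the reference-to-URL index map are denormalised into JSON text columns instead of getting their own tables. A small sketch of what plausibly lands in those columns (the values are illustrative, not taken from a real VCF):

import json

# stand-ins for the protocol.toJsonDict(metadata) results and the URL index map
metadataJson = json.dumps([{"key": "version", "value": "VCFv4.1", "type": "String"}])
urlMapJson = json.dumps({"chr1": 0, "chr2": 0})
print(metadataJson)  # stored in the metadata column
print(urlMapJson)    # stored in the dataurlindexmap column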
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertFeatureSet
def insertFeatureSet(self, featureSet): """ Inserts the specified featureSet into this repository. """ # TODO add support for info and sourceUri fields. try: models.Featureset.create( id=featureSet.getId(), datasetid=featureSet.getParentContainer().getId(), referencesetid=featureSet.getReferenceSet().getId(), ontologyid=featureSet.getOntology().getId(), name=featureSet.getLocalId(), dataurl=featureSet.getDataUrl(), attributes=json.dumps(featureSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertFeatureSet(self, featureSet): """ Inserts the specified featureSet into this repository. """ # TODO add support for info and sourceUri fields. try: models.Featureset.create( id=featureSet.getId(), datasetid=featureSet.getParentContainer().getId(), referencesetid=featureSet.getReferenceSet().getId(), ontologyid=featureSet.getOntology().getId(), name=featureSet.getLocalId(), dataurl=featureSet.getDataUrl(), attributes=json.dumps(featureSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertFeatureSet", "(", "self", ",", "featureSet", ")", ":", "# TODO add support for info and sourceUri fields.", "try", ":", "models", ".", "Featureset", ".", "create", "(", "id", "=", "featureSet", ".", "getId", "(", ")", ",", "datasetid", "=", "featureSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "referencesetid", "=", "featureSet", ".", "getReferenceSet", "(", ")", ".", "getId", "(", ")", ",", "ontologyid", "=", "featureSet", ".", "getOntology", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "featureSet", ".", "getLocalId", "(", ")", ",", "dataurl", "=", "featureSet", ".", "getDataUrl", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "featureSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified featureSet into this repository.
[ "Inserts", "the", "specified", "featureSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1145-L1160
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertContinuousSet
def insertContinuousSet(self, continuousSet): """ Inserts the specified continuousSet into this repository. """ # TODO add support for info and sourceUri fields. try: models.ContinuousSet.create( id=continuousSet.getId(), datasetid=continuousSet.getParentContainer().getId(), referencesetid=continuousSet.getReferenceSet().getId(), name=continuousSet.getLocalId(), dataurl=continuousSet.getDataUrl(), attributes=json.dumps(continuousSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertContinuousSet(self, continuousSet): """ Inserts the specified continuousSet into this repository. """ # TODO add support for info and sourceUri fields. try: models.ContinuousSet.create( id=continuousSet.getId(), datasetid=continuousSet.getParentContainer().getId(), referencesetid=continuousSet.getReferenceSet().getId(), name=continuousSet.getLocalId(), dataurl=continuousSet.getDataUrl(), attributes=json.dumps(continuousSet.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertContinuousSet", "(", "self", ",", "continuousSet", ")", ":", "# TODO add support for info and sourceUri fields.", "try", ":", "models", ".", "ContinuousSet", ".", "create", "(", "id", "=", "continuousSet", ".", "getId", "(", ")", ",", "datasetid", "=", "continuousSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "referencesetid", "=", "continuousSet", ".", "getReferenceSet", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "continuousSet", ".", "getLocalId", "(", ")", ",", "dataurl", "=", "continuousSet", ".", "getDataUrl", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "continuousSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Inserts the specified continuousSet into this repository.
[ "Inserts", "the", "specified", "continuousSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1186-L1200
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertBiosample
def insertBiosample(self, biosample): """ Inserts the specified Biosample into this repository. """ try: models.Biosample.create( id=biosample.getId(), datasetid=biosample.getParentContainer().getId(), name=biosample.getLocalId(), description=biosample.getDescription(), disease=json.dumps(biosample.getDisease()), created=biosample.getCreated(), updated=biosample.getUpdated(), individualid=biosample.getIndividualId(), attributes=json.dumps(biosample.getAttributes()), individualAgeAtCollection=json.dumps( biosample.getIndividualAgeAtCollection())) except Exception: raise exceptions.DuplicateNameException( biosample.getLocalId(), biosample.getParentContainer().getLocalId())
python
def insertBiosample(self, biosample): """ Inserts the specified Biosample into this repository. """ try: models.Biosample.create( id=biosample.getId(), datasetid=biosample.getParentContainer().getId(), name=biosample.getLocalId(), description=biosample.getDescription(), disease=json.dumps(biosample.getDisease()), created=biosample.getCreated(), updated=biosample.getUpdated(), individualid=biosample.getIndividualId(), attributes=json.dumps(biosample.getAttributes()), individualAgeAtCollection=json.dumps( biosample.getIndividualAgeAtCollection())) except Exception: raise exceptions.DuplicateNameException( biosample.getLocalId(), biosample.getParentContainer().getLocalId())
[ "def", "insertBiosample", "(", "self", ",", "biosample", ")", ":", "try", ":", "models", ".", "Biosample", ".", "create", "(", "id", "=", "biosample", ".", "getId", "(", ")", ",", "datasetid", "=", "biosample", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "biosample", ".", "getLocalId", "(", ")", ",", "description", "=", "biosample", ".", "getDescription", "(", ")", ",", "disease", "=", "json", ".", "dumps", "(", "biosample", ".", "getDisease", "(", ")", ")", ",", "created", "=", "biosample", ".", "getCreated", "(", ")", ",", "updated", "=", "biosample", ".", "getUpdated", "(", ")", ",", "individualid", "=", "biosample", ".", "getIndividualId", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "biosample", ".", "getAttributes", "(", ")", ")", ",", "individualAgeAtCollection", "=", "json", ".", "dumps", "(", "biosample", ".", "getIndividualAgeAtCollection", "(", ")", ")", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "biosample", ".", "getLocalId", "(", ")", ",", "biosample", ".", "getParentContainer", "(", ")", ".", "getLocalId", "(", ")", ")" ]
Inserts the specified Biosample into this repository.
[ "Inserts", "the", "specified", "Biosample", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1217-L1237
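One detail worth noting: optional values survive this encoding, since json.dumps(None) yields the string 'null', so a biosample with no disease or no collection age still round-trips through its text columns. A two-line check:

import json

assert json.dumps(None) == "null"            # what an absent disease is stored as
assert json.loads(json.dumps(None)) is None  # and what a reader gets back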
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertIndividual
def insertIndividual(self, individual): """ Inserts the specified individual into this repository. """ try: models.Individual.create( id=individual.getId(), datasetId=individual.getParentContainer().getId(), name=individual.getLocalId(), description=individual.getDescription(), created=individual.getCreated(), updated=individual.getUpdated(), species=json.dumps(individual.getSpecies()), sex=json.dumps(individual.getSex()), attributes=json.dumps(individual.getAttributes())) except Exception: raise exceptions.DuplicateNameException( individual.getLocalId(), individual.getParentContainer().getLocalId())
python
def insertIndividual(self, individual): """ Inserts the specified individual into this repository. """ try: models.Individual.create( id=individual.getId(), datasetId=individual.getParentContainer().getId(), name=individual.getLocalId(), description=individual.getDescription(), created=individual.getCreated(), updated=individual.getUpdated(), species=json.dumps(individual.getSpecies()), sex=json.dumps(individual.getSex()), attributes=json.dumps(individual.getAttributes())) except Exception: raise exceptions.DuplicateNameException( individual.getLocalId(), individual.getParentContainer().getLocalId())
[ "def", "insertIndividual", "(", "self", ",", "individual", ")", ":", "try", ":", "models", ".", "Individual", ".", "create", "(", "id", "=", "individual", ".", "getId", "(", ")", ",", "datasetId", "=", "individual", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "individual", ".", "getLocalId", "(", ")", ",", "description", "=", "individual", ".", "getDescription", "(", ")", ",", "created", "=", "individual", ".", "getCreated", "(", ")", ",", "updated", "=", "individual", ".", "getUpdated", "(", ")", ",", "species", "=", "json", ".", "dumps", "(", "individual", ".", "getSpecies", "(", ")", ")", ",", "sex", "=", "json", ".", "dumps", "(", "individual", ".", "getSex", "(", ")", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "individual", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "individual", ".", "getLocalId", "(", ")", ",", "individual", ".", "getParentContainer", "(", ")", ".", "getLocalId", "(", ")", ")" ]
Inserts the specified individual into this repository.
[ "Inserts", "the", "specified", "individual", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1251-L1269
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertPhenotypeAssociationSet
def insertPhenotypeAssociationSet(self, phenotypeAssociationSet): """ Inserts the specified phenotype association set into this repository. """ datasetId = phenotypeAssociationSet.getParentContainer().getId() attributes = json.dumps(phenotypeAssociationSet.getAttributes()) try: models.Phenotypeassociationset.create( id=phenotypeAssociationSet.getId(), name=phenotypeAssociationSet.getLocalId(), datasetid=datasetId, dataurl=phenotypeAssociationSet._dataUrl, attributes=attributes) except Exception: raise exceptions.DuplicateNameException( phenotypeAssociationSet.getParentContainer().getId())
python
def insertPhenotypeAssociationSet(self, phenotypeAssociationSet): """ Inserts the specified phenotype association set into this repository. """ datasetId = phenotypeAssociationSet.getParentContainer().getId() attributes = json.dumps(phenotypeAssociationSet.getAttributes()) try: models.Phenotypeassociationset.create( id=phenotypeAssociationSet.getId(), name=phenotypeAssociationSet.getLocalId(), datasetid=datasetId, dataurl=phenotypeAssociationSet._dataUrl, attributes=attributes) except Exception: raise exceptions.DuplicateNameException( phenotypeAssociationSet.getParentContainer().getId())
[ "def", "insertPhenotypeAssociationSet", "(", "self", ",", "phenotypeAssociationSet", ")", ":", "datasetId", "=", "phenotypeAssociationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", "attributes", "=", "json", ".", "dumps", "(", "phenotypeAssociationSet", ".", "getAttributes", "(", ")", ")", "try", ":", "models", ".", "Phenotypeassociationset", ".", "create", "(", "id", "=", "phenotypeAssociationSet", ".", "getId", "(", ")", ",", "name", "=", "phenotypeAssociationSet", ".", "getLocalId", "(", ")", ",", "datasetid", "=", "datasetId", ",", "dataurl", "=", "phenotypeAssociationSet", ".", "_dataUrl", ",", "attributes", "=", "attributes", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "phenotypeAssociationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ")" ]
Inserts the specified phenotype association set into this repository.
[ "Inserts", "the", "specified", "phenotype", "association", "set", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1286-L1301
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertRnaQuantificationSet
def insertRnaQuantificationSet(self, rnaQuantificationSet): """ Inserts the specified rnaQuantificationSet into this repository. """ try: models.Rnaquantificationset.create( id=rnaQuantificationSet.getId(), datasetid=rnaQuantificationSet.getParentContainer().getId(), referencesetid=rnaQuantificationSet.getReferenceSet().getId(), name=rnaQuantificationSet.getLocalId(), dataurl=rnaQuantificationSet.getDataUrl(), attributes=json.dumps(rnaQuantificationSet.getAttributes())) except Exception: raise exceptions.DuplicateNameException( rnaQuantificationSet.getLocalId(), rnaQuantificationSet.getParentContainer().getLocalId())
python
def insertRnaQuantificationSet(self, rnaQuantificationSet): """ Inserts the specified rnaQuantificationSet into this repository. """ try: models.Rnaquantificationset.create( id=rnaQuantificationSet.getId(), datasetid=rnaQuantificationSet.getParentContainer().getId(), referencesetid=rnaQuantificationSet.getReferenceSet().getId(), name=rnaQuantificationSet.getLocalId(), dataurl=rnaQuantificationSet.getDataUrl(), attributes=json.dumps(rnaQuantificationSet.getAttributes())) except Exception: raise exceptions.DuplicateNameException( rnaQuantificationSet.getLocalId(), rnaQuantificationSet.getParentContainer().getLocalId())
[ "def", "insertRnaQuantificationSet", "(", "self", ",", "rnaQuantificationSet", ")", ":", "try", ":", "models", ".", "Rnaquantificationset", ".", "create", "(", "id", "=", "rnaQuantificationSet", ".", "getId", "(", ")", ",", "datasetid", "=", "rnaQuantificationSet", ".", "getParentContainer", "(", ")", ".", "getId", "(", ")", ",", "referencesetid", "=", "rnaQuantificationSet", ".", "getReferenceSet", "(", ")", ".", "getId", "(", ")", ",", "name", "=", "rnaQuantificationSet", ".", "getLocalId", "(", ")", ",", "dataurl", "=", "rnaQuantificationSet", ".", "getDataUrl", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "rnaQuantificationSet", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", ":", "raise", "exceptions", ".", "DuplicateNameException", "(", "rnaQuantificationSet", ".", "getLocalId", "(", ")", ",", "rnaQuantificationSet", ".", "getParentContainer", "(", ")", ".", "getLocalId", "(", ")", ")" ]
Inserts the specified rnaQuantificationSet into this repository.
[ "Inserts", "the", "specified", "rnaQuantificationSet", "into", "this", "repository", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1313-L1328
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removeRnaQuantificationSet
def removeRnaQuantificationSet(self, rnaQuantificationSet): """ Removes the specified rnaQuantificationSet from this repository. This performs a cascading removal of all items within this rnaQuantificationSet. """ q = models.Rnaquantificationset.delete().where( models.Rnaquantificationset.id == rnaQuantificationSet.getId()) q.execute()
python
def removeRnaQuantificationSet(self, rnaQuantificationSet): """ Removes the specified rnaQuantificationSet from this repository. This performs a cascading removal of all items within this rnaQuantificationSet. """ q = models.Rnaquantificationset.delete().where( models.Rnaquantificationset.id == rnaQuantificationSet.getId()) q.execute()
[ "def", "removeRnaQuantificationSet", "(", "self", ",", "rnaQuantificationSet", ")", ":", "q", "=", "models", ".", "Rnaquantificationset", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Rnaquantificationset", ".", "id", "==", "rnaQuantificationSet", ".", "getId", "(", ")", ")", "q", ".", "execute", "(", ")" ]
Removes the specified rnaQuantificationSet from this repository. This performs a cascading removal of all items within this rnaQuantificationSet.
[ "Removes", "the", "specified", "rnaQuantificationSet", "from", "this", "repository", ".", "This", "performs", "a", "cascading", "removal", "of", "all", "items", "within", "this", "rnaQuantificationSet", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1343-L1351
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.insertPeer
def insertPeer(self, peer): """ Accepts a peer datamodel object and adds it to the registry. """ try: models.Peer.create( url=peer.getUrl(), attributes=json.dumps(peer.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
python
def insertPeer(self, peer): """ Accepts a peer datamodel object and adds it to the registry. """ try: models.Peer.create( url=peer.getUrl(), attributes=json.dumps(peer.getAttributes())) except Exception as e: raise exceptions.RepoManagerException(e)
[ "def", "insertPeer", "(", "self", ",", "peer", ")", ":", "try", ":", "models", ".", "Peer", ".", "create", "(", "url", "=", "peer", ".", "getUrl", "(", ")", ",", "attributes", "=", "json", ".", "dumps", "(", "peer", ".", "getAttributes", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "exceptions", ".", "RepoManagerException", "(", "e", ")" ]
Accepts a peer datamodel object and adds it to the registry.
[ "Accepts", "a", "peer", "datamodel", "object", "and", "adds", "it", "to", "the", "registry", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1353-L1362
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.removePeer
def removePeer(self, url): """ Remove peers by URL. """ q = models.Peer.delete().where( models.Peer.url == url) q.execute()
python
def removePeer(self, url): """ Remove peers by URL. """ q = models.Peer.delete().where( models.Peer.url == url) q.execute()
[ "def", "removePeer", "(", "self", ",", "url", ")", ":", "q", "=", "models", ".", "Peer", ".", "delete", "(", ")", ".", "where", "(", "models", ".", "Peer", ".", "url", "==", "url", ")", "q", ".", "execute", "(", ")" ]
Remove peers by URL.
[ "Remove", "peers", "by", "URL", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1364-L1370
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.initialise
def initialise(self): """ Initialise this data repository, creating any necessary directories and file paths. """ self._checkWriteMode() self._createSystemTable() self._createNetworkTables() self._createOntologyTable() self._createReferenceSetTable() self._createReferenceTable() self._createDatasetTable() self._createReadGroupSetTable() self._createReadGroupTable() self._createCallSetTable() self._createVariantSetTable() self._createVariantAnnotationSetTable() self._createFeatureSetTable() self._createContinuousSetTable() self._createBiosampleTable() self._createIndividualTable() self._createPhenotypeAssociationSetTable() self._createRnaQuantificationSetTable()
python
def initialise(self): """ Initialise this data repository, creating any necessary directories and file paths. """ self._checkWriteMode() self._createSystemTable() self._createNetworkTables() self._createOntologyTable() self._createReferenceSetTable() self._createReferenceTable() self._createDatasetTable() self._createReadGroupSetTable() self._createReadGroupTable() self._createCallSetTable() self._createVariantSetTable() self._createVariantAnnotationSetTable() self._createFeatureSetTable() self._createContinuousSetTable() self._createBiosampleTable() self._createIndividualTable() self._createPhenotypeAssociationSetTable() self._createRnaQuantificationSetTable()
[ "def", "initialise", "(", "self", ")", ":", "self", ".", "_checkWriteMode", "(", ")", "self", ".", "_createSystemTable", "(", ")", "self", ".", "_createNetworkTables", "(", ")", "self", ".", "_createOntologyTable", "(", ")", "self", ".", "_createReferenceSetTable", "(", ")", "self", ".", "_createReferenceTable", "(", ")", "self", ".", "_createDatasetTable", "(", ")", "self", ".", "_createReadGroupSetTable", "(", ")", "self", ".", "_createReadGroupTable", "(", ")", "self", ".", "_createCallSetTable", "(", ")", "self", ".", "_createVariantSetTable", "(", ")", "self", ".", "_createVariantAnnotationSetTable", "(", ")", "self", ".", "_createFeatureSetTable", "(", ")", "self", ".", "_createContinuousSetTable", "(", ")", "self", ".", "_createBiosampleTable", "(", ")", "self", ".", "_createIndividualTable", "(", ")", "self", ".", "_createPhenotypeAssociationSetTable", "(", ")", "self", ".", "_createRnaQuantificationSetTable", "(", ")" ]
Initialise this data repository, creating any necessary directories and file paths.
[ "Initialise", "this", "data", "repository", "creating", "any", "necessary", "directories", "and", "file", "paths", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1377-L1399
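Each _create*Table helper presumably issues a CREATE TABLE through peewee; the excerpt does not show their bodies, so the following is a hypothetical sketch of the pattern, including the safe flag that makes re-initialisation idempotent:

import peewee as pw

db = pw.SqliteDatabase(":memory:")

class Dataset(pw.Model):
    name = pw.CharField(unique=True)

    class Meta:
        database = db

# a plausible body for one of the _create*Table helpers
def _createDatasetTable():
    db.create_tables([Dataset], safe=True)  # safe=True: CREATE TABLE IF NOT EXISTS

_createDatasetTable()
_createDatasetTable()  # a second call is a no-op thanks to safe=True
print(db.get_tables())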
ga4gh/ga4gh-server
ga4gh/server/datarepo.py
SqlDataRepository.load
def load(self): """ Loads this data repository into memory. """ self._readSystemTable() self._readOntologyTable() self._readReferenceSetTable() self._readReferenceTable() self._readDatasetTable() self._readReadGroupSetTable() self._readReadGroupTable() self._readVariantSetTable() self._readCallSetTable() self._readVariantAnnotationSetTable() self._readFeatureSetTable() self._readContinuousSetTable() self._readBiosampleTable() self._readIndividualTable() self._readPhenotypeAssociationSetTable() self._readRnaQuantificationSetTable()
python
def load(self): """ Loads this data repository into memory. """ self._readSystemTable() self._readOntologyTable() self._readReferenceSetTable() self._readReferenceTable() self._readDatasetTable() self._readReadGroupSetTable() self._readReadGroupTable() self._readVariantSetTable() self._readCallSetTable() self._readVariantAnnotationSetTable() self._readFeatureSetTable() self._readContinuousSetTable() self._readBiosampleTable() self._readIndividualTable() self._readPhenotypeAssociationSetTable() self._readRnaQuantificationSetTable()
[ "def", "load", "(", "self", ")", ":", "self", ".", "_readSystemTable", "(", ")", "self", ".", "_readOntologyTable", "(", ")", "self", ".", "_readReferenceSetTable", "(", ")", "self", ".", "_readReferenceTable", "(", ")", "self", ".", "_readDatasetTable", "(", ")", "self", ".", "_readReadGroupSetTable", "(", ")", "self", ".", "_readReadGroupTable", "(", ")", "self", ".", "_readVariantSetTable", "(", ")", "self", ".", "_readCallSetTable", "(", ")", "self", ".", "_readVariantAnnotationSetTable", "(", ")", "self", ".", "_readFeatureSetTable", "(", ")", "self", ".", "_readContinuousSetTable", "(", ")", "self", ".", "_readBiosampleTable", "(", ")", "self", ".", "_readIndividualTable", "(", ")", "self", ".", "_readPhenotypeAssociationSetTable", "(", ")", "self", ".", "_readRnaQuantificationSetTable", "(", ")" ]
Loads this data repository into memory.
[ "Loads", "this", "data", "repository", "into", "memory", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datarepo.py#L1421-L1440
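load() mirrors initialise(): one _read*Table pass per table, with parents read before children (reference sets before references, variant sets before call sets) so container objects exist before their contents are attached. Presumably each reader iterates a peewee SELECT and rebuilds one datamodel object per row; a hypothetical sketch of that shape:

import peewee as pw

db = pw.SqliteDatabase(":memory:")

class Dataset(pw.Model):
    name = pw.CharField()

    class Meta:
        database = db

db.create_tables([Dataset])
Dataset.create(name="1kg")

# hypothetical shape of a _read*Table helper: iterate rows, build objects
def _readDatasetTable():
    for record in Dataset.select():
        print("loading dataset", record.name)  # the repo builds a datamodel object here

_readDatasetTable()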
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet.populateFromRow
def populateFromRow(self, featureSetRecord): """ Populates the instance variables of this FeatureSet from the specified DB row. """ self._dbFilePath = featureSetRecord.dataurl self.setAttributesJson(featureSetRecord.attributes) self.populateFromFile(self._dbFilePath)
python
def populateFromRow(self, featureSetRecord): """ Populates the instance variables of this FeatureSet from the specified DB row. """ self._dbFilePath = featureSetRecord.dataurl self.setAttributesJson(featureSetRecord.attributes) self.populateFromFile(self._dbFilePath)
[ "def", "populateFromRow", "(", "self", ",", "featureSetRecord", ")", ":", "self", ".", "_dbFilePath", "=", "featureSetRecord", ".", "dataurl", "self", ".", "setAttributesJson", "(", "featureSetRecord", ".", "attributes", ")", "self", ".", "populateFromFile", "(", "self", ".", "_dbFilePath", ")" ]
Populates the instance variables of this FeatureSet from the specified DB row.
[ "Populates", "the", "instance", "variables", "of", "this", "FeatureSet", "from", "the", "specified", "DB", "row", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L48-L55
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet.populateFromFile
def populateFromFile(self, dataUrl): """ Populates the instance variables of this FeatureSet from the specified data URL. Initializes the dataset, using the passed dict of sources [{source,format}]. See rdflib.parse() for more. If path is set, this backend will load itself. """ self._dbFilePath = dataUrl # initialize graph self._rdfGraph = rdflib.ConjunctiveGraph() # save the path self._dataUrl = dataUrl self._scanDataFiles(self._dataUrl, ['*.ttl']) # extract version cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl") versionInfo = rdflib.URIRef( u'http://www.w3.org/2002/07/owl#versionInfo') self._version = None for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)): self._version = obj.toPython() # setup location cache self._initializeLocationCache()
python
def populateFromFile(self, dataUrl): """ Populates the instance variables of this FeatureSet from the specified data URL. Initializes the dataset, using the passed dict of sources [{source,format}]. See rdflib.parse() for more. If path is set, this backend will load itself. """ self._dbFilePath = dataUrl # initialize graph self._rdfGraph = rdflib.ConjunctiveGraph() # save the path self._dataUrl = dataUrl self._scanDataFiles(self._dataUrl, ['*.ttl']) # extract version cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl") versionInfo = rdflib.URIRef( u'http://www.w3.org/2002/07/owl#versionInfo') self._version = None for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)): self._version = obj.toPython() # setup location cache self._initializeLocationCache()
[ "def", "populateFromFile", "(", "self", ",", "dataUrl", ")", ":", "self", ".", "_dbFilePath", "=", "dataUrl", "# initialize graph", "self", ".", "_rdfGraph", "=", "rdflib", ".", "ConjunctiveGraph", "(", ")", "# save the path", "self", ".", "_dataUrl", "=", "dataUrl", "self", ".", "_scanDataFiles", "(", "self", ".", "_dataUrl", ",", "[", "'*.ttl'", "]", ")", "# extract version", "cgdTTL", "=", "rdflib", ".", "URIRef", "(", "\"http://data.monarchinitiative.org/ttl/cgd.ttl\"", ")", "versionInfo", "=", "rdflib", ".", "URIRef", "(", "u'http://www.w3.org/2002/07/owl#versionInfo'", ")", "self", ".", "_version", "=", "None", "for", "_", ",", "_", ",", "obj", "in", "self", ".", "_rdfGraph", ".", "triples", "(", "(", "cgdTTL", ",", "versionInfo", ",", "None", ")", ")", ":", "self", ".", "_version", "=", "obj", ".", "toPython", "(", ")", "# setup location cache", "self", ".", "_initializeLocationCache", "(", ")" ]
Populates the instance variables of this FeatureSet from the specified data URL. Initializes the dataset, using the passed dict of sources [{source,format}]. See rdflib.parse() for more. If path is set, this backend will load itself.
[ "Populates", "the", "instance", "variables", "of", "this", "FeatureSet", "from", "the", "specified", "data", "URL", ".", "Initializes", "the", "dataset", "using", "the", "passed", "dict", "of", "sources", "[", "{", "source", "format", "}", "]", ".", "See", "rdflib", ".", "parse", "()", "for", "more", ".", "If", "path", "is", "set", "this", "backend", "will", "load", "itself", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L57-L81
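The version lookup is a plain triple-pattern match: any (cgd.ttl, owl:versionInfo, ?) triple in the loaded graph matches, and the last one wins. A self-contained rdflib sketch with an inline Turtle snippet; the version literal is invented for illustration:

import rdflib

graph = rdflib.ConjunctiveGraph()
graph.parse(data="""
    @prefix owl: <http://www.w3.org/2002/07/owl#> .
    <http://data.monarchinitiative.org/ttl/cgd.ttl> owl:versionInfo "2016-10-01" .
""", format="turtle")

cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl")
versionInfo = rdflib.URIRef("http://www.w3.org/2002/07/owl#versionInfo")
version = None
for _, _, obj in graph.triples((cgdTTL, versionInfo, None)):
    version = obj.toPython()  # rdflib Literal -> native str
print(version)  # 2016-10-01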
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet.getFeature
def getFeature(self, compoundId): """ Find a feature and return its ga4gh representation, using compoundId as featureId. """ feature = self._getFeatureById(compoundId.featureId) feature.id = str(compoundId) return feature
python
def getFeature(self, compoundId): """ Find a feature and return its ga4gh representation, using compoundId as featureId. """ feature = self._getFeatureById(compoundId.featureId) feature.id = str(compoundId) return feature
[ "def", "getFeature", "(", "self", ",", "compoundId", ")", ":", "feature", "=", "self", ".", "_getFeatureById", "(", "compoundId", ".", "featureId", ")", "feature", ".", "id", "=", "str", "(", "compoundId", ")", "return", "feature" ]
Find a feature and return its ga4gh representation, using compoundId as featureId.
[ "Find", "a", "feature", "and", "return", "its", "ga4gh", "representation", "using", "compoundId", "as", "featureId", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L84-L91
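The id swap is the whole trick: internally, features are keyed by their native RDF URI, while clients see an opaque compound id that also encodes the containing feature set, and str(compoundId) re-serialises it for the response. A toy sketch of such a two-level id scheme; the encoding below is invented, and the real CompoundId class is more involved:

import base64
import json

def toCompoundId(featureSetId, nativeFeatureId):
    # toy encoding: pack both parts into one opaque token
    raw = json.dumps([featureSetId, nativeFeatureId])
    return base64.urlsafe_b64encode(raw.encode()).decode()

def fromCompoundId(compoundId):
    # reverse of the toy encoding above
    return json.loads(base64.urlsafe_b64decode(compoundId.encode()).decode())

token = toCompoundId("cgd", "http://ohsu.edu/cgd/27d2169c")
print(token)
print(fromCompoundId(token))  # ['cgd', 'http://ohsu.edu/cgd/27d2169c']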
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet._getFeatureById
def _getFeatureById(self, featureId): """ find a feature and return ga4gh representation, use 'native' id as featureId """ featureRef = rdflib.URIRef(featureId) featureDetails = self._detailTuples([featureRef]) feature = {} for detail in featureDetails: feature[detail['predicate']] = [] for detail in featureDetails: feature[detail['predicate']].append(detail['object']) pbFeature = protocol.Feature() term = protocol.OntologyTerm() # Schema for feature only supports one type of `type` # here we default to first OBO defined for featureType in sorted(feature[TYPE]): if "obolibrary" in featureType: term.term = self._featureTypeLabel(featureType) term.term_id = featureType pbFeature.feature_type.MergeFrom(term) break pbFeature.id = featureId # Schema for feature only supports one type of `name` `symbol` # here we default to shortest for symbol and longest for name feature[LABEL].sort(key=len) pbFeature.gene_symbol = feature[LABEL][0] pbFeature.name = feature[LABEL][-1] pbFeature.attributes.MergeFrom(protocol.Attributes()) for key in feature: for val in sorted(feature[key]): pbFeature.attributes.attr[key].values.add().string_value = val if featureId in self._locationMap: location = self._locationMap[featureId] pbFeature.reference_name = location["chromosome"] pbFeature.start = location["begin"] pbFeature.end = location["end"] return pbFeature
python
def _getFeatureById(self, featureId): """ find a feature and return ga4gh representation, use 'native' id as featureId """ featureRef = rdflib.URIRef(featureId) featureDetails = self._detailTuples([featureRef]) feature = {} for detail in featureDetails: feature[detail['predicate']] = [] for detail in featureDetails: feature[detail['predicate']].append(detail['object']) pbFeature = protocol.Feature() term = protocol.OntologyTerm() # Schema for feature only supports one type of `type` # here we default to first OBO defined for featureType in sorted(feature[TYPE]): if "obolibrary" in featureType: term.term = self._featureTypeLabel(featureType) term.term_id = featureType pbFeature.feature_type.MergeFrom(term) break pbFeature.id = featureId # Schema for feature only supports one type of `name` `symbol` # here we default to shortest for symbol and longest for name feature[LABEL].sort(key=len) pbFeature.gene_symbol = feature[LABEL][0] pbFeature.name = feature[LABEL][-1] pbFeature.attributes.MergeFrom(protocol.Attributes()) for key in feature: for val in sorted(feature[key]): pbFeature.attributes.attr[key].values.add().string_value = val if featureId in self._locationMap: location = self._locationMap[featureId] pbFeature.reference_name = location["chromosome"] pbFeature.start = location["begin"] pbFeature.end = location["end"] return pbFeature
[ "def", "_getFeatureById", "(", "self", ",", "featureId", ")", ":", "featureRef", "=", "rdflib", ".", "URIRef", "(", "featureId", ")", "featureDetails", "=", "self", ".", "_detailTuples", "(", "[", "featureRef", "]", ")", "feature", "=", "{", "}", "for", "detail", "in", "featureDetails", ":", "feature", "[", "detail", "[", "'predicate'", "]", "]", "=", "[", "]", "for", "detail", "in", "featureDetails", ":", "feature", "[", "detail", "[", "'predicate'", "]", "]", ".", "append", "(", "detail", "[", "'object'", "]", ")", "pbFeature", "=", "protocol", ".", "Feature", "(", ")", "term", "=", "protocol", ".", "OntologyTerm", "(", ")", "# Schema for feature only supports one type of `type`", "# here we default to first OBO defined", "for", "featureType", "in", "sorted", "(", "feature", "[", "TYPE", "]", ")", ":", "if", "\"obolibrary\"", "in", "featureType", ":", "term", ".", "term", "=", "self", ".", "_featureTypeLabel", "(", "featureType", ")", "term", ".", "term_id", "=", "featureType", "pbFeature", ".", "feature_type", ".", "MergeFrom", "(", "term", ")", "break", "pbFeature", ".", "id", "=", "featureId", "# Schema for feature only supports one type of `name` `symbol`", "# here we default to shortest for symbol and longest for name", "feature", "[", "LABEL", "]", ".", "sort", "(", "key", "=", "len", ")", "pbFeature", ".", "gene_symbol", "=", "feature", "[", "LABEL", "]", "[", "0", "]", "pbFeature", ".", "name", "=", "feature", "[", "LABEL", "]", "[", "-", "1", "]", "pbFeature", ".", "attributes", ".", "MergeFrom", "(", "protocol", ".", "Attributes", "(", ")", ")", "for", "key", "in", "feature", ":", "for", "val", "in", "sorted", "(", "feature", "[", "key", "]", ")", ":", "pbFeature", ".", "attributes", ".", "attr", "[", "key", "]", ".", "values", ".", "add", "(", ")", ".", "string_value", "=", "val", "if", "featureId", "in", "self", ".", "_locationMap", ":", "location", "=", "self", ".", "_locationMap", "[", "featureId", "]", "pbFeature", ".", "reference_name", "=", "location", "[", "\"chromosome\"", "]", "pbFeature", ".", "start", "=", "location", "[", "\"begin\"", "]", "pbFeature", ".", "end", "=", "location", "[", "\"end\"", "]", "return", "pbFeature" ]
find a feature and return ga4gh representation, use 'native' id as featureId
[ "find", "a", "feature", "and", "return", "ga4gh", "representation", "use", "native", "id", "as", "featureId" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L93-L137
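The predicate-grouping step in _getFeatureById is a plain multimap build; a self-contained sketch of just that step, with invented detail tuples:

from collections import defaultdict

details = [
    {"predicate": "rdfs:label", "object": "BRAF wt Allele"},
    {"predicate": "rdfs:label", "object": "BRAF"},
]

feature = defaultdict(list)
for detail in details:
    feature[detail["predicate"]].append(detail["object"])

# Shortest label becomes the symbol, longest the name, as in the method.
labels = sorted(feature["rdfs:label"], key=len)
symbol, name = labels[0], labels[-1]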
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet._filterSearchFeaturesRequest
def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name, start, end): """ formulate a sparql query string based on parameters """ query = self._baseQuery() filters = [] location = self._findLocation(reference_name, start, end) if location: filters.append("?feature = <{}>".format(location)) if gene_symbol: filters.append( 'regex(?feature_label, "{}")'.format(gene_symbol)) if name: filters.append( 'regex(?feature_label, "{}")'.format(name)) # apply filters filter = "FILTER ({})".format(' && '.join(filters)) if len(filters) == 0: filter = "" query = query.replace("#%FILTER%", filter) return query
python
def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name, start, end): """ formulate a sparql query string based on parameters """ query = self._baseQuery() filters = [] location = self._findLocation(reference_name, start, end) if location: filters.append("?feature = <{}>".format(location)) if gene_symbol: filters.append( 'regex(?feature_label, "{}")'.format(gene_symbol)) if name: filters.append( 'regex(?feature_label, "{}")'.format(name)) # apply filters filter = "FILTER ({})".format(' && '.join(filters)) if len(filters) == 0: filter = "" query = query.replace("#%FILTER%", filter) return query
[ "def", "_filterSearchFeaturesRequest", "(", "self", ",", "reference_name", ",", "gene_symbol", ",", "name", ",", "start", ",", "end", ")", ":", "query", "=", "self", ".", "_baseQuery", "(", ")", "filters", "=", "[", "]", "location", "=", "self", ".", "_findLocation", "(", "reference_name", ",", "start", ",", "end", ")", "if", "location", ":", "filters", ".", "append", "(", "\"?feature = <{}>\"", ".", "format", "(", "location", ")", ")", "if", "gene_symbol", ":", "filters", ".", "append", "(", "'regex(?feature_label, \"{}\")'", ".", "format", "(", "gene_symbol", ")", ")", "if", "name", ":", "filters", ".", "append", "(", "'regex(?feature_label, \"{}\")'", ".", "format", "(", "name", ")", ")", "# apply filters", "filter", "=", "\"FILTER ({})\"", ".", "format", "(", "' && '", ".", "join", "(", "filters", ")", ")", "if", "len", "(", "filters", ")", "==", "0", ":", "filter", "=", "\"\"", "query", "=", "query", ".", "replace", "(", "\"#%FILTER%\"", ",", "filter", ")", "return", "query" ]
formulate a sparql query string based on parameters
[ "formulate", "a", "sparql", "query", "string", "based", "on", "parameters" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L184-L205
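The FILTER-assembly idiom above (including the fixed gene_symbol substitution) reduces to a few lines; the query template and filter values here are illustrative:

def build_filter(clauses):
    # No clauses means the FILTER is dropped entirely, as in the method.
    if not clauses:
        return ""
    return "FILTER ({})".format(" && ".join(clauses))

template = "SELECT ?feature WHERE { ?feature ?p ?o . #%FILTER% }"
clauses = ['regex(?feature_label, "KIT")']
query = template.replace("#%FILTER%", build_filter(clauses))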
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet._findLocation
def _findLocation(self, reference_name, start, end): """ return a location key from the locationMap """ try: # TODO - sequence_annotations does not have build? return self._locationMap['hg19'][reference_name][start][end] except KeyError: return None
python
def _findLocation(self, reference_name, start, end): """ return a location key from the locationMap """ try: # TODO - sequence_annotations does not have build? return self._locationMap['hg19'][reference_name][start][end] except KeyError: return None
[ "def", "_findLocation", "(", "self", ",", "reference_name", ",", "start", ",", "end", ")", ":", "try", ":", "# TODO - sequence_annotations does not have build?", "return", "self", ".", "_locationMap", "[", "'hg19'", "]", "[", "reference_name", "]", "[", "start", "]", "[", "end", "]", "except", "KeyError", ":", "return", "None" ]
return a location key from the locationMap
[ "return", "a", "location", "key", "from", "the", "locationMap" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L207-L215
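The lookup is a four-level nested-dict walk; with the narrowed except KeyError the fallback reads like this (the map contents are invented):

def find_location(location_map, build, reference_name, start, end):
    # A miss at any nesting level means no known location.
    try:
        return location_map[build][reference_name][start][end]
    except KeyError:
        return None

location_map = {"hg19": {"chr1": {100: {200: "location/1"}}}}
assert find_location(location_map, "hg19", "chr1", 100, 200) == "location/1"
assert find_location(location_map, "hg19", "chrX", 100, 200) is None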
ga4gh/ga4gh-server
ga4gh/server/datamodel/genotype_phenotype_featureset.py
PhenotypeAssociationFeatureSet._initializeLocationCache
def _initializeLocationCache(self): """ CGD uses Faldo ontology for locations, it's a bit complicated. This function sets up an in memory cache of all locations, which can be queried via: locationMap[build][chromosome][begin][end] = location["_id"] """ # cache of locations self._locationMap = {} locationMap = self._locationMap triples = self._rdfGraph.triples Ref = rdflib.URIRef associations = [] for subj, _, _ in triples((None, RDF.type, Ref(ASSOCIATION))): associations.append(subj.toPython()) locationIds = [] for association in associations: for _, _, obj in triples((Ref(association), Ref(HAS_SUBJECT), None)): locationIds.append(obj.toPython()) locations = [] for _id in locationIds: location = {} location["_id"] = _id for subj, predicate, obj in triples((Ref(location["_id"]), None, None)): if not predicate.toPython() in location: location[predicate.toPython()] = [] bisect.insort(location[predicate.toPython()], obj.toPython()) if FALDO_LOCATION in location: locations.append(location) for location in locations: for _id in location[FALDO_LOCATION]: # lookup faldo region, ensure positions are sorted faldoLocation = {} faldoLocation["_id"] = _id for subj, predicate, obj in triples((Ref(faldoLocation["_id"]), None, None)): if not predicate.toPython() in faldoLocation: faldoLocation[predicate.toPython()] = [] bisect.insort(faldoLocation[predicate.toPython()], obj.toPython()) faldoBegins = [] for _id in faldoLocation[FALDO_BEGIN]: faldoBegin = {} faldoBegin["_id"] = _id for subj, predicate, obj in triples( (Ref(faldoBegin["_id"]), None, None)): faldoBegin[predicate.toPython()] = obj.toPython() faldoBegins.append(faldoBegin) faldoReferences = [] for _id in faldoLocation[FALDO_BEGIN]: faldoReference = {} faldoReference["_id"] = faldoBegin[FALDO_REFERENCE] for subj, predicate, obj in triples( (Ref(faldoReference["_id"]), None, None)): faldoReference[predicate.toPython()] = obj.toPython() faldoReferences.append(faldoReference) faldoEnds = [] for _id in faldoLocation[FALDO_END]: faldoEnd = {} faldoEnd["_id"] = _id for subj, predicate, obj in triples((Ref(faldoEnd["_id"]), None, None)): faldoEnd[predicate.toPython()] = obj.toPython() faldoEnds.append(faldoEnd) for idx, faldoReference in enumerate(faldoReferences): if MEMBER_OF in faldoReference: build = faldoReference[MEMBER_OF].split('/')[-1] chromosome = faldoReference[LABEL].split(' ')[0] begin = faldoBegins[idx][FALDO_POSITION] end = faldoEnds[idx][FALDO_POSITION] if build not in locationMap: locationMap[build] = {} if chromosome not in locationMap[build]: locationMap[build][chromosome] = {} if begin not in locationMap[build][chromosome]: locationMap[build][chromosome][begin] = {} if end not in locationMap[build][chromosome][begin]: locationMap[build][chromosome][begin][end] = {} locationMap[build][chromosome][begin][end] = \ location["_id"] locationMap[location["_id"]] = { "build": build, "chromosome": chromosome, "begin": begin, "end": end, }
python
def _initializeLocationCache(self): """ CGD uses Faldo ontology for locations, it's a bit complicated. This function sets up an in memory cache of all locations, which can be queried via: locationMap[build][chromosome][begin][end] = location["_id"] """ # cache of locations self._locationMap = {} locationMap = self._locationMap triples = self._rdfGraph.triples Ref = rdflib.URIRef associations = [] for subj, _, _ in triples((None, RDF.type, Ref(ASSOCIATION))): associations.append(subj.toPython()) locationIds = [] for association in associations: for _, _, obj in triples((Ref(association), Ref(HAS_SUBJECT), None)): locationIds.append(obj.toPython()) locations = [] for _id in locationIds: location = {} location["_id"] = _id for subj, predicate, obj in triples((Ref(location["_id"]), None, None)): if not predicate.toPython() in location: location[predicate.toPython()] = [] bisect.insort(location[predicate.toPython()], obj.toPython()) if FALDO_LOCATION in location: locations.append(location) for location in locations: for _id in location[FALDO_LOCATION]: # lookup faldo region, ensure positions are sorted faldoLocation = {} faldoLocation["_id"] = _id for subj, predicate, obj in triples((Ref(faldoLocation["_id"]), None, None)): if not predicate.toPython() in faldoLocation: faldoLocation[predicate.toPython()] = [] bisect.insort(faldoLocation[predicate.toPython()], obj.toPython()) faldoBegins = [] for _id in faldoLocation[FALDO_BEGIN]: faldoBegin = {} faldoBegin["_id"] = _id for subj, predicate, obj in triples( (Ref(faldoBegin["_id"]), None, None)): faldoBegin[predicate.toPython()] = obj.toPython() faldoBegins.append(faldoBegin) faldoReferences = [] for _id in faldoLocation[FALDO_BEGIN]: faldoReference = {} faldoReference["_id"] = faldoBegin[FALDO_REFERENCE] for subj, predicate, obj in triples( (Ref(faldoReference["_id"]), None, None)): faldoReference[predicate.toPython()] = obj.toPython() faldoReferences.append(faldoReference) faldoEnds = [] for _id in faldoLocation[FALDO_END]: faldoEnd = {} faldoEnd["_id"] = _id for subj, predicate, obj in triples((Ref(faldoEnd["_id"]), None, None)): faldoEnd[predicate.toPython()] = obj.toPython() faldoEnds.append(faldoEnd) for idx, faldoReference in enumerate(faldoReferences): if MEMBER_OF in faldoReference: build = faldoReference[MEMBER_OF].split('/')[-1] chromosome = faldoReference[LABEL].split(' ')[0] begin = faldoBegins[idx][FALDO_POSITION] end = faldoEnds[idx][FALDO_POSITION] if build not in locationMap: locationMap[build] = {} if chromosome not in locationMap[build]: locationMap[build][chromosome] = {} if begin not in locationMap[build][chromosome]: locationMap[build][chromosome][begin] = {} if end not in locationMap[build][chromosome][begin]: locationMap[build][chromosome][begin][end] = {} locationMap[build][chromosome][begin][end] = \ location["_id"] locationMap[location["_id"]] = { "build": build, "chromosome": chromosome, "begin": begin, "end": end, }
[ "def", "_initializeLocationCache", "(", "self", ")", ":", "# cache of locations", "self", ".", "_locationMap", "=", "{", "}", "locationMap", "=", "self", ".", "_locationMap", "triples", "=", "self", ".", "_rdfGraph", ".", "triples", "Ref", "=", "rdflib", ".", "URIRef", "associations", "=", "[", "]", "for", "subj", ",", "_", ",", "_", "in", "triples", "(", "(", "None", ",", "RDF", ".", "type", ",", "Ref", "(", "ASSOCIATION", ")", ")", ")", ":", "associations", ".", "append", "(", "subj", ".", "toPython", "(", ")", ")", "locationIds", "=", "[", "]", "for", "association", "in", "associations", ":", "for", "_", ",", "_", ",", "obj", "in", "triples", "(", "(", "Ref", "(", "association", ")", ",", "Ref", "(", "HAS_SUBJECT", ")", ",", "None", ")", ")", ":", "locationIds", ".", "append", "(", "obj", ".", "toPython", "(", ")", ")", "locations", "=", "[", "]", "for", "_id", "in", "locationIds", ":", "location", "=", "{", "}", "location", "[", "\"_id\"", "]", "=", "_id", "for", "subj", ",", "predicate", ",", "obj", "in", "triples", "(", "(", "Ref", "(", "location", "[", "\"_id\"", "]", ")", ",", "None", ",", "None", ")", ")", ":", "if", "not", "predicate", ".", "toPython", "(", ")", "in", "location", ":", "location", "[", "predicate", ".", "toPython", "(", ")", "]", "=", "[", "]", "bisect", ".", "insort", "(", "location", "[", "predicate", ".", "toPython", "(", ")", "]", ",", "obj", ".", "toPython", "(", ")", ")", "if", "FALDO_LOCATION", "in", "location", ":", "locations", ".", "append", "(", "location", ")", "for", "location", "in", "locations", ":", "for", "_id", "in", "location", "[", "FALDO_LOCATION", "]", ":", "# lookup faldo region, ensure positions are sorted", "faldoLocation", "=", "{", "}", "faldoLocation", "[", "\"_id\"", "]", "=", "_id", "for", "subj", ",", "predicate", ",", "obj", "in", "triples", "(", "(", "Ref", "(", "faldoLocation", "[", "\"_id\"", "]", ")", ",", "None", ",", "None", ")", ")", ":", "if", "not", "predicate", ".", "toPython", "(", ")", "in", "faldoLocation", ":", "faldoLocation", "[", "predicate", ".", "toPython", "(", ")", "]", "=", "[", "]", "bisect", ".", "insort", "(", "faldoLocation", "[", "predicate", ".", "toPython", "(", ")", "]", ",", "obj", ".", "toPython", "(", ")", ")", "faldoBegins", "=", "[", "]", "for", "_id", "in", "faldoLocation", "[", "FALDO_BEGIN", "]", ":", "faldoBegin", "=", "{", "}", "faldoBegin", "[", "\"_id\"", "]", "=", "_id", "for", "subj", ",", "predicate", ",", "obj", "in", "triples", "(", "(", "Ref", "(", "faldoBegin", "[", "\"_id\"", "]", ")", ",", "None", ",", "None", ")", ")", ":", "faldoBegin", "[", "predicate", ".", "toPython", "(", ")", "]", "=", "obj", ".", "toPython", "(", ")", "faldoBegins", ".", "append", "(", "faldoBegin", ")", "faldoReferences", "=", "[", "]", "for", "_id", "in", "faldoLocation", "[", "FALDO_BEGIN", "]", ":", "faldoReference", "=", "{", "}", "faldoReference", "[", "\"_id\"", "]", "=", "faldoBegin", "[", "FALDO_REFERENCE", "]", "for", "subj", ",", "predicate", ",", "obj", "in", "triples", "(", "(", "Ref", "(", "faldoReference", "[", "\"_id\"", "]", ")", ",", "None", ",", "None", ")", ")", ":", "faldoReference", "[", "predicate", ".", "toPython", "(", ")", "]", "=", "obj", ".", "toPython", "(", ")", "faldoReferences", ".", "append", "(", "faldoReference", ")", "faldoEnds", "=", "[", "]", "for", "_id", "in", "faldoLocation", "[", "FALDO_END", "]", ":", "faldoEnd", "=", "{", "}", "faldoEnd", "[", "\"_id\"", "]", "=", "_id", "for", "subj", ",", "predicate", ",", "obj", "in", "triples", "(", "(", "Ref", "(", 
"faldoEnd", "[", "\"_id\"", "]", ")", ",", "None", ",", "None", ")", ")", ":", "faldoEnd", "[", "predicate", ".", "toPython", "(", ")", "]", "=", "obj", ".", "toPython", "(", ")", "faldoEnds", ".", "append", "(", "faldoEnd", ")", "for", "idx", ",", "faldoReference", "in", "enumerate", "(", "faldoReferences", ")", ":", "if", "MEMBER_OF", "in", "faldoReference", ":", "build", "=", "faldoReference", "[", "MEMBER_OF", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "chromosome", "=", "faldoReference", "[", "LABEL", "]", ".", "split", "(", "' '", ")", "[", "0", "]", "begin", "=", "faldoBegins", "[", "idx", "]", "[", "FALDO_POSITION", "]", "end", "=", "faldoEnds", "[", "idx", "]", "[", "FALDO_POSITION", "]", "if", "build", "not", "in", "locationMap", ":", "locationMap", "[", "build", "]", "=", "{", "}", "if", "chromosome", "not", "in", "locationMap", "[", "build", "]", ":", "locationMap", "[", "build", "]", "[", "chromosome", "]", "=", "{", "}", "if", "begin", "not", "in", "locationMap", "[", "build", "]", "[", "chromosome", "]", ":", "locationMap", "[", "build", "]", "[", "chromosome", "]", "[", "begin", "]", "=", "{", "}", "if", "end", "not", "in", "locationMap", "[", "build", "]", "[", "chromosome", "]", "[", "begin", "]", ":", "locationMap", "[", "build", "]", "[", "chromosome", "]", "[", "begin", "]", "[", "end", "]", "=", "{", "}", "locationMap", "[", "build", "]", "[", "chromosome", "]", "[", "begin", "]", "[", "end", "]", "=", "location", "[", "\"_id\"", "]", "locationMap", "[", "location", "[", "\"_id\"", "]", "]", "=", "{", "\"build\"", ":", "build", ",", "\"chromosome\"", ":", "chromosome", ",", "\"begin\"", ":", "begin", ",", "\"end\"", ":", "end", ",", "}" ]
CGD uses Faldo ontology for locations, it's a bit complicated. This function sets up an in memory cache of all locations, which can be queried via: locationMap[build][chromosome][begin][end] = location["_id"]
[ "CGD", "uses", "Faldo", "ontology", "for", "locations", "it", "s", "a", "bit", "complicated", ".", "This", "function", "sets", "up", "an", "in", "memory", "cache", "of", "all", "locations", "which", "can", "be", "queried", "via", ":", "locationMap", "[", "build", "]", "[", "chromosome", "]", "[", "begin", "]", "[", "end", "]", "=", "location", "[", "_id", "]" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/genotype_phenotype_featureset.py#L217-L315
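The cache built above nests build -> chromosome -> begin -> end; setdefault collapses the four membership checks into one chain. A sketch with invented values:

location_map = {}

def cache_location(build, chromosome, begin, end, location_id):
    # Forward index: coordinates -> location id.
    (location_map
        .setdefault(build, {})
        .setdefault(chromosome, {})
        .setdefault(begin, {}))[end] = location_id
    # Reverse index: location id -> coordinates, which the method also stores.
    location_map[location_id] = {
        "build": build, "chromosome": chromosome,
        "begin": begin, "end": end,
    }

cache_location("hg19", "chr7", 140453135, 140453136, "location/example")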
ga4gh/ga4gh-server
ga4gh/server/response_builder.py
SearchResponseBuilder.addValue
def addValue(self, protocolElement): """ Appends the specified protocolElement to the value list for this response. """ self._numElements += 1 self._bufferSize += protocolElement.ByteSize() attr = getattr(self._protoObject, self._valueListName) obj = attr.add() obj.CopyFrom(protocolElement)
python
def addValue(self, protocolElement): """ Appends the specified protocolElement to the value list for this response. """ self._numElements += 1 self._bufferSize += protocolElement.ByteSize() attr = getattr(self._protoObject, self._valueListName) obj = attr.add() obj.CopyFrom(protocolElement)
[ "def", "addValue", "(", "self", ",", "protocolElement", ")", ":", "self", ".", "_numElements", "+=", "1", "self", ".", "_bufferSize", "+=", "protocolElement", ".", "ByteSize", "(", ")", "attr", "=", "getattr", "(", "self", ".", "_protoObject", ",", "self", ".", "_valueListName", ")", "obj", "=", "attr", ".", "add", "(", ")", "obj", ".", "CopyFrom", "(", "protocolElement", ")" ]
Appends the specified protocolElement to the value list for this response.
[ "Appends", "the", "specified", "protocolElement", "to", "the", "value", "list", "for", "this", "response", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L61-L70
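ByteSize() on a protobuf message returns its serialized length, which is what keeps the running total cheap to maintain; a minimal tracker sketch (the class and field names are hypothetical):

class ValueBuffer(object):
    def __init__(self):
        self.num_elements = 0
        self.buffer_size = 0  # bytes of serialized payload so far

    def add(self, message):
        # message is any protobuf message object.
        self.num_elements += 1
        self.buffer_size += message.ByteSize()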
ga4gh/ga4gh-server
ga4gh/server/response_builder.py
SearchResponseBuilder.isFull
def isFull(self): """ Returns True if the response buffer is full, and False otherwise. The buffer is full if either (1) the number of items in the value list is >= pageSize or (2) the total length of the serialised elements in the page is >= maxBufferSize. If page_size or max_response_length were not set in the request then they're not checked. """ return ( (self._pageSize > 0 and self._numElements >= self._pageSize) or (self._bufferSize >= self._maxBufferSize) )
python
def isFull(self): """ Returns True if the response buffer is full, and False otherwise. The buffer is full if either (1) the number of items in the value list is >= pageSize or (2) the total length of the serialised elements in the page is >= maxBufferSize. If page_size or max_response_length were not set in the request then they're not checked. """ return ( (self._pageSize > 0 and self._numElements >= self._pageSize) or (self._bufferSize >= self._maxBufferSize) )
[ "def", "isFull", "(", "self", ")", ":", "return", "(", "(", "self", ".", "_pageSize", ">", "0", "and", "self", ".", "_numElements", ">=", "self", ".", "_pageSize", ")", "or", "(", "self", ".", "_bufferSize", ">=", "self", ".", "_maxBufferSize", ")", ")" ]
Returns True if the response buffer is full, and False otherwise. The buffer is full if either (1) the number of items in the value list is >= pageSize or (2) the total length of the serialised elements in the page is >= maxBufferSize. If page_size or max_response_length were not set in the request then they're not checked.
[ "Returns", "True", "if", "the", "response", "buffer", "is", "full", "and", "False", "otherwise", ".", "The", "buffer", "is", "full", "if", "either", "(", "1", ")", "the", "number", "of", "items", "in", "the", "value", "list", "is", ">", "=", "pageSize", "or", "(", "2", ")", "the", "total", "length", "of", "the", "serialised", "elements", "in", "the", "page", "is", ">", "=", "maxBufferSize", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L72-L85
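The two stopping conditions compose with a single or; a sketch with the limits made explicit (the values are illustrative):

def is_full(num_elements, buffer_size, page_size, max_buffer_size):
    # page_size <= 0 means no element-count limit was requested.
    return ((page_size > 0 and num_elements >= page_size)
            or buffer_size >= max_buffer_size)

assert is_full(100, 1024, page_size=100, max_buffer_size=65536)
assert not is_full(10, 1024, page_size=0, max_buffer_size=65536)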
ga4gh/ga4gh-server
ga4gh/server/response_builder.py
SearchResponseBuilder.getSerializedResponse
def getSerializedResponse(self): """ Returns a string version of the SearchResponse that has been built by this SearchResponseBuilder. """ self._protoObject.next_page_token = pb.string(self._nextPageToken) s = protocol.toJson(self._protoObject) return s
python
def getSerializedResponse(self): """ Returns a string version of the SearchResponse that has been built by this SearchResponseBuilder. """ self._protoObject.next_page_token = pb.string(self._nextPageToken) s = protocol.toJson(self._protoObject) return s
[ "def", "getSerializedResponse", "(", "self", ")", ":", "self", ".", "_protoObject", ".", "next_page_token", "=", "pb", ".", "string", "(", "self", ".", "_nextPageToken", ")", "s", "=", "protocol", ".", "toJson", "(", "self", ".", "_protoObject", ")", "return", "s" ]
Returns a string version of the SearchResponse that has been built by this SearchResponseBuilder.
[ "Returns", "a", "string", "version", "of", "the", "SearchResponse", "that", "has", "been", "built", "by", "this", "SearchResponseBuilder", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/response_builder.py#L87-L94
ga4gh/ga4gh-server
ga4gh/server/datamodel/ontologies.py
Ontology.populateFromRow
def populateFromRow(self, ontologyRecord): """ Populates this Ontology using values in the specified DB row. """ self._id = ontologyRecord.id self._dataUrl = ontologyRecord.dataurl self._readFile()
python
def populateFromRow(self, ontologyRecord): """ Populates this Ontology using values in the specified DB row. """ self._id = ontologyRecord.id self._dataUrl = ontologyRecord.dataurl self._readFile()
[ "def", "populateFromRow", "(", "self", ",", "ontologyRecord", ")", ":", "self", ".", "_id", "=", "ontologyRecord", ".", "id", "self", ".", "_dataUrl", "=", "ontologyRecord", ".", "dataurl", "self", ".", "_readFile", "(", ")" ]
Populates this Ontology using values in the specified DB row.
[ "Populates", "this", "Ontology", "using", "values", "in", "the", "specified", "DB", "row", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/ontologies.py#L75-L81
ga4gh/ga4gh-server
ga4gh/server/datamodel/ontologies.py
Ontology.getGaTermByName
def getGaTermByName(self, name): """ Returns a GA4GH OntologyTerm object by name. :param name: name of the ontology term, ex. "gene". :return: GA4GH OntologyTerm object. """ # TODO what is the correct value when we have no mapping?? termIds = self.getTermIds(name) if len(termIds) == 0: termId = "" # TODO add logging for missed term translation. else: # TODO what is the correct behaviour here when we have multiple # IDs matching a given name? termId = termIds[0] term = protocol.OntologyTerm() term.term = name term.term_id = termId return term
python
def getGaTermByName(self, name): """ Returns a GA4GH OntologyTerm object by name. :param name: name of the ontology term, ex. "gene". :return: GA4GH OntologyTerm object. """ # TODO what is the correct value when we have no mapping?? termIds = self.getTermIds(name) if len(termIds) == 0: termId = "" # TODO add logging for missed term translation. else: # TODO what is the correct behaviour here when we have multiple # IDs matching a given name? termId = termIds[0] term = protocol.OntologyTerm() term.term = name term.term_id = termId return term
[ "def", "getGaTermByName", "(", "self", ",", "name", ")", ":", "# TODO what is the correct value when we have no mapping??", "termIds", "=", "self", ".", "getTermIds", "(", "name", ")", "if", "len", "(", "termIds", ")", "==", "0", ":", "termId", "=", "\"\"", "# TODO add logging for missed term translation.", "else", ":", "# TODO what is the correct behaviour here when we have multiple", "# IDs matching a given name?", "termId", "=", "termIds", "[", "0", "]", "term", "=", "protocol", ".", "OntologyTerm", "(", ")", "term", ".", "term", "=", "name", "term", ".", "term_id", "=", "termId", "return", "term" ]
Returns a GA4GH OntologyTerm object by name. :param name: name of the ontology term, ex. "gene". :return: GA4GH OntologyTerm object.
[ "Returns", "a", "GA4GH", "OntologyTerm", "object", "by", "name", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/ontologies.py#L114-L133
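The unmapped/ambiguous fallback in getGaTermByName (empty id when unknown, first id when several match) in isolation; the mapping here is invented:

def term_id_for(name, term_ids_by_name):
    ids = term_ids_by_name.get(name, [])
    # Empty string when unmapped; first match when a name has several ids.
    return ids[0] if ids else ""

mapping = {"gene": ["SO:0000704"], "mRNA": ["SO:0000234"]}
assert term_id_for("gene", mapping) == "SO:0000704"
assert term_id_for("enhancer", mapping) == ""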
ga4gh/ga4gh-server
scripts/server_benchmark.py
_heavyQuery
def _heavyQuery(variantSetId, callSetIds): """ Very heavy query: calls for the specified list of callSetIds on chromosome 2 (11 pages, 90 seconds to fetch the entire thing on a high-end desktop machine) """ request = protocol.SearchVariantsRequest() request.reference_name = '2' request.variant_set_id = variantSetId for callSetId in callSetIds: request.call_set_ids.append(callSetId) request.page_size = 100 request.end = 100000 return request
python
def _heavyQuery(variantSetId, callSetIds): """ Very heavy query: calls for the specified list of callSetIds on chromosome 2 (11 pages, 90 seconds to fetch the entire thing on a high-end desktop machine) """ request = protocol.SearchVariantsRequest() request.reference_name = '2' request.variant_set_id = variantSetId for callSetId in callSetIds: request.call_set_ids.append(callSetId) request.page_size = 100 request.end = 100000 return request
[ "def", "_heavyQuery", "(", "variantSetId", ",", "callSetIds", ")", ":", "request", "=", "protocol", ".", "SearchVariantsRequest", "(", ")", "request", ".", "reference_name", "=", "'2'", "request", ".", "variant_set_id", "=", "variantSetId", "for", "callSetId", "in", "callSetIds", ":", "request", ".", "call_set_ids", ".", "append", "(", "callSetId", ")", "request", ".", "page_size", "=", "100", "request", ".", "end", "=", "100000", "return", "request" ]
Very heavy query: calls for the specified list of callSetIds on chromosome 2 (11 pages, 90 seconds to fetch the entire thing on a high-end desktop machine)
[ "Very", "heavy", "query", ":", "calls", "for", "the", "specified", "list", "of", "callSetIds", "on", "chromosome", "2", "(", "11", "pages", "90", "seconds", "to", "fetch", "the", "entire", "thing", "on", "a", "high", "-", "end", "desktop", "machine", ")" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L53-L66
ga4gh/ga4gh-server
scripts/server_benchmark.py
timeOneSearch
def timeOneSearch(queryString): """ Returns (search result as JSON string, time elapsed during search) """ startTime = time.clock() resultString = backend.runSearchVariants(queryString) endTime = time.clock() elapsedTime = endTime - startTime return resultString, elapsedTime
python
def timeOneSearch(queryString): """ Returns (search result as JSON string, time elapsed during search) """ startTime = time.clock() resultString = backend.runSearchVariants(queryString) endTime = time.clock() elapsedTime = endTime - startTime return resultString, elapsedTime
[ "def", "timeOneSearch", "(", "queryString", ")", ":", "startTime", "=", "time", ".", "clock", "(", ")", "resultString", "=", "backend", ".", "runSearchVariants", "(", "queryString", ")", "endTime", "=", "time", ".", "clock", "(", ")", "elapsedTime", "=", "endTime", "-", "startTime", "return", "resultString", ",", "elapsedTime" ]
Returns (search result as JSON string, time elapsed during search)
[ "Returns", "(", "search", "result", "as", "JSON", "string", "time", "elapsed", "during", "search", ")" ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L69-L77
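time.clock() dates this script to Python 2 and was removed in Python 3.8; on Python 3 the same measurement would use time.perf_counter(). A sketch, with backend.runSearchVariants standing in for any callable under test:

import time

def time_one_search(backend, query_string):
    start = time.perf_counter()
    result_string = backend.runSearchVariants(query_string)
    return result_string, time.perf_counter() - start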
ga4gh/ga4gh-server
scripts/server_benchmark.py
benchmarkOneQuery
def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3): """ Repeat the query several times; perhaps don't go through *all* the pages. Returns minimum time to run backend.searchVariants() to execute the query (as far as pageLimit allows), *not* including JSON processing to prepare queries or parse responses. """ times = [] queryString = protocol.toJson(request) for i in range(0, repeatLimit): resultString, elapsedTime = timeOneSearch(queryString) accruedTime = elapsedTime pageCount = 1 token = extractNextPageToken(resultString) # Iterate to go beyond the first page of results. while token is not None and pageCount < pageLimit: pageRequest = request pageRequest.page_token = token pageRequestString = protocol.toJson(pageRequest) resultString, elapsedTime = timeOneSearch(pageRequestString) accruedTime += elapsedTime pageCount = pageCount + 1 token = extractNextPageToken(resultString) times.append(accruedTime) # TODO: more sophisticated statistics. Sometimes we want min(), # sometimes mean = sum() / len(), sometimes other measures, # perhaps exclude outliers... # If we compute average we should throw out at least the first one. # return sum(times[2:])/len(times[2:]) return min(times)
python
def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3): """ Repeat the query several times; perhaps don't go through *all* the pages. Returns minimum time to run backend.searchVariants() to execute the query (as far as pageLimit allows), *not* including JSON processing to prepare queries or parse responses. """ times = [] queryString = protocol.toJson(request) for i in range(0, repeatLimit): resultString, elapsedTime = timeOneSearch(queryString) accruedTime = elapsedTime pageCount = 1 token = extractNextPageToken(resultString) # Iterate to go beyond the first page of results. while token is not None and pageCount < pageLimit: pageRequest = request pageRequest.page_token = token pageRequestString = protocol.toJson(pageRequest) resultString, elapsedTime = timeOneSearch(pageRequestString) accruedTime += elapsedTime pageCount = pageCount + 1 token = extractNextPageToken(resultString) times.append(accruedTime) # TODO: more sophisticated statistics. Sometimes we want min(), # sometimes mean = sum() / len(), sometimes other measures, # perhaps exclude outliers... # If we compute average we should throw out at least the first one. # return sum(times[2:])/len(times[2:]) return min(times)
[ "def", "benchmarkOneQuery", "(", "request", ",", "repeatLimit", "=", "3", ",", "pageLimit", "=", "3", ")", ":", "times", "=", "[", "]", "queryString", "=", "protocol", ".", "toJson", "(", "request", ")", "for", "i", "in", "range", "(", "0", ",", "repeatLimit", ")", ":", "resultString", ",", "elapsedTime", "=", "timeOneSearch", "(", "queryString", ")", "accruedTime", "=", "elapsedTime", "pageCount", "=", "1", "token", "=", "extractNextPageToken", "(", "resultString", ")", "# Iterate to go beyond the first page of results.", "while", "token", "is", "not", "None", "and", "pageCount", "<", "pageLimit", ":", "pageRequest", "=", "request", "pageRequest", ".", "page_token", "=", "token", "pageRequestString", "=", "protocol", ".", "toJson", "(", "pageRequest", ")", "resultString", ",", "elapsedTime", "=", "timeOneSearch", "(", "pageRequestString", ")", "accruedTime", "+=", "elapsedTime", "pageCount", "=", "pageCount", "+", "1", "token", "=", "extractNextPageToken", "(", "resultString", ")", "times", ".", "append", "(", "accruedTime", ")", "# TODO: more sophisticated statistics. Sometimes we want min(),", "# sometimes mean = sum() / len(), sometimes other measures,", "# perhaps exclude outliers...", "# If we compute average we should throw out at least the first one.", "# return sum(times[2:])/len(times[2:])", "return", "min", "(", "times", ")" ]
Repeat the query several times; perhaps don't go through *all* the pages. Returns minimum time to run backend.searchVariants() to execute the query (as far as pageLimit allows), *not* including JSON processing to prepare queries or parse responses.
[ "Repeat", "the", "query", "several", "times", ";", "perhaps", "don", "t", "go", "through", "*", "all", "*", "the", "pages", ".", "Returns", "minimum", "time", "to", "run", "backend", ".", "searchVariants", "()", "to", "execute", "the", "query", "(", "as", "far", "as", "pageLimit", "allows", ")", "*", "not", "*", "including", "JSON", "processing", "to", "prepare", "queries", "or", "parse", "responses", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/scripts/server_benchmark.py#L93-L124
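Taking min() over repeats is the conventional way to suppress timing noise (it is also what the timeit module recommends); the repeat loop reduces to this shape, where run_once is a placeholder for one full paged query:

def best_of(run_once, repeats=3):
    # run_once() returns elapsed seconds for one complete (paged) query.
    return min(run_once() for _ in range(repeats))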
ga4gh/ga4gh-server
ga4gh/server/exceptions.py
getExceptionClass
def getExceptionClass(errorCode): """ Converts the specified error code into the corresponding class object. Raises a KeyError if the errorCode is not found. """ classMap = {} for name, class_ in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(class_) and issubclass(class_, BaseServerException): classMap[class_.getErrorCode()] = class_ return classMap[errorCode]
python
def getExceptionClass(errorCode): """ Converts the specified error code into the corresponding class object. Raises a KeyError if the errorCode is not found. """ classMap = {} for name, class_ in inspect.getmembers(sys.modules[__name__]): if inspect.isclass(class_) and issubclass(class_, BaseServerException): classMap[class_.getErrorCode()] = class_ return classMap[errorCode]
[ "def", "getExceptionClass", "(", "errorCode", ")", ":", "classMap", "=", "{", "}", "for", "name", ",", "class_", "in", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ")", ":", "if", "inspect", ".", "isclass", "(", "class_", ")", "and", "issubclass", "(", "class_", ",", "BaseServerException", ")", ":", "classMap", "[", "class_", ".", "getErrorCode", "(", ")", "]", "=", "class_", "return", "classMap", "[", "errorCode", "]" ]
Converts the specified error code into the corresponding class object. Raises a KeyError if the errorCode is not found.
[ "Converts", "the", "specified", "error", "code", "into", "the", "corresponding", "class", "object", ".", "Raises", "a", "KeyError", "if", "the", "errorCode", "is", "not", "found", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/exceptions.py#L17-L26
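The module-scan registry generalizes to any class hierarchy; a self-contained version with invented classes:

import inspect
import sys

class BaseError(Exception):
    CODE = 0

    @classmethod
    def getErrorCode(cls):
        return cls.CODE

class NotFoundError(BaseError):
    CODE = 404

def exceptionClassFor(code):
    registry = {}
    for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
        if issubclass(cls, BaseError):
            registry[cls.getErrorCode()] = cls
    return registry[code]  # KeyError for unknown codes, as in the original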
ga4gh/ga4gh-server
ga4gh/server/exceptions.py
RuntimeException.toProtocolElement
def toProtocolElement(self): """ Converts this exception into the GA4GH protocol type so that it can be communicated back to the client. """ error = protocol.GAException() error.error_code = self.getErrorCode() error.message = self.getMessage() return error
python
def toProtocolElement(self): """ Converts this exception into the GA4GH protocol type so that it can be communicated back to the client. """ error = protocol.GAException() error.error_code = self.getErrorCode() error.message = self.getMessage() return error
[ "def", "toProtocolElement", "(", "self", ")", ":", "error", "=", "protocol", ".", "GAException", "(", ")", "error", ".", "error_code", "=", "self", ".", "getErrorCode", "(", ")", "error", ".", "message", "=", "self", ".", "getMessage", "(", ")", "return", "error" ]
Converts this exception into the GA4GH protocol type so that it can be communicated back to the client.
[ "Converts", "this", "exception", "into", "the", "GA4GH", "protocol", "type", "so", "that", "it", "can", "be", "communicated", "back", "to", "the", "client", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/exceptions.py#L94-L102
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._init_goterm_ref
def _init_goterm_ref(self, rec_curr, name, lnum): """Initialize new reference and perform checks.""" if rec_curr is None: return GOTerm() msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name) self._die(msg, lnum)
python
def _init_goterm_ref(self, rec_curr, name, lnum): """Initialize new reference and perform checks.""" if rec_curr is None: return GOTerm() msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name) self._die(msg, lnum)
[ "def", "_init_goterm_ref", "(", "self", ",", "rec_curr", ",", "name", ",", "lnum", ")", ":", "if", "rec_curr", "is", "None", ":", "return", "GOTerm", "(", ")", "msg", "=", "\"PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED\"", ".", "format", "(", "REC", "=", "name", ")", "self", ".", "_die", "(", "msg", ",", "lnum", ")" ]
Initialize new reference and perform checks.
[ "Initialize", "new", "reference", "and", "perform", "checks", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L116-L121
ga4gh/ga4gh-server
ga4gh/server/datamodel/obo_parser.py
OBOReader._init_typedef
def _init_typedef(self, typedef_curr, name, lnum): """Initialize new typedef and perform checks.""" if typedef_curr is None: return TypeDef() msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name) self._die(msg, lnum)
python
def _init_typedef(self, typedef_curr, name, lnum): """Initialize new typedef and perform checks.""" if typedef_curr is None: return TypeDef() msg = "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name) self._die(msg, lnum)
[ "def", "_init_typedef", "(", "self", ",", "typedef_curr", ",", "name", ",", "lnum", ")", ":", "if", "typedef_curr", "is", "None", ":", "return", "TypeDef", "(", ")", "msg", "=", "\"PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED\"", ".", "format", "(", "REC", "=", "name", ")", "self", ".", "_die", "(", "msg", ",", "lnum", ")" ]
Initialize new typedef and perform checks.
[ "Initialize", "new", "typedef", "and", "perform", "checks", "." ]
train
https://github.com/ga4gh/ga4gh-server/blob/1aa18922ef136db8604f6f098cb1732cba6f2a76/ga4gh/server/datamodel/obo_parser.py#L123-L128
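Both _init_goterm_ref and _init_typedef implement the same guard: a new stanza may only begin once the previous one was terminated (the current record reset to None). The shared pattern, generically (the names are illustrative):

def init_record(current, factory, name, line_number):
    # current is None only after the previous stanza was properly closed.
    if current is None:
        return factory()
    raise ValueError(
        "PREVIOUS {} WAS NOT TERMINATED AS EXPECTED (line {})".format(
            name, line_number))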