Dataset schema: sentence1 (string, 52 to 3.87M chars), sentence2 (string, 1 to 47.2k chars), label (string, 1 class: entailment).
def get_logs(self, login=None, **kwargs):
    """Get a user's logs.

    :param str login: User's login (Default: self._login)
    :return: JSON
    """
    _login = kwargs.get('login', login)
    log_events_url = GSA_LOGS_URL.format(login=_login)
    return self._request_api(url=log_events_url).json()
Get a user's logs. :param str login: User's login (Default: self._login) :return: JSON
entailment
def negotiate(cls, headers):
    """ Process headers dict to return the format class (not the instance) """
    # set lower keys
    headers = {k.lower(): v for k, v in headers.items()}
    accept = headers.get('accept', "*/*")
    parsed_accept = accept.split(";")
    parsed_accept = [i.strip() for i in parsed_accept]

    # Protobuffer (only one version)
    if all(i in parsed_accept for i in cls.PROTOBUF['default']):
        return ProtobufFormat
    elif all(i in parsed_accept for i in cls.PROTOBUF['text']):
        return ProtobufTextFormat
    # Text 0.0.4
    elif all(i in parsed_accept for i in cls.TEXT['0.0.4']):
        return TextFormat
    # Text (Default)
    elif all(i in parsed_accept for i in cls.TEXT['default']):
        return TextFormat
    # Default
    else:
        return cls.FALLBACK
Process headers dict to return the format class (not the instance)
entailment
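A standalone sketch of the Accept-header tokenization that negotiate() relies on; the header value is illustrative.

accept = "text/plain; version=0.0.4"
parsed_accept = [i.strip() for i in accept.split(";")]
assert parsed_accept == ["text/plain", "version=0.0.4"]
# negotiate() then checks these tokens against each format's signature,
# e.g. all(i in parsed_accept for i in cls.TEXT['0.0.4']).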
def register(self, collector):
    """ Registers a collector"""
    if not isinstance(collector, Collector):
        raise TypeError(
            "Can't register instance, not a valid type of collector")
    if collector.name in self.collectors:
        raise ValueError("Collector already exists or name collision")
    with mutex:
        self.collectors[collector.name] = collector
Registers a collector
entailment
def add(self, registry):
    """ Add works like replace, but only previously pushed metrics with
    the same name (and the same job and instance) will be replaced.
    (It uses HTTP method 'POST' to push to the Pushgateway.)
    """
    # POST
    payload = self.formatter.marshall(registry)
    requests.post(self.path, data=payload, headers=self.headers)
Add works like replace, but only previously pushed metrics with the same name (and the same job and instance) will be replaced. (It uses HTTP method 'POST' to push to the Pushgateway.)
entailment
def replace(self, registry):
    """ Push triggers a metric collection and pushes all collected metrics
    to the Pushgateway specified by addr.
    Note that all previously pushed metrics with the same job and instance
    will be replaced with the metrics pushed by this call.
    (It uses HTTP method 'PUT' to push to the Pushgateway.)
    """
    # PUT
    payload = self.formatter.marshall(registry)
    requests.put(self.path, data=payload, headers=self.headers)
Push triggers a metric collection and pushes all collected metrics to the Pushgateway specified by addr Note that all previously pushed metrics with the same job and instance will be replaced with the metrics pushed by this call. (It uses HTTP method 'PUT' to push to the Pushgateway.)
entailment
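A minimal sketch of the HTTP semantics behind add() and replace(), assuming a Pushgateway listening on localhost:9091; the URL, job name and payload are illustrative.

import requests

payload = b"example_metric 1\n"
url = "http://localhost:9091/metrics/job/example_job"

requests.post(url, data=payload)  # add(): merges with previously pushed metrics
requests.put(url, data=payload)   # replace(): replaces the whole metric group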
def marshall_lines(self, collector):
    """ Marshalls a collector and returns the storage/transfer format as
    a list of lines, one representation-format entry per element.
    """
    if isinstance(collector, collectors.Counter):
        exec_method = self._format_counter
    elif isinstance(collector, collectors.Gauge):
        exec_method = self._format_gauge
    elif isinstance(collector, collectors.Summary):
        exec_method = self._format_summary
    else:
        raise TypeError("Not a valid object format")

    # create headers
    help_header = TextFormat.HELP_FMT.format(
        name=collector.name, help_text=collector.help_text)
    type_header = TextFormat.TYPE_FMT.format(
        name=collector.name, value_type=collector.REPR_STR)

    # Prepare start headers
    lines = [help_header, type_header]

    for i in collector.get_all():
        r = exec_method(i, collector.name, collector.const_labels)
        # Check if it returns one or multiple lines
        if not isinstance(r, str) and isinstance(r, collections.Iterable):
            lines.extend(r)
        else:
            lines.append(r)
    return lines
Marshalls a collector and returns the storage/transfer format as a list of lines, one representation-format entry per element.
entailment
def marshall(self, registry):
    """Marshalls a full registry (various collectors)"""
    blocks = []
    for i in registry.get_all():
        blocks.append(self.marshall_collector(i))

    # Sorted for deterministic output (relied on by the tests)
    blocks = sorted(blocks)

    # Needs a trailing newline (EOF)
    blocks.append("")
    return self.__class__.LINE_SEPARATOR_FMT.join(blocks)
Marshalls a full registry (various collectors)
entailment
def marshall(self, registry):
    """Returns bytes"""
    result = b""
    for i in registry.get_all():
        # Each message needs to be prefixed with a varint holding the size
        # of the message (MetricType):
        # https://github.com/matttproud/golang_protobuf_extensions/blob/master/ext/encode.go
        # http://zombietetris.de/blog/building-your-own-writedelimitedto-for-python-protobuf/
        body = self.marshall_collector(i).SerializeToString()
        msg = encoder._VarintBytes(len(body)) + body
        result += msg
    return result
Returns bytes
entailment
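A standalone sketch of the varint length-prefix framing used above; note that encoder._VarintBytes is an internal protobuf API, so this assumes the same protobuf package the code imports.

from google.protobuf.internal import encoder

body = b"serialized message bytes"
framed = encoder._VarintBytes(len(body)) + body
# A reader first decodes the varint to learn how many bytes to consume,
# then parses that many bytes as the message body.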
def gather_data(registry):
    """Gathers the metrics"""
    # Get the host name of the machine
    host = socket.gethostname()

    # Create our collectors
    trig_metric = Gauge("trigonometry_example",
                        "Various trigonometry examples.",
                        {'host': host})

    # register the metric collectors
    registry.register(trig_metric)

    # Start gathering metrics every second
    counter = 0
    while True:
        time.sleep(1)
        sine = math.sin(math.radians(counter % 360))
        cosine = math.cos(math.radians(counter % 360))
        trig_metric.set({'type': "sine"}, sine)
        trig_metric.set({'type': "cosine"}, cosine)
        counter += 1
Gathers the metrics
entailment
def set_value(self, labels, value):
    """ Sets a value in the container"""
    if labels:
        self._label_names_correct(labels)
    with mutex:
        self.values[labels] = value
Sets a value in the container
entailment
def _label_names_correct(self, labels):
    """Raise exception (ValueError) if labels not correct"""
    for k, v in labels.items():
        # Check reserved labels
        if k in RESTRICTED_LABELS_NAMES:
            raise ValueError("Labels not correct")
        # Check prefixes
        if any(k.startswith(i) for i in RESTRICTED_LABELS_PREFIXES):
            raise ValueError("Labels not correct")
    return True
Raise exception (ValueError) if labels not correct
entailment
def get_all(self):
    """ Returns a list of 2-element tuples: the first element is a dict
    with all the labels and the second element is the value of the
    metric itself.
    """
    with mutex:
        items = self.values.items()

    result = []
    for k, v in items:
        # Check if it is a single-value dict (custom empty key)
        if not k or k == MetricDict.EMPTY_KEY:
            key = None
        else:
            key = decoder.decode(k)
        result.append((key, self.get(k)))
    return result
Returns a list of 2-element tuples: the first element is a dict with all the labels and the second element is the value of the metric itself.
entailment
def add(self, labels, value):
    """ Add adds the given value to the Gauge. (The value can be
    negative, resulting in a decrease of the Gauge.)
    """
    try:
        current = self.get_value(labels)
    except KeyError:
        current = 0
    self.set_value(labels, current + value)
Add adds the given value to the Gauge. (The value can be negative, resulting in a decrease of the Gauge.)
entailment
def add(self, labels, value):
    """Add adds a single observation to the summary."""
    if type(value) not in (float, int):
        raise TypeError("Summary only works with numeric values (int, float)")

    # We already have a lock for the data but not for the estimator
    with mutex:
        try:
            e = self.get_value(labels)
        except KeyError:
            # Initialize a quantile estimator
            e = quantile.Estimator(*self.__class__.DEFAULT_INVARIANTS)
            self.set_value(labels, e)
        e.observe(float(value))
Add adds a single observation to the summary.
entailment
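A hedged sketch of the biased quantile estimator the Summary wraps, assuming the same `quantile` package the collector imports and that its invariants are (quantile, allowed-error) tuples.

import quantile

# 50th, 90th and 99th percentiles with their allowed errors.
estimator = quantile.Estimator((0.50, 0.05), (0.90, 0.01), (0.99, 0.001))
for v in range(1000):
    estimator.observe(float(v))
print(estimator.query(0.50))  # roughly 500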
def get(self, labels):
    """ Get gets the data as the 0.5, 0.9 and 0.99 percentiles, along
    with the sum and count, all in a dict.
    """
    return_data = {}

    # We already have a lock for the data but not for the estimator
    with mutex:
        e = self.get_value(labels)

        # Set invariants data (default to 0.50, 0.90 and 0.99)
        for i in e._invariants:
            q = i._quantile
            return_data[q] = e.query(q)

        # Set sum and count
        return_data[self.__class__.SUM_KEY] = e._sum
        return_data[self.__class__.COUNT_KEY] = e._observations

    return return_data
Get gets the data as the 0.5, 0.9 and 0.99 percentiles, along with the sum and count, all in a dict.
entailment
def gather_data(registry):
    """Gathers the metrics"""
    # Get the host name of the machine
    host = socket.gethostname()

    # Create our collectors
    ram_metric = Gauge("memory_usage_bytes", "Memory usage in bytes.",
                       {'host': host})
    cpu_metric = Gauge("cpu_usage_percent", "CPU usage percent.",
                       {'host': host})

    # register the metric collectors
    registry.register(ram_metric)
    registry.register(cpu_metric)

    # Start gathering metrics every second
    while True:
        time.sleep(1)

        # Add ram metrics
        ram = psutil.virtual_memory()
        swap = psutil.swap_memory()
        ram_metric.set({'type': "virtual"}, ram.used)
        ram_metric.set({'type': "virtual", 'status': "cached"}, ram.cached)
        ram_metric.set({'type': "swap"}, swap.used)

        # Add cpu metrics
        for c, p in enumerate(psutil.cpu_percent(interval=1, percpu=True)):
            cpu_metric.set({'core': c}, p)
Gathers the metrics
entailment
def gather_data(registry):
    """Gathers the metrics"""
    # Get the host name of the machine
    host = socket.gethostname()

    # Create our collectors
    io_metric = Summary("write_file_io_example",
                        "Writing io file in disk example.",
                        {'host': host})

    # register the metric collectors
    registry.register(io_metric)

    chunk = b'\xff' * 4000  # 4000 bytes
    filename_path = "/tmp/prometheus_test"
    blocksizes = (100, 10000, 1000000, 100000000)

    # Start gathering metrics every 0.7 seconds
    while True:
        time.sleep(0.7)
        for i in blocksizes:
            time_start = time.time()
            # Action
            with open(filename_path, "wb") as f:
                for _ in range(i // 10000):
                    f.write(chunk)
            io_metric.add({"file": filename_path, "block": i},
                          time.time() - time_start)
Gathers the metrics
entailment
def get_child(self, name, attribs=None):
    """
    Returns the first child that matches the given name and attributes.
    """
    if name == '.':
        if attribs is None or len(attribs) == 0:
            return self
        if attribs == self.attribs:
            return self
    return self.child_index.get(nodehash(name, attribs))
Returns the first child that matches the given name and attributes.
entailment
def create(self, path, data=None):
    """
    Creates the given node, regardless of whether or not it already
    exists. Returns the new node.
    """
    node = self.current[-1]
    path = self._splitpath(path)
    n_items = len(path)
    for n, item in enumerate(path):
        tag, attribs = self._splittag(item)

        # The leaf node is always newly created.
        if n == n_items - 1:
            node = node.add(Node(tag, attribs))
            break

        # Parent nodes are only created if they do not exist yet.
        existing = node.get_child(tag, attribs)
        if existing is not None:
            node = existing
        else:
            node = node.add(Node(tag, attribs))

    if data:
        node.text = unquote(data)
    return node
Creates the given node, regardless of whether or not it already exists. Returns the new node.
entailment
def add(self, path, data=None, replace=False):
    """
    Creates the given node if it does not exist.
    Returns the (new or existing) node.
    """
    node = self.current[-1]
    for item in self._splitpath(path):
        tag, attribs = self._splittag(item)
        next_node = node.get_child(tag, attribs)
        if next_node is not None:
            node = next_node
        else:
            node = node.add(Node(tag, attribs))

    if replace:
        node.text = ''
    if data:
        if node.text is None:
            node.text = unquote(data)
        else:
            node.text += unquote(data)
    return node
Creates the given node if it does not exist. Returns the (new or existing) node.
entailment
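A hypothetical usage sketch contrasting add() and create(); `builder` stands in for whatever object exposes these methods, and the paths and data are illustrative.

# add() reuses existing nodes and appends to their text;
# create() always makes a fresh leaf under the (reused) parents.
builder.add('user/name', 'Alice')    # creates user and name
builder.add('user/name', ' Smith')   # appends text to the same name node
builder.create('user/name', 'Bob')   # adds a second name element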
def add_attribute(self, path, name, value):
    """
    Creates the given attribute and sets it to the given value.
    Returns the (new or existing) node to which the attribute was added.
    """
    node = self.add(path)
    node.attribs.append((name, value))
    return node
Creates the given attribute and sets it to the given value. Returns the (new or existing) node to which the attribute was added.
entailment
def open(self, path):
    """
    Creates and enters the given node, regardless of whether it already
    exists. Returns the new node.
    """
    self.current.append(self.create(path))
    return self.current[-1]
Creates and enters the given node, regardless of whether it already exists. Returns the new node.
entailment
def enter(self, path):
    """
    Enters the given node. Creates it if it does not exist.
    Returns the node.
    """
    self.current.append(self.add(path))
    return self.current[-1]
Enters the given node. Creates it if it does not exist. Returns the node.
entailment
def generate(converter, input_file, format='xml', encoding='utf8'):
    """
    Given a converter (as returned by compile()), this function reads
    the given input file and converts it to the requested output format.

    Supported output formats are 'xml', 'yaml', 'json', or 'none'.

    :type  converter: compiler.Context
    :param converter: The compiled converter.
    :type  input_file: str
    :param input_file: Name of a file to convert.
    :type  format: str
    :param format: The output format.
    :type  encoding: str
    :param encoding: Character encoding of the input file.
    :rtype:  str
    :return: The resulting output.
    """
    with codecs.open(input_file, encoding=encoding) as thefile:
        return generate_string(converter, thefile.read(), format=format)
Given a converter (as returned by compile()), this function reads the given input file and converts it to the requested output format. Supported output formats are 'xml', 'yaml', 'json', or 'none'. :type converter: compiler.Context :param converter: The compiled converter. :type input_file: str :param input_file: Name of a file to convert. :type format: str :param format: The output format. :type encoding: str :param encoding: Character encoding of the input file. :rtype: str :return: The resulting output.
entailment
def generate_to_file(converter, input_file, output_file, format='xml',
                     in_encoding='utf8', out_encoding='utf8'):
    """
    Like generate(), but writes the output to the given output file
    instead.

    :type  converter: compiler.Context
    :param converter: The compiled converter.
    :type  input_file: str
    :param input_file: Name of a file to convert.
    :type  output_file: str
    :param output_file: The output filename.
    :type  format: str
    :param format: The output format.
    :type  in_encoding: str
    :param in_encoding: Character encoding of the input file.
    :type  out_encoding: str
    :param out_encoding: Character encoding of the output file.
    :rtype:  str
    :return: The resulting output.
    """
    with codecs.open(output_file, 'w', encoding=out_encoding) as thefile:
        result = generate(converter, input_file, format=format,
                          encoding=in_encoding)
        thefile.write(result)
Like generate(), but writes the output to the given output file instead. :type converter: compiler.Context :param converter: The compiled converter. :type input_file: str :param input_file: Name of a file to convert. :type output_file: str :param output_file: The output filename. :type format: str :param format: The output format. :type in_encoding: str :param in_encoding: Character encoding of the input file. :type out_encoding: str :param out_encoding: Character encoding of the output file. :rtype: str :return: The resulting output.
entailment
def generate_string(converter, input, format='xml'):
    """
    Like generate(), but reads the input from a string instead of
    from a file.

    :type  converter: compiler.Context
    :param converter: The compiled converter.
    :type  input: str
    :param input: The string to convert.
    :type  format: str
    :param format: The output format.
    :rtype:  str
    :return: The resulting output.
    """
    serializer = generator.new(format)
    if serializer is None:
        raise TypeError('invalid output format ' + repr(format))
    builder = Builder()
    converter.parse_string(input, builder)
    return builder.serialize(serializer)
Like generate(), but reads the input from a string instead of from a file. :type converter: compiler.Context :param converter: The compiled converter. :type input: str :param input: The string to convert. :type format: str :param format: The output format. :rtype: str :return: The resulting output.
entailment
def generate_string_to_file(converter, input, output_file, format='xml',
                            out_encoding='utf8'):
    """
    Like generate(), but reads the input from a string instead of from a
    file, and writes the output to the given output file.

    :type  converter: compiler.Context
    :param converter: The compiled converter.
    :type  input: str
    :param input: The string to convert.
    :type  output_file: str
    :param output_file: The output filename.
    :type  format: str
    :param format: The output format.
    :type  out_encoding: str
    :param out_encoding: Character encoding of the output file.
    :rtype:  str
    :return: The resulting output.
    """
    with codecs.open(output_file, 'w', encoding=out_encoding) as thefile:
        result = generate_string(converter, input, format=format)
        thefile.write(result)
Like generate(), but reads the input from a string instead of from a file, and writes the output to the given output file. :type converter: compiler.Context :param converter: The compiled converter. :type input: str :param input: The string to convert. :type output_file: str :param output_file: The output filename. :type format: str :param format: The output format. :type out_encoding: str :param out_encoding: Character encoding of the output file. :rtype: str :return: The resulting output.
entailment
def is_now(s, dt=None):
    '''
    A very simple cron-like parser to determine whether a (cron-like)
    string is valid for this date and time.

    @input:
        s = cron-like string (minute, hour, day of month, month, day of week)
        dt = datetime to use as reference time, defaults to now
    @output: boolean of result
    '''
    if dt is None:
        dt = datetime.now()
    minute, hour, dom, month, dow = s.split(' ')
    weekday = dt.isoweekday()

    return _parse_arg(minute, dt.minute) \
        and _parse_arg(hour, dt.hour) \
        and _parse_arg(dom, dt.day) \
        and _parse_arg(month, dt.month) \
        and _parse_arg(dow, 0 if weekday == 7 else weekday, True)
A very simple cron-like parser to determine whether a (cron-like) string is valid for this date and time. @input: s = cron-like string (minute, hour, day of month, month, day of week) dt = datetime to use as reference time, defaults to now @output: boolean of result
entailment
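Illustrative checks, assuming is_now() is importable from this module and that _parse_arg() treats '*' as a wildcard and plain numbers as equality; 2024-01-01 was a Monday.

from datetime import datetime

dt = datetime(2024, 1, 1, 12, 30)
is_now('30 12 * * *', dt)   # True: minute and hour both match
is_now('* * * * 1', dt)     # True: Monday is day-of-week 1
is_now('0 * * * *', dt)     # False: minute is 30, not 0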
def has_been(s, since, dt=None):
    '''
    A parser to check whether a (cron-like) string has been true during a
    certain time period. Useful for applications which cannot check every
    minute or need to catch up during a restart.

    @input:
        s = cron-like string (minute, hour, day of month, month, day of week)
        since = datetime to use as reference time for start of period
        dt = datetime to use as reference time for end of period,
             defaults to now
    @output: boolean of result
    '''
    if dt is None:
        dt = datetime.now(tz=since.tzinfo)

    if dt < since:
        raise ValueError("The since datetime must be before the current datetime.")

    while since <= dt:
        if is_now(s, since):
            return True
        since += timedelta(minutes=1)
    return False
A parser to check whether a (cron-like) string has been true during a certain time period. Useful for applications which cannot check every minute or need to catch up during a restart. @input: s = cron-like string (minute, hour, day of month, month, day of week) since = datetime to use as reference time for start of period dt = datetime to use as reference time for end of period, defaults to now @output: boolean of result
entailment
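A hedged example of catching up after downtime, assuming has_been() is importable from this module: did the top-of-the-hour rule fire in the last 90 minutes?

from datetime import datetime, timedelta

now = datetime(2024, 1, 1, 12, 30)
# True: 12:00 falls inside the [11:00, 12:30] window.
has_been('0 * * * *', since=now - timedelta(minutes=90), dt=now)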
def auprc(y_true, y_pred):
    """Area under the precision-recall curve"""
    y_true, y_pred = _mask_value_nan(y_true, y_pred)
    return skm.average_precision_score(y_true, y_pred)
Area under the precision-recall curve
entailment
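A direct sketch of the underlying scikit-learn call, assuming the NaN mask has nothing to remove; the labels and scores are illustrative.

import numpy as np
from sklearn import metrics as skm

y_true = np.array([0, 0, 1, 1])
y_pred = np.array([0.1, 0.4, 0.35, 0.8])
print(skm.average_precision_score(y_true, y_pred))  # ~0.83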
def best_trial_tid(self, rank=0):
    """Get tid of the best trial.

    rank=0 means the best model
    rank=1 means second best
    ...
    """
    candidates = [t for t in self.trials
                  if t['result']['status'] == STATUS_OK]
    if len(candidates) == 0:
        return None
    losses = [float(t['result']['loss']) for t in candidates]
    assert not np.any(np.isnan(losses))
    lid = np.where(np.argsort(losses).argsort() == rank)[0][0]
    return candidates[lid]["tid"]
Get tid of the best trial rank=0 means the best model rank=1 means second best ...
entailment
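A standalone sketch of the double-argsort rank-selection trick used above.

import numpy as np

losses = np.array([0.7, 0.2, 0.9])
# argsort().argsort() maps each element to its rank, so rank == 0
# selects the index of the smallest loss.
lid = np.where(np.argsort(losses).argsort() == 0)[0][0]
assert lid == 1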
def count_by_state_unsynced(self, arg):
    """Extends the original object in order to inject checking for
    stalled jobs and killing them if they are running for too long.
    """
    if self.kill_timeout is not None:
        self.delete_running(self.kill_timeout)
    return super(KMongoTrials, self).count_by_state_unsynced(arg)
Extends the original object in order to inject checking for stalled jobs and killing them if they are running for too long
entailment
def plot_history(self, tid, scores=["loss", "f1", "accuracy"],
                 figsize=(15, 3)):
    """Plot the loss curves"""
    history = self.train_history(tid)
    import matplotlib.pyplot as plt

    fig = plt.figure(figsize=figsize)
    for i, score in enumerate(scores):
        plt.subplot(1, len(scores), i + 1)
        plt.tight_layout()
        plt.plot(history[score], label="train")
        plt.plot(history['val_' + score], label="validation")
        plt.title(score)
        plt.ylabel(score)
        plt.xlabel('epoch')
        plt.legend(loc='best')
    return fig
Plot the loss curves
entailment
def load_model(self, tid, custom_objects=None):
    """Load the saved Keras model of the trial.

    If tid is None, get the best model.

    Not applicable for trials run with cross-validation (i.e. when
    `CompileFN.cv_n_folds` is not None).
    """
    if tid is None:
        tid = self.best_trial_tid()

    model_path = self.get_trial(tid)["result"]["path"]["model"]
    return load_model(model_path, custom_objects=custom_objects)
Load the saved Keras model of the trial. If tid is None, get the best model. Not applicable for trials run with cross-validation (i.e. when `CompileFN.cv_n_folds` is not None).
entailment
def n_ok(self):
    """Number of ok trials"""
    if len(self.trials) == 0:
        return 0
    else:
        return np.sum(np.array(self.statuses()) == "ok")
Number of ok trials
entailment
def get_ok_results(self, verbose=True):
    """Return a list of results with ok status"""
    if len(self.trials) == 0:
        return []

    not_ok = np.where(np.array(self.statuses()) != "ok")[0]

    if len(not_ok) > 0 and verbose:
        print("{0}/{1} trials were not ok.".format(len(not_ok),
                                                   len(self.trials)))
        print("Trials: " + str(not_ok))
        print("Statuses: " + str(np.array(self.statuses())[not_ok]))

    r = [merge_dicts({"tid": t["tid"]}, t["result"].to_dict())
         for t in self.trials if t["result"]["status"] == "ok"]
    return r
Return a list of results with ok status
entailment
def VerifierMiddleware(verifier):
    """Common wrapper for the authentication modules.

    * Parses the request before passing it on to the authentication module.
    * Sets 'pyoidc' cookie if authentication succeeds.
    * Redirects the user to complete the authentication.
    * Allows the user to retry authentication if it fails.

    :param verifier: authentication module
    """
    @wraps(verifier.verify)
    def wrapper(environ, start_response):
        data = get_post(environ)
        kwargs = dict(urlparse.parse_qsl(data))
        kwargs["state"] = json.loads(urllib.unquote(kwargs["state"]))
        val, completed = verifier.verify(**kwargs)
        if not completed:
            return val(environ, start_response)

        if val:
            set_cookie, cookie_value = verifier.create_cookie(val, "auth")
            cookie_value += "; path=/"
            url = "{base_url}?{query_string}".format(
                base_url="/authorization",
                query_string=kwargs["state"]["query"])
            response = SeeOther(url, headers=[(set_cookie, cookie_value)])
            return response(environ, start_response)
        else:  # Unsuccessful authentication
            url = "{base_url}?{query_string}".format(
                base_url="/authorization",
                query_string=kwargs["state"]["query"])
            response = SeeOther(url)
            return response(environ, start_response)

    return wrapper
Common wrapper for the authentication modules. * Parses the request before passing it on to the authentication module. * Sets 'pyoidc' cookie if authentication succeeds. * Redirects the user to complete the authentication. * Allows the user to retry authentication if it fails. :param verifier: authentication module
entailment
def pyoidcMiddleware(func):
    """Common wrapper for the underlying pyoidc library functions.

    Reads GET params and POST data before passing them on to the library
    and converts the response from oic.utils.http_util to WSGI.

    :param func: underlying library function
    """
    def wrapper(environ, start_response):
        data = get_or_post(environ)
        cookies = environ.get("HTTP_COOKIE", "")
        resp = func(request=data, cookie=cookies)
        return resp(environ, start_response)

    return wrapper
Common wrapper for the underlying pyoidc library functions. Reads GET params and POST data before passing them on to the library and converts the response from oic.utils.http_util to WSGI. :param func: underlying library function
entailment
def resp2flask(resp):
    """Convert an oic.utils.http_util instance to Flask."""
    if isinstance(resp, (Redirect, SeeOther)):
        code = int(resp.status.split()[0])
        raise cherrypy.HTTPRedirect(resp.message, code)
    return resp.message, resp.status, resp.headers
Convert an oic.utils.http_util instance to Flask.
entailment
def setup_authentication_methods(authn_config, template_env):
    """Add all authentication methods specified in the configuration."""
    routing = {}
    ac = AuthnBroker()
    for authn_method in authn_config:
        cls = make_cls_from_name(authn_method["class"])
        instance = cls(template_env=template_env, **authn_method["kwargs"])
        ac.add(authn_method["acr"], instance)
        routing[instance.url_endpoint] = VerifierMiddleware(instance)

    return ac, routing
Add all authentication methods specified in the configuration.
entailment
def setup_endpoints(provider):
    """Setup the OpenID Connect Provider endpoints."""
    app_routing = {}
    endpoints = [
        AuthorizationEndpoint(
            pyoidcMiddleware(provider.authorization_endpoint)),
        TokenEndpoint(
            pyoidcMiddleware(provider.token_endpoint)),
        UserinfoEndpoint(
            pyoidcMiddleware(provider.userinfo_endpoint)),
        RegistrationEndpoint(
            pyoidcMiddleware(provider.registration_endpoint)),
        EndSessionEndpoint(
            pyoidcMiddleware(provider.endsession_endpoint)),
    ]
    for ep in endpoints:
        app_routing["/{}".format(ep.etype)] = ep
    return app_routing
Setup the OpenID Connect Provider endpoints.
entailment
def _webfinger(provider, request, **kwargs):
    """Handle webfinger requests."""
    params = urlparse.parse_qs(request)
    if params["rel"][0] == OIC_ISSUER:
        wf = WebFinger()
        return Response(wf.response(params["resource"][0], provider.baseurl),
                        headers=[("Content-Type", "application/jrd+json")])
    else:
        return BadRequest("Incorrect webfinger.")
Handle webfinger requests.
entailment
def featuresQuery(self, **kwargs):
    """
    Converts a dictionary of keyword arguments into a SQL SELECT
    statement and a tuple of its SQL arguments.
    """
    # TODO: Optimize by refactoring out string concatenation
    sql = ""
    sql_rows = "SELECT * FROM FEATURE WHERE id > 1 "
    sql_args = ()
    if 'name' in kwargs and kwargs['name']:
        sql += "AND name = ? "
        sql_args += (kwargs.get('name'),)
    if 'geneSymbol' in kwargs and kwargs['geneSymbol']:
        sql += "AND gene_name = ? "
        sql_args += (kwargs.get('geneSymbol'),)
    if 'start' in kwargs and kwargs['start'] is not None:
        sql += "AND end > ? "
        sql_args += (kwargs.get('start'),)
    if 'end' in kwargs and kwargs['end'] is not None:
        sql += "AND start < ? "
        sql_args += (kwargs.get('end'),)
    if 'referenceName' in kwargs and kwargs['referenceName']:
        sql += "AND reference_name = ?"
        sql_args += (kwargs.get('referenceName'),)
    if 'parentId' in kwargs and kwargs['parentId']:
        sql += "AND parent_id = ? "
        sql_args += (kwargs['parentId'],)
    if kwargs.get('featureTypes') is not None \
            and len(kwargs['featureTypes']) > 0:
        sql += "AND type IN ("
        sql += ", ".join(["?"] * len(kwargs.get('featureTypes')))
        sql += ") "
        sql_args += tuple(kwargs.get('featureTypes'))
    sql_rows += sql
    sql_rows += " ORDER BY reference_name, start, end ASC "
    return sql_rows, sql_args
Converts a dictionary of keyword arguments into a SQL SELECT statement and a tuple of its SQL arguments.
entailment
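An illustrative call showing the statement this builds; `db` stands in for a backend instance exposing featuresQuery().

sql, args = db.featuresQuery(referenceName="chr1", start=1000, end=2000)
# sql  is roughly: "SELECT * FROM FEATURE WHERE id > 1 AND end > ? "
#                  "AND start < ? AND reference_name = ? "
#                  "ORDER BY reference_name, start, end ASC "
# args == (1000, 2000, "chr1")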
def searchFeaturesInDb(
        self, startIndex=0, maxResults=None,
        referenceName=None, start=None, end=None,
        parentId=None, featureTypes=None,
        name=None, geneSymbol=None):
    """
    Perform a full features query in database.

    :param startIndex: int representing first record to return
    :param maxResults: int representing number of records to return
    :param referenceName: string representing reference name, ex 'chr1'
    :param start: int position on reference to start search
    :param end: int position on reference to end search >= start
    :param parentId: string restrict search by id of parent node.
    :param name: match features by name
    :param geneSymbol: match features by gene symbol
    :return: an array of dictionaries, representing the returned data.
    """
    # TODO: Refactor out common bits of this and the above count query.
    sql, sql_args = self.featuresQuery(
        startIndex=startIndex, maxResults=maxResults,
        referenceName=referenceName, start=start, end=end,
        parentId=parentId, featureTypes=featureTypes,
        name=name, geneSymbol=geneSymbol)
    sql += sqlite_backend.limitsSql(startIndex, maxResults)
    query = self._dbconn.execute(sql, sql_args)
    return sqlite_backend.sqliteRowsToDicts(query.fetchall())
Perform a full features query in database. :param startIndex: int representing first record to return :param maxResults: int representing number of records to return :param referenceName: string representing reference name, ex 'chr1' :param start: int position on reference to start search :param end: int position on reference to end search >= start :param parentId: string restrict search by id of parent node. :param name: match features by name :param geneSymbol: match features by gene symbol :return an array of dictionaries, representing the returned data.
entailment
def getFeatureById(self, featureId):
    """
    Fetch feature by featureID.

    :param featureId: the FeatureID as found in GFF3 records
    :return: dictionary representing a feature object,
        or None if no match is found.
    """
    sql = "SELECT * FROM FEATURE WHERE id = ?"
    query = self._dbconn.execute(sql, (featureId,))
    ret = query.fetchone()
    if ret is None:
        return None
    return sqlite_backend.sqliteRowToDict(ret)
Fetch feature by featureID. :param featureId: the FeatureID as found in GFF3 records :return: dictionary representing a feature object, or None if no match is found.
entailment
def toProtocolElement(self):
    """
    Returns the representation of this FeatureSet as the corresponding
    ProtocolElement.
    """
    gaFeatureSet = protocol.FeatureSet()
    gaFeatureSet.id = self.getId()
    gaFeatureSet.dataset_id = self.getParentContainer().getId()
    gaFeatureSet.reference_set_id = pb.string(self._referenceSet.getId())
    gaFeatureSet.name = self._name
    gaFeatureSet.source_uri = self._sourceUri
    attributes = self.getAttributes()
    for key in attributes:
        gaFeatureSet.attributes.attr[key] \
            .values.extend(protocol.encodeValue(attributes[key]))
    return gaFeatureSet
Returns the representation of this FeatureSet as the corresponding ProtocolElement.
entailment
def getCompoundIdForFeatureId(self, featureId):
    """
    Returns server-style compound ID for an internal featureId.

    :param long featureId: id of feature in database
    :return: string representing ID for the specified GA4GH protocol
        Feature object in this FeatureSet.
    """
    if featureId is not None and featureId != "":
        compoundId = datamodel.FeatureCompoundId(
            self.getCompoundId(), str(featureId))
    else:
        compoundId = ""
    return str(compoundId)
Returns server-style compound ID for an internal featureId. :param long featureId: id of feature in database :return: string representing ID for the specified GA4GH protocol Feature object in this FeatureSet.
entailment
def getFeature(self, compoundId):
    """
    Fetches a simulated feature by ID.

    :param compoundId: any non-null string
    :return: A simulated feature with id set to the same value as the
        passed-in compoundId.
    :raises: exceptions.ObjectWithIdNotFoundException if None is passed
        in for the compoundId.
    """
    if compoundId is None:
        raise exceptions.ObjectWithIdNotFoundException(compoundId)
    randomNumberGenerator = random.Random()
    randomNumberGenerator.seed(self._randomSeed)
    feature = self._generateSimulatedFeature(randomNumberGenerator)
    feature.id = str(compoundId)
    feature.parent_id = ""  # TODO: Test with nonempty parentIDs?
    return feature
Fetches a simulated feature by ID. :param compoundId: any non-null string :return: A simulated feature with id set to the same value as the passed-in compoundId. :raises: exceptions.ObjectWithIdNotFoundException if None is passed in for the compoundId.
entailment
def getFeatures(self, referenceName=None, start=None, end=None,
                startIndex=None, maxResults=None,
                featureTypes=None, parentId=None,
                name=None, geneSymbol=None, numFeatures=10):
    """
    Returns a set number of simulated features.

    :param referenceName: name of reference to "search" on
    :param start: start coordinate of query
    :param end: end coordinate of query
    :param startIndex: None or int
    :param maxResults: None or int
    :param featureTypes: optional list of ontology terms to limit query
    :param parentId: optional parentId to limit query.
    :param name: the name of the feature
    :param geneSymbol: the symbol for the gene the features are on
    :param numFeatures: number of features to generate in the return.
        10 is a reasonable (if arbitrary) default.
    :return: Yields feature list
    """
    randomNumberGenerator = random.Random()
    randomNumberGenerator.seed(self._randomSeed)
    for featureId in range(numFeatures):
        gaFeature = self._generateSimulatedFeature(randomNumberGenerator)
        gaFeature.id = self.getCompoundIdForFeatureId(featureId)
        match = (
            gaFeature.start < end and
            gaFeature.end > start and
            gaFeature.reference_name == referenceName and (
                featureTypes is None or len(featureTypes) == 0 or
                gaFeature.feature_type in featureTypes))
        if match:
            gaFeature.parent_id = ""  # TODO: Test nonempty parentIDs?
            yield gaFeature
Returns a set number of simulated features. :param referenceName: name of reference to "search" on :param start: start coordinate of query :param end: end coordinate of query :param startIndex: None or int :param maxResults: None or int :param featureTypes: optional list of ontology terms to limit query :param parentId: optional parentId to limit query. :param name: the name of the feature :param geneSymbol: the symbol for the gene the features are on :param numFeatures: number of features to generate in the return. 10 is a reasonable (if arbitrary) default. :return: Yields feature list
entailment
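A standalone sketch of the seeding pattern above: re-seeding with the same value makes the "random" features reproducible regardless of when they are generated.

import random

rng = random.Random()
rng.seed(42)
first = rng.randint(0, 100)
rng.seed(42)
assert rng.randint(0, 100) == first  # same seed, same simulated feature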
def populateFromFile(self, dataUrl):
    """
    Populates the instance variables of this FeatureSet from the
    specified data URL.
    """
    self._dbFilePath = dataUrl
    self._db = Gff3DbBackend(self._dbFilePath)
Populates the instance variables of this FeatureSet from the specified data URL.
entailment
def populateFromRow(self, featureSetRecord):
    """
    Populates the instance variables of this FeatureSet from the
    specified DB row.
    """
    self._dbFilePath = featureSetRecord.dataurl
    self.setAttributesJson(featureSetRecord.attributes)
    self._db = Gff3DbBackend(self._dbFilePath)
Populates the instance variables of this FeatureSet from the specified DB row.
entailment
def getFeature(self, compoundId):
    """
    Returns a protocol.Feature object corresponding to a compoundId.

    :param compoundId: a datamodel.FeatureCompoundId object
    :return: a Feature object.
    :raises: exceptions.ObjectWithIdNotFoundException if invalid
        compoundId is provided.
    """
    featureId = long(compoundId.featureId)
    with self._db as dataSource:
        featureReturned = dataSource.getFeatureById(featureId)

    if featureReturned is None:
        raise exceptions.ObjectWithIdNotFoundException(compoundId)
    else:
        gaFeature = self._gaFeatureForFeatureDbRecord(featureReturned)
        return gaFeature
Returns a protocol.Feature object corresponding to a compoundId :param compoundId: a datamodel.FeatureCompoundId object :return: a Feature object. :raises: exceptions.ObjectWithIdNotFoundException if invalid compoundId is provided.
entailment
def _gaFeatureForFeatureDbRecord(self, feature):
    """
    :param feature: The DB Row representing a feature
    :return: the corresponding GA4GH protocol.Feature object
    """
    gaFeature = protocol.Feature()
    gaFeature.id = self.getCompoundIdForFeatureId(feature['id'])
    if feature.get('parent_id'):
        gaFeature.parent_id = self.getCompoundIdForFeatureId(
            feature['parent_id'])
    else:
        gaFeature.parent_id = ""
    gaFeature.feature_set_id = self.getId()
    gaFeature.reference_name = pb.string(feature.get('reference_name'))
    gaFeature.start = pb.int(feature.get('start'))
    gaFeature.end = pb.int(feature.get('end'))
    gaFeature.name = pb.string(feature.get('name'))
    if feature.get('strand', '') == '-':
        gaFeature.strand = protocol.NEG_STRAND
    else:
        # default to positive strand
        gaFeature.strand = protocol.POS_STRAND
    gaFeature.child_ids.extend(map(
        self.getCompoundIdForFeatureId,
        json.loads(feature['child_ids'])))
    gaFeature.feature_type.CopyFrom(
        self._ontology.getGaTermByName(feature['type']))
    attributes = json.loads(feature['attributes'])
    # TODO: Identify which values are ExternalIdentifiers and
    # which are OntologyTerms
    for key in attributes:
        for v in attributes[key]:
            gaFeature.attributes.attr[key].values.add().string_value = v
    if 'gene_name' in attributes and len(attributes['gene_name']) > 0:
        gaFeature.gene_symbol = pb.string(attributes['gene_name'][0])
    return gaFeature
:param feature: The DB Row representing a feature :return: the corresponding GA4GH protocol.Feature object
entailment
def getFeatures(self, referenceName=None, start=None, end=None,
                startIndex=None, maxResults=None,
                featureTypes=None, parentId=None,
                name=None, geneSymbol=None):
    """
    method passed to runSearchRequest to fulfill the request

    :param str referenceName: name of reference (ex: "chr1")
    :param start: castable to int, start position on reference
    :param end: castable to int, end position on reference
    :param startIndex: none or castable to int
    :param maxResults: none or castable to int
    :param featureTypes: array of str
    :param parentId: none or featureID of parent
    :param name: the name of the feature
    :param geneSymbol: the symbol for the gene the features are on
    :return: yields a protocol.Feature at a time
    """
    with self._db as dataSource:
        features = dataSource.searchFeaturesInDb(
            startIndex, maxResults,
            referenceName=referenceName,
            start=start, end=end,
            parentId=parentId, featureTypes=featureTypes,
            name=name, geneSymbol=geneSymbol)
    for feature in features:
        gaFeature = self._gaFeatureForFeatureDbRecord(feature)
        yield gaFeature
method passed to runSearchRequest to fulfill the request :param str referenceName: name of reference (ex: "chr1") :param start: castable to int, start position on reference :param end: castable to int, end position on reference :param startIndex: none or castable to int :param maxResults: none or castable to int :param featureTypes: array of str :param parentId: none or featureID of parent :param name: the name of the feature :param geneSymbol: the symbol for the gene the features are on :return: yields a protocol.Feature at a time
entailment
def addRnaQuantification(self, rnaQuantification):
    """
    Add an rnaQuantification to this rnaQuantificationSet
    """
    id_ = rnaQuantification.getId()
    self._rnaQuantificationIdMap[id_] = rnaQuantification
    self._rnaQuantificationIds.append(id_)
Add an rnaQuantification to this rnaQuantificationSet
entailment
def toProtocolElement(self):
    """
    Converts this rnaQuant into its GA4GH protocol equivalent.
    """
    protocolElement = protocol.RnaQuantificationSet()
    protocolElement.id = self.getId()
    protocolElement.dataset_id = self._parentContainer.getId()
    protocolElement.name = self._name
    self.serializeAttributes(protocolElement)
    return protocolElement
Converts this rnaQuant into its GA4GH protocol equivalent.
entailment
def populateFromFile(self, dataUrl):
    """
    Populates the instance variables of this RnaQuantificationSet from
    the specified data URL.
    """
    self._dbFilePath = dataUrl
    self._db = SqliteRnaBackend(self._dbFilePath)
    self.addRnaQuants()
Populates the instance variables of this RnaQuantificationSet from the specified data URL.
entailment
def populateFromRow(self, quantificationSetRecord):
    """
    Populates the instance variables of this RnaQuantificationSet from
    the specified DB row.
    """
    self._dbFilePath = quantificationSetRecord.dataurl
    self.setAttributesJson(quantificationSetRecord.attributes)
    self._db = SqliteRnaBackend(self._dbFilePath)
    self.addRnaQuants()
Populates the instance variables of this RnaQuantificationSet from the specified DB row.
entailment
def toProtocolElement(self):
    """
    Converts this rnaQuant into its GA4GH protocol equivalent.
    """
    protocolElement = protocol.RnaQuantification()
    protocolElement.id = self.getId()
    protocolElement.name = self._name
    protocolElement.description = self._description
    protocolElement.read_group_ids.extend(self._readGroupIds)
    protocolElement.programs.extend(self._programs)
    protocolElement.biosample_id = self._biosampleId
    protocolElement.feature_set_ids.extend(self._featureSetIds)
    protocolElement.rna_quantification_set_id = \
        self._parentContainer.getId()
    self.serializeAttributes(protocolElement)
    return protocolElement
Converts this rnaQuant into its GA4GH protocol equivalent.
entailment
def addRnaQuantMetadata(self, fields):
    """
    data elements are: Id, annotations, description, name, readGroupId
    where annotations is a comma separated list
    """
    self._featureSetIds = fields["feature_set_ids"].split(',')
    self._description = fields["description"]
    self._name = fields["name"]
    self._biosampleId = fields.get("biosample_id", "")
    if fields["read_group_ids"] == "":
        self._readGroupIds = []
    else:
        self._readGroupIds = fields["read_group_ids"].split(',')
    if fields["programs"] == "":
        self._programs = []
    else:
        # Need to use program Id's here to generate a list of Programs;
        # for now set to empty
        self._programs = []
data elements are: Id, annotations, description, name, readGroupId where annotations is a comma separated list
entailment
def getRnaQuantMetadata(self):
    """
    input is tab file with no header. Columns are:
    Id, annotations, description, name, readGroupId
    where annotation is a comma separated list
    """
    rnaQuantId = self.getLocalId()
    with self._db as dataSource:
        rnaQuantReturned = dataSource.getRnaQuantificationById(
            rnaQuantId)
    self.addRnaQuantMetadata(rnaQuantReturned)
input is tab file with no header. Columns are: Id, annotations, description, name, readGroupId where annotation is a comma separated list
entailment
def populateFromFile(self, dataUrl):
    """
    Populates the instance variables of this FeatureSet from the
    specified data URL.
    """
    self._dbFilePath = dataUrl
    self._db = SqliteRnaBackend(self._dbFilePath)
    self.getRnaQuantMetadata()
Populates the instance variables of this FeatureSet from the specified data URL.
entailment
def populateFromRow(self, row):
    """
    Populates the instance variables of this FeatureSet from the
    specified DB row.
    """
    self._dbFilePath = row[b'dataUrl']
    self._db = SqliteRnaBackend(self._dbFilePath)
    self.getRnaQuantMetadata()
Populates the instance variables of this FeatureSet from the specified DB row.
entailment
def getExpressionLevels(
        self, threshold=0.0, names=[], startIndex=0, maxResults=0):
    """
    Returns the list of ExpressionLevels in this RNA Quantification.
    """
    rnaQuantificationId = self.getLocalId()
    with self._db as dataSource:
        expressionsReturned = dataSource.searchExpressionLevelsInDb(
            rnaQuantificationId,
            names=names,
            threshold=threshold,
            startIndex=startIndex,
            maxResults=maxResults)
    expressionLevels = [
        SqliteExpressionLevel(self, expressionEntry)
        for expressionEntry in expressionsReturned]
    return expressionLevels
Returns the list of ExpressionLevels in this RNA Quantification.
entailment
def searchRnaQuantificationsInDb(
        self, rnaQuantificationId=""):
    """
    :param rnaQuantificationId: string restrict search by id
    :return an array of dictionaries, representing the returned data.
    """
    sql = "SELECT * FROM RnaQuantification"
    sql_args = ()
    if len(rnaQuantificationId) > 0:
        sql += " WHERE id = ? "
        sql_args += (rnaQuantificationId,)
    query = self._dbconn.execute(sql, sql_args)
    try:
        return sqlite_backend.iterativeFetch(query)
    except AttributeError:
        raise exceptions.RnaQuantificationNotFoundException(
            rnaQuantificationId)
:param rnaQuantificationId: string restrict search by id :return an array of dictionaries, representing the returned data.
entailment
def getRnaQuantificationById(self, rnaQuantificationId):
    """
    :param rnaQuantificationId: the RNA Quantification ID
    :return: dictionary representing an RnaQuantification object,
        or None if no match is found.
    """
    sql = "SELECT * FROM RnaQuantification WHERE id = ?"
    query = self._dbconn.execute(sql, (rnaQuantificationId,))
    try:
        return sqlite_backend.fetchOne(query)
    except AttributeError:
        raise exceptions.RnaQuantificationNotFoundException(
            rnaQuantificationId)
:param rnaQuantificationId: the RNA Quantification ID :return: dictionary representing an RnaQuantification object, or None if no match is found.
entailment
def searchExpressionLevelsInDb(
        self, rnaQuantId, names=[], threshold=0.0,
        startIndex=0, maxResults=0):
    """
    :param rnaQuantId: string restrict search by quantification id
    :param threshold: float minimum expression values to return
    :return an array of dictionaries, representing the returned data.
    """
    sql = ("SELECT * FROM Expression WHERE "
           "rna_quantification_id = ? "
           "AND expression > ? ")
    sql_args = (rnaQuantId, threshold)
    if len(names) > 0:
        sql += "AND name in ("
        sql += ",".join(['?' for name in names])
        sql += ") "
        for name in names:
            sql_args += (name,)
    sql += sqlite_backend.limitsSql(
        startIndex=startIndex, maxResults=maxResults)
    query = self._dbconn.execute(sql, sql_args)
    return sqlite_backend.iterativeFetch(query)
:param rnaQuantId: string restrict search by quantification id :param threshold: float minimum expression values to return :return an array of dictionaries, representing the returned data.
entailment
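A standalone sketch of the parameterized IN (...) clause construction used above; the names are illustrative.

names = ["geneA", "geneB"]
sql = "AND name in (" + ",".join("?" for _ in names) + ") "
args = tuple(names)
assert sql == "AND name in (?,?) "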
def getExpressionLevelById(self, expressionId):
    """
    :param expressionId: the ExpressionLevel ID
    :return: dictionary representing an ExpressionLevel object,
        or None if no match is found.
    """
    sql = "SELECT * FROM Expression WHERE id = ?"
    query = self._dbconn.execute(sql, (expressionId,))
    try:
        return sqlite_backend.fetchOne(query)
    except AttributeError:
        raise exceptions.ExpressionLevelNotFoundException(
            expressionId)
:param expressionId: the ExpressionLevel ID :return: dictionary representing an ExpressionLevel object, or None if no match is found.
entailment
def populateFromRow(self, callSetRecord):
    """
    Populates this CallSet from the specified DB row.
    """
    self._biosampleId = callSetRecord.biosampleid
    self.setAttributesJson(callSetRecord.attributes)
Populates this CallSet from the specified DB row.
entailment
def toProtocolElement(self):
    """
    Returns the representation of this CallSet as the corresponding
    ProtocolElement.
    """
    variantSet = self.getParentContainer()
    gaCallSet = protocol.CallSet(
        biosample_id=self.getBiosampleId())
    if variantSet.getCreationTime():
        gaCallSet.created = variantSet.getCreationTime()
    if variantSet.getUpdatedTime():
        gaCallSet.updated = variantSet.getUpdatedTime()
    gaCallSet.id = self.getId()
    gaCallSet.name = self.getLocalId()
    gaCallSet.variant_set_ids.append(variantSet.getId())
    self.serializeAttributes(gaCallSet)
    return gaCallSet
Returns the representation of this CallSet as the corresponding ProtocolElement.
entailment
def addVariantAnnotationSet(self, variantAnnotationSet):
    """
    Adds the specified variantAnnotationSet to this dataset.
    """
    id_ = variantAnnotationSet.getId()
    self._variantAnnotationSetIdMap[id_] = variantAnnotationSet
    self._variantAnnotationSetIds.append(id_)
Adds the specified variantAnnotationSet to this dataset.
entailment
def getVariantAnnotationSet(self, id_):
    """
    Returns the AnnotationSet in this dataset with the specified 'id'
    """
    if id_ not in self._variantAnnotationSetIdMap:
        raise exceptions.AnnotationSetNotFoundException(id_)
    return self._variantAnnotationSetIdMap[id_]
Returns the AnnotationSet in this dataset with the specified 'id'
entailment
def addCallSet(self, callSet):
    """
    Adds the specified CallSet to this VariantSet.
    """
    callSetId = callSet.getId()
    self._callSetIdMap[callSetId] = callSet
    self._callSetNameMap[callSet.getLocalId()] = callSet
    self._callSetIds.append(callSetId)
    self._callSetIdToIndex[callSet.getId()] = len(self._callSetIds) - 1
Adds the specified CallSet to this VariantSet.
entailment
def addCallSetFromName(self, sampleName):
    """
    Adds a CallSet for the specified sample name.
    """
    callSet = CallSet(self, sampleName)
    self.addCallSet(callSet)
Adds a CallSet for the specified sample name.
entailment
def getCallSetByName(self, name):
    """
    Returns a CallSet with the specified name, or raises a
    CallSetNameNotFoundException if it does not exist.
    """
    if name not in self._callSetNameMap:
        raise exceptions.CallSetNameNotFoundException(name)
    return self._callSetNameMap[name]
Returns a CallSet with the specified name, or raises a CallSetNameNotFoundException if it does not exist.
entailment
def getCallSet(self, id_):
    """
    Returns a CallSet with the specified id, or raises a
    CallSetNotFoundException if it does not exist.
    """
    if id_ not in self._callSetIdMap:
        raise exceptions.CallSetNotFoundException(id_)
    return self._callSetIdMap[id_]
Returns a CallSet with the specified id, or raises a CallSetNotFoundException if it does not exist.
entailment
def toProtocolElement(self):
    """
    Converts this VariantSet into its GA4GH protocol equivalent.
    """
    protocolElement = protocol.VariantSet()
    protocolElement.id = self.getId()
    protocolElement.dataset_id = self.getParentContainer().getId()
    protocolElement.reference_set_id = self._referenceSet.getId()
    protocolElement.metadata.extend(self.getMetadata())
    protocolElement.name = self.getLocalId()
    self.serializeAttributes(protocolElement)
    return protocolElement
Converts this VariantSet into its GA4GH protocol equivalent.
entailment
def _createGaVariant(self):
    """
    Convenience method to set the common fields in a GA Variant object
    from this variant set.
    """
    ret = protocol.Variant()
    if self._creationTime:
        ret.created = self._creationTime
    if self._updatedTime:
        ret.updated = self._updatedTime
    ret.variant_set_id = self.getId()
    return ret
Convenience method to set the common fields in a GA Variant object from this variant set.
entailment
def getVariantId(self, gaVariant):
    """
    Returns an ID string suitable for the specified GA Variant object
    in this variant set.
    """
    md5 = self.hashVariant(gaVariant)
    compoundId = datamodel.VariantCompoundId(
        self.getCompoundId(), gaVariant.reference_name,
        str(gaVariant.start), md5)
    return str(compoundId)
Returns an ID string suitable for the specified GA Variant object in this variant set.
entailment
def getCallSetId(self, sampleName):
    """
    Returns the callSetId for the specified sampleName in this
    VariantSet.
    """
    compoundId = datamodel.CallSetCompoundId(
        self.getCompoundId(), sampleName)
    return str(compoundId)
Returns the callSetId for the specified sampleName in this VariantSet.
entailment
def hashVariant(cls, gaVariant):
    """
    Produces an MD5 hash of the ga variant object to distinguish it from
    other variants at the same genomic coordinate.
    """
    hash_str = gaVariant.reference_bases + \
        str(tuple(gaVariant.alternate_bases))
    return hashlib.md5(hash_str).hexdigest()
Produces an MD5 hash of the ga variant object to distinguish it from other variants at the same genomic coordinate.
entailment
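A standalone sketch of the hash input format above; the bases are illustrative, and under Python 3 the string must be encoded before hashing (the code above is Python 2 era).

import hashlib

reference_bases = "A"
alternate_bases = ("T",)
hash_str = reference_bases + str(tuple(alternate_bases))  # "A('T',)"
print(hashlib.md5(hash_str.encode()).hexdigest())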
def generateVariant(self, referenceName, position, randomNumberGenerator):
    """
    Generate a random variant for the specified position using the
    specified random number generator. This generator should be seeded
    with a value that is unique to this position so that the same variant
    will always be produced regardless of the order it is generated in.
    """
    variant = self._createGaVariant()
    variant.reference_name = referenceName
    variant.start = position
    variant.end = position + 1  # SNPs only for now
    bases = ["A", "C", "G", "T"]
    ref = randomNumberGenerator.choice(bases)
    variant.reference_bases = ref
    alt = randomNumberGenerator.choice(
        [base for base in bases if base != ref])
    variant.alternate_bases.append(alt)
    randChoice = randomNumberGenerator.randint(0, 2)
    if randChoice == 0:
        variant.filters_applied = False
    elif randChoice == 1:
        variant.filters_applied = True
        variant.filters_passed = True
    else:
        variant.filters_applied = True
        variant.filters_passed = False
        variant.filters_failed.append('q10')
    for callSet in self.getCallSets():
        call = variant.calls.add()
        call.call_set_id = callSet.getId()
        # for now, the genotype is either [0,1], [1,1] or [1,0] with equal
        # probability; probably will want to do something more
        # sophisticated later.
        randomChoice = randomNumberGenerator.choice(
            [[0, 1], [1, 0], [1, 1]])
        call.genotype.extend(randomChoice)
        # TODO What is a reasonable model for generating these likelihoods?
        # Are these log-scaled? Spec does not say.
        call.genotype_likelihood.extend([-100, -100, -100])
    variant.id = self.getVariantId(variant)
    return variant
Generate a random variant for the specified position using the specified random number generator. This generator should be seeded with a value that is unique to this position so that the same variant will always be produced regardless of the order it is generated in.
entailment
def populateFromRow(self, variantSetRecord):
    """
    Populates this VariantSet from the specified DB row.
    """
    self._created = variantSetRecord.created
    self._updated = variantSetRecord.updated
    self.setAttributesJson(variantSetRecord.attributes)
    self._chromFileMap = {}
    # We can't load directly as we want tuples to be stored
    # rather than lists.
    for key, value in json.loads(variantSetRecord.dataurlindexmap).items():
        self._chromFileMap[key] = tuple(value)
    self._metadata = []
    for jsonDict in json.loads(variantSetRecord.metadata):
        metadata = protocol.fromJson(json.dumps(jsonDict),
                                     protocol.VariantSetMetadata)
        self._metadata.append(metadata)
Populates this VariantSet from the specified DB row.
entailment
def populateFromFile(self, dataUrls, indexFiles):
    """
    Populates this variant set using the specified lists of data files
    and indexes. These must be in the same order, such that the jth
    index file corresponds to the jth data file.
    """
    assert len(dataUrls) == len(indexFiles)
    for dataUrl, indexFile in zip(dataUrls, indexFiles):
        varFile = pysam.VariantFile(dataUrl, index_filename=indexFile)
        try:
            self._populateFromVariantFile(varFile, dataUrl, indexFile)
        finally:
            varFile.close()
Populates this variant set using the specified lists of data files and indexes. These must be in the same order, such that the jth index file corresponds to the jth data file.
entailment
def populateFromDirectory(self, vcfDirectory):
    """
    Populates this VariantSet by examining all the VCF files in the
    specified directory. This is mainly used as a convenience for
    testing purposes.
    """
    pattern = os.path.join(vcfDirectory, "*.vcf.gz")
    dataFiles = []
    indexFiles = []
    for vcfFile in glob.glob(pattern):
        dataFiles.append(vcfFile)
        indexFiles.append(vcfFile + ".tbi")
    self.populateFromFile(dataFiles, indexFiles)
Populates this VariantSet by examining all the VCF files in the specified directory. This is mainly used as a convenience for testing purposes.
entailment
def checkConsistency(self):
    """
    Perform consistency check on the variant set
    """
    for referenceName, (dataUrl, indexFile) in self._chromFileMap.items():
        varFile = pysam.VariantFile(dataUrl, index_filename=indexFile)
        try:
            for chrom in varFile.index:
                chrom, _, _ = self.sanitizeVariantFileFetch(chrom)
                if not isEmptyIter(varFile.fetch(chrom)):
                    self._checkMetadata(varFile)
                    self._checkCallSetIds(varFile)
        finally:
            varFile.close()
Perform consistency check on the variant set
entailment
def _populateFromVariantFile(self, varFile, dataUrl, indexFile):
    """
    Populates the instance variables of this VariantSet from the
    specified pysam VariantFile object.
    """
    if varFile.index is None:
        raise exceptions.NotIndexedException(dataUrl)
    for chrom in varFile.index:
        # Unlike Tabix indices, CSI indices include all contigs defined
        # in the BCF header. Thus we must test each one to see if
        # records exist or else they are likely to trigger spurious
        # overlapping errors.
        chrom, _, _ = self.sanitizeVariantFileFetch(chrom)
        if not isEmptyIter(varFile.fetch(chrom)):
            if chrom in self._chromFileMap:
                raise exceptions.OverlappingVcfException(dataUrl, chrom)
            self._chromFileMap[chrom] = dataUrl, indexFile
    self._updateMetadata(varFile)
    self._updateCallSetIds(varFile)
    self._updateVariantAnnotationSets(varFile, dataUrl)
Populates the instance variables of this VariantSet from the specified pysam VariantFile object.
entailment
def _updateVariantAnnotationSets(self, variantFile, dataUrl):
    """
    Updates the variant annotation set associated with this variant
    using information in the specified pysam variantFile.
    """
    # TODO check the consistency of this between VCF files.
    if not self.isAnnotated():
        annotationType = None
        for record in variantFile.header.records:
            if record.type == "GENERIC":
                if record.key == "SnpEffVersion":
                    annotationType = ANNOTATIONS_SNPEFF
                elif record.key == "VEP":
                    version = record.value.split()[0]
                    # TODO we need _much_ more sophisticated processing
                    # of VEP versions here. When do they become
                    # incompatible?
                    if version == "v82":
                        annotationType = ANNOTATIONS_VEP_V82
                    elif version == "v77":
                        annotationType = ANNOTATIONS_VEP_V77
                    else:
                        # TODO raise a proper typed exception here with
                        # the file name as an argument.
                        raise ValueError(
                            "Unsupported VEP version {} in '{}'".format(
                                version, dataUrl))
        if annotationType is None:
            infoKeys = variantFile.header.info.keys()
            if 'CSQ' in infoKeys or 'ANN' in infoKeys:
                # TODO likewise, we want a properly typed exception that
                # we can throw back to the repo manager UI and display
                # as an import error.
                raise ValueError(
                    "Unsupported annotations in '{}'".format(dataUrl))
        if annotationType is not None:
            vas = HtslibVariantAnnotationSet(self, self.getLocalId())
            vas.populateFromFile(variantFile, annotationType)
            self.addVariantAnnotationSet(vas)
Updates the variant annotation set associated with this variant set using information in the specified pysam variantFile.
entailment
def _updateMetadata(self, variantFile):
    """
    Updates the metadata for this variant set based on the specified
    variant file
    """
    metadata = self._getMetadataFromVcf(variantFile)
    if self._metadata is None:
        self._metadata = metadata
Updates the metadata for this variant set based on the specified variant file
entailment
def _checkMetadata(self, variantFile):
    """
    Checks that metadata is consistent
    """
    metadata = self._getMetadataFromVcf(variantFile)
    if self._metadata is not None and self._metadata != metadata:
        raise exceptions.InconsistentMetaDataException(
            variantFile.filename)
Checks that metadata is consistent
entailment
def _checkCallSetIds(self, variantFile):
    """
    Checks callSetIds for consistency
    """
    if len(self._callSetIdMap) > 0:
        callSetIds = set([
            self.getCallSetId(sample)
            for sample in variantFile.header.samples])
        if callSetIds != set(self._callSetIdMap.keys()):
            raise exceptions.InconsistentCallSetIdException(
                variantFile.filename)
Checks callSetIds for consistency
entailment
def _updateCallSetIds(self, variantFile):
    """
    Updates the call set IDs based on the specified variant file.
    """
    if len(self._callSetIdMap) == 0:
        for sample in variantFile.header.samples:
            self.addCallSetFromName(sample)
Updates the call set IDs based on the specified variant file.
entailment
def convertVariant(self, record, callSetIds):
    """
    Converts the specified pysam variant record into a GA4GH Variant
    object. Only calls for the specified list of callSetIds will
    be included.
    """
    variant = self._createGaVariant()
    variant.reference_name = record.contig
    if record.id is not None:
        variant.names.extend(record.id.split(';'))
    variant.start = record.start  # 0-based inclusive
    variant.end = record.stop  # 0-based exclusive
    variant.reference_bases = record.ref
    if record.alts is not None:
        variant.alternate_bases.extend(list(record.alts))
    filterKeys = record.filter.keys()
    if len(filterKeys) == 0:
        variant.filters_applied = False
    else:
        variant.filters_applied = True
        if len(filterKeys) == 1 and filterKeys[0] == 'PASS':
            variant.filters_passed = True
        else:
            variant.filters_passed = False
            variant.filters_failed.extend(filterKeys)
    # record.qual is also available, when supported by GAVariant.
    for key, value in record.info.iteritems():
        if value is None:
            continue
        if key == 'SVTYPE':
            variant.variant_type = value
        elif key == 'SVLEN':
            variant.svlen = int(value[0])
        elif key == 'CIPOS':
            variant.cipos.extend(value)
        elif key == 'CIEND':
            variant.ciend.extend(value)
        elif isinstance(value, str):
            value = value.split(',')
        protocol.setAttribute(
            variant.attributes.attr[key].values, value)
    for callSetId in callSetIds:
        callSet = self.getCallSet(callSetId)
        pysamCall = record.samples[str(callSet.getSampleName())]
        variant.calls.add().CopyFrom(
            self._convertGaCall(callSet, pysamCall))
    variant.id = self.getVariantId(variant)
    return variant
Converts the specified pysam variant record into a GA4GH Variant object. Only calls for the specified list of callSetIds will be included.
entailment
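The FILTER handling in convertVariant maps three VCF states onto the GA4GH flags; the following self-contained sketch mirrors that branching, with a plain dict standing in for the protobuf Variant (an illustrative assumption):

def mapFilters(filterKeys):
    # Mirrors the FILTER branching in convertVariant: no keys means the
    # filters were not applied; a lone 'PASS' means applied and passed;
    # anything else records the failing keys.
    flags = {"filters_applied": False, "filters_passed": False,
             "filters_failed": []}
    if len(filterKeys) == 0:
        flags["filters_applied"] = False
    else:
        flags["filters_applied"] = True
        if len(filterKeys) == 1 and filterKeys[0] == 'PASS':
            flags["filters_passed"] = True
        else:
            flags["filters_failed"].extend(filterKeys)
    return flags

print(mapFilters([]))              # filters not applied
print(mapFilters(['PASS']))        # applied and passed
print(mapFilters(['q10', 's50']))  # applied, failed q10 and s50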
def getPysamVariants(self, referenceName, startPosition, endPosition):
    """
    Returns an iterator over the pysam VCF records corresponding to the
    specified query.
    """
    if referenceName in self._chromFileMap:
        varFileName = self._chromFileMap[referenceName]
        referenceName, startPosition, endPosition = \
            self.sanitizeVariantFileFetch(
                referenceName, startPosition, endPosition)
        cursor = self.getFileHandle(varFileName).fetch(
            referenceName, startPosition, endPosition)
        for record in cursor:
            yield record
Returns an iterator over the pysam VCF records corresponding to the specified query.
entailment
def getVariants(self, referenceName, startPosition, endPosition,
                callSetIds=[]):
    """
    Returns an iterator over the specified variants. The parameters
    correspond to the attributes of a GASearchVariantsRequest object.
    """
    if callSetIds is None:
        callSetIds = self._callSetIds
    else:
        for callSetId in callSetIds:
            if callSetId not in self._callSetIds:
                raise exceptions.CallSetNotInVariantSetException(
                    callSetId, self.getId())
    for record in self.getPysamVariants(
            referenceName, startPosition, endPosition):
        yield self.convertVariant(record, callSetIds)
Returns an iterator over the specified variants. The parameters correspond to the attributes of a GASearchVariantsRequest object.
entailment
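A hedged usage sketch for getVariants; variantSet stands for an already-populated instance of the class these methods belong to, and the region coordinates are made up:

# Passing callSetIds=None selects every call set known to the variant set,
# per the branch at the top of getVariants.
for variant in variantSet.getVariants("1", 100000, 200000, callSetIds=None):
    print(variant.reference_name, variant.start, variant.reference_bases)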
def getMetadataId(self, metadata):
    """
    Returns the id of the specified metadata object
    """
    return str(datamodel.VariantSetMetadataCompoundId(
        self.getCompoundId(), 'metadata:' + metadata.key))
Returns the id of the specified metadata object
entailment
def _createGaVariantAnnotation(self):
    """
    Convenience method to set the common fields in a GA VariantAnnotation
    object from this variant set.
    """
    ret = protocol.VariantAnnotation()
    ret.created = self._creationTime
    ret.variant_annotation_set_id = self.getId()
    return ret
Convenience method to set the common fields in a GA VariantAnnotation object from this variant set.
entailment
def toProtocolElement(self):
    """
    Converts this VariantAnnotationSet into its GA4GH protocol
    equivalent.
    """
    protocolElement = protocol.VariantAnnotationSet()
    protocolElement.id = self.getId()
    protocolElement.variant_set_id = self._variantSet.getId()
    protocolElement.name = self.getLocalId()
    protocolElement.analysis.CopyFrom(self.getAnalysis())
    self.serializeAttributes(protocolElement)
    return protocolElement
Converts this VariantAnnotationSet into its GA4GH protocol equivalent.
entailment
def hashVariantAnnotation(cls, gaVariant, gaVariantAnnotation):
    """
    Produces an MD5 hash of the gaVariant and gaVariantAnnotation objects
    """
    treffs = [treff.id for treff in gaVariantAnnotation.transcript_effects]
    return hashlib.md5(
        "{}\t{}\t{}\t".format(
            gaVariant.reference_bases,
            tuple(gaVariant.alternate_bases),
            treffs)).hexdigest()
Produces an MD5 hash of the gaVariant and gaVariantAnnotation objects
entailment
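The hash input in hashVariantAnnotation is a tab-separated string of the reference bases, the alternate-bases tuple, and the transcript-effect id list; a self-contained illustration with made-up values (the .encode call is added for Python 3, which the Python 2-era snippet does not need):

import hashlib

reference_bases = "A"
alternate_bases = ("T",)
treffs = ["effect-id-1", "effect-id-2"]  # hypothetical transcript effect ids
digest = hashlib.md5(
    "{}\t{}\t{}\t".format(
        reference_bases, alternate_bases, treffs).encode("utf-8")).hexdigest()
print(digest)  # identical inputs always yield the same digest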
def getVariantAnnotationId(self, gaVariant, gaAnnotation):
    """
    Produces a stringified compoundId representing a variant
    annotation.

    :param gaVariant: protocol.Variant
    :param gaAnnotation: protocol.VariantAnnotation
    :return: compoundId String
    """
    md5 = self.hashVariantAnnotation(gaVariant, gaAnnotation)
    compoundId = datamodel.VariantAnnotationCompoundId(
        self.getCompoundId(), gaVariant.reference_name,
        str(gaVariant.start), md5)
    return str(compoundId)
Produces a stringified compoundId representing a variant annotation. :param gaVariant: protocol.Variant :param gaAnnotation: protocol.VariantAnnotation :return: compoundId String
entailment