id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
36,896
def figure_nobar(*args, **kwargs):
    """Make a matplotlib figure with no toolbar.

    Temporarily sets rcParams['toolbar'] = 'none' while creating the
    figure (restoring the previous value afterwards) and disconnects
    every pre-registered key-press callback so the figure is bare.

    All arguments are forwarded unchanged to matplotlib.pyplot.figure.
    Returns the created Figure.
    """
    from matplotlib import rcParams, pyplot as plt
    old_val = rcParams['toolbar']
    try:
        rcParams['toolbar'] = 'none'
        fig = plt.figure(*args, **kwargs)
        # Copy the keys first: disconnect() mutates the registry.
        cbs = list(fig.canvas.callbacks.callbacks['key_press_event'].keys())
        for key in cbs:
            fig.canvas.callbacks.disconnect(key)
    finally:
        # The original `except Exception as ex: raise ex` clause was a
        # no-op that only added a frame to the traceback; `finally`
        # alone guarantees the rcParams restore on any exit path.
        rcParams['toolbar'] = old_val
    return fig
[ "def", "figure_nobar", "(", "*", "args", ",", "**", "kwargs", ")", ":", "from", "matplotlib", "import", "rcParams", ",", "pyplot", "as", "plt", "old_val", "=", "rcParams", "[", "'toolbar'", "]", "try", ":", "rcParams", "[", "'toolbar'", "]", "=", "'none'", "fig", "=", "plt", ".", "figure", "(", "*", "args", ",", "**", "kwargs", ")", "cbs", "=", "list", "(", "fig", ".", "canvas", ".", "callbacks", ".", "callbacks", "[", "'key_press_event'", "]", ".", "keys", "(", ")", ")", "for", "key", "in", "cbs", ":", "fig", ".", "canvas", ".", "callbacks", ".", "disconnect", "(", "key", ")", "except", "Exception", "as", "ex", ":", "raise", "ex", "finally", ":", "rcParams", "[", "'toolbar'", "]", "=", "old_val", "return", "fig" ]
make matplotlib figure with no toolbar .
train
false
36,898
def validate_recaptcha(response, remote_ip=None):
    """Validate a reCAPTCHA response against the verify endpoint.

    Returns True only when the verify call comes back 200 OK and its
    JSON body reports success. An empty/missing response short-circuits
    to False without any network traffic.
    """
    if not response:
        return False
    payload = {
        'secret': settings.RECAPTCHA_SECRET_KEY,
        'response': response,
    }
    if remote_ip:
        payload['remoteip'] = remote_ip
    reply = requests.post(settings.RECAPTCHA_VERIFY_URL, data=payload)
    status_ok = (reply.status_code == httplib.OK)
    return status_ok and reply.json().get('success')
[ "def", "validate_recaptcha", "(", "response", ",", "remote_ip", "=", "None", ")", ":", "if", "(", "not", "response", ")", ":", "return", "False", "payload", "=", "{", "'secret'", ":", "settings", ".", "RECAPTCHA_SECRET_KEY", ",", "'response'", ":", "response", "}", "if", "remote_ip", ":", "payload", ".", "update", "(", "{", "'remoteip'", ":", "remote_ip", "}", ")", "resp", "=", "requests", ".", "post", "(", "settings", ".", "RECAPTCHA_VERIFY_URL", ",", "data", "=", "payload", ")", "return", "(", "(", "resp", ".", "status_code", "==", "httplib", ".", "OK", ")", "and", "resp", ".", "json", "(", ")", ".", "get", "(", "'success'", ")", ")" ]
validate if the recaptcha response is valid .
train
false
36,899
def parse_posix_class(source, info):
    """Parse a POSIX character class (e.g. the body of [:alpha:]).

    A leading '^' negates the class. The class body must be terminated
    by ':]' or a ParseError is raised.
    """
    negated = source.match('^')
    prop_name, name = parse_property_name(source)
    if not source.match(':]'):
        raise ParseError()
    positive = not negated
    return lookup_property(prop_name, name, positive, source, posix=True)
[ "def", "parse_posix_class", "(", "source", ",", "info", ")", ":", "negate", "=", "source", ".", "match", "(", "'^'", ")", "(", "prop_name", ",", "name", ")", "=", "parse_property_name", "(", "source", ")", "if", "(", "not", "source", ".", "match", "(", "':]'", ")", ")", ":", "raise", "ParseError", "(", ")", "return", "lookup_property", "(", "prop_name", ",", "name", ",", "(", "not", "negate", ")", ",", "source", ",", "posix", "=", "True", ")" ]
parses a posix character class .
train
false
36,900
def _str_to_ord(content, weights):
    """Convert a string to its lexicographical ordinal.

    Each character contributes its _ALPHABET index scaled by the
    positional weight, plus one per position.
    """
    return sum(
        weights[position] * _ALPHABET.index(char) + 1
        for position, char in enumerate(content)
    )
[ "def", "_str_to_ord", "(", "content", ",", "weights", ")", ":", "ordinal", "=", "0", "for", "(", "i", ",", "c", ")", "in", "enumerate", "(", "content", ")", ":", "ordinal", "+=", "(", "(", "weights", "[", "i", "]", "*", "_ALPHABET", ".", "index", "(", "c", ")", ")", "+", "1", ")", "return", "ordinal" ]
converts a string to its lexicographical order .
train
true
36,901
def org_region_options(zones=False):
    """Get all options for region IDs.

    @param zones: if True return only top-level regions (rows with no
                  parent), otherwise only child regions
    @return: dict of {region_id: represented name}; empty dict when the
             region represent is not configured
    """
    represent = current.s3db.org_region_represent
    if (represent is None):
        # Regions are not enabled in this deployment
        return {}
    db = current.db
    rtable = db.org_region
    if zones:
        # Zones are root nodes (no parent)
        query = (rtable.parent == None)
    else:
        # Regions are child nodes (have a parent)
        query = (rtable.parent != None)
    query &= (rtable.deleted != True)
    rows = db(query).select(rtable.id, rtable.name)
    options = represent.bulk(None, rows=rows)
    # bulk() may add a None entry for the "no value" case - drop it
    options.pop(None, None)
    return options
[ "def", "org_region_options", "(", "zones", "=", "False", ")", ":", "represent", "=", "current", ".", "s3db", ".", "org_region_represent", "if", "(", "represent", "is", "None", ")", ":", "return", "{", "}", "db", "=", "current", ".", "db", "rtable", "=", "db", ".", "org_region", "if", "zones", ":", "query", "=", "(", "rtable", ".", "parent", "==", "None", ")", "else", ":", "query", "=", "(", "rtable", ".", "parent", "!=", "None", ")", "query", "&=", "(", "rtable", ".", "deleted", "!=", "True", ")", "rows", "=", "db", "(", "query", ")", ".", "select", "(", "rtable", ".", "id", ",", "rtable", ".", "name", ")", "options", "=", "represent", ".", "bulk", "(", "None", ",", "rows", "=", "rows", ")", "options", ".", "pop", "(", "None", ",", "None", ")", "return", "options" ]
get all options for region ids .
train
false
36,903
def org_search():
    """Organisation REST controller limited to search_ac.

    For use in autocompletes; allows differential access permissions
    compared to the full organisation controller.
    """
    def _prep(r):
        # Reject every method except the autocomplete search
        return r.method == 'search_ac'
    s3.prep = _prep
    return s3_rest_controller(module, 'organisation')
[ "def", "org_search", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "r", ".", "method", "==", "'search_ac'", ")", ")", "return", "s3_rest_controller", "(", "module", ",", "'organisation'", ")" ]
organisation rest controller - limited to just search_ac for use in autocompletes - allows differential access permissions .
train
false
36,908
@release.command()
@click.argument('version')
def bump(version):
    """Bump the version number.

    CLI sub-command of the `release` group; *version* is the new
    version string, passed straight through to bump_version().
    """
    bump_version(version)
[ "@", "release", ".", "command", "(", ")", "@", "click", ".", "argument", "(", "'version'", ")", "def", "bump", "(", "version", ")", ":", "bump_version", "(", "version", ")" ]
bump the version number .
train
false
36,909
def vpn_ping(address, port, timeout=0.05, session_id=None):
    """Sends a VPN negotiation packet and returns the server session.

    NOTE: Python 2 code (long literal suffix and print statement).
    Returns False on timeout or a malformed reply, otherwise the
    64-bit server session id.

    :param address: server hostname or IP
    :param port: UDP port of the VPN endpoint
    :param timeout: socket receive timeout in seconds
    :param session_id: 64-bit client session id (random if None)
    """
    if (session_id is None):
        # Random unsigned 64-bit session id (0 .. 2**64 - 1)
        session_id = random.randint(0, 18446744073709551615L)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Opcode byte 56 followed by the 8-byte session id and padding
    # (presumably an OpenVPN-style reset packet - TODO confirm)
    data = struct.pack('!BQxxxxx', 56, session_id)
    sock.sendto(data, (address, port))
    sock.settimeout(timeout)
    try:
        received = sock.recv(2048)
    except socket.timeout:
        return False
    finally:
        sock.close()
    fmt = '!BQxxxxxQxxxx'
    if (len(received) != struct.calcsize(fmt)):
        print struct.calcsize(fmt)
        return False
    (identifier, server_sess, client_sess) = struct.unpack(fmt, received)
    # Accept only identifier 64 with our own session id echoed back
    if ((identifier == 64) and (client_sess == session_id)):
        return server_sess
[ "def", "vpn_ping", "(", "address", ",", "port", ",", "timeout", "=", "0.05", ",", "session_id", "=", "None", ")", ":", "if", "(", "session_id", "is", "None", ")", ":", "session_id", "=", "random", ".", "randint", "(", "0", ",", "18446744073709551615", "L", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_DGRAM", ")", "data", "=", "struct", ".", "pack", "(", "'!BQxxxxx'", ",", "56", ",", "session_id", ")", "sock", ".", "sendto", "(", "data", ",", "(", "address", ",", "port", ")", ")", "sock", ".", "settimeout", "(", "timeout", ")", "try", ":", "received", "=", "sock", ".", "recv", "(", "2048", ")", "except", "socket", ".", "timeout", ":", "return", "False", "finally", ":", "sock", ".", "close", "(", ")", "fmt", "=", "'!BQxxxxxQxxxx'", "if", "(", "len", "(", "received", ")", "!=", "struct", ".", "calcsize", "(", "fmt", ")", ")", ":", "print", "struct", ".", "calcsize", "(", "fmt", ")", "return", "False", "(", "identifier", ",", "server_sess", ",", "client_sess", ")", "=", "struct", ".", "unpack", "(", "fmt", ",", "received", ")", "if", "(", "(", "identifier", "==", "64", ")", "and", "(", "client_sess", "==", "session_id", ")", ")", ":", "return", "server_sess" ]
sends a vpn negotiation packet and returns the server session .
train
false
36,910
def binary_hit_or_miss(input, structure1=None, structure2=None, output=None, origin1=0, origin2=None):
    """Multi-dimensional binary hit-or-miss transform.

    Erodes *input* by structure1 and, with the invert flag set, by
    structure2, then ANDs the two erosions together.

    :param structure1: foreground structuring element (default:
        connectivity-1 element for the input's rank)
    :param structure2: second structuring element (default: logical
        complement of structure1)
    :param output: optional pre-allocated ndarray for in-place result
    :param origin1: placement of structure1
    :param origin2: placement of structure2; defaults to origin1
    """
    input = numpy.asarray(input)
    if (structure1 is None):
        structure1 = generate_binary_structure(input.ndim, 1)
    if (structure2 is None):
        structure2 = numpy.logical_not(structure1)
    origin1 = _ni_support._normalize_sequence(origin1, input.ndim)
    if (origin2 is None):
        origin2 = origin1
    else:
        origin2 = _ni_support._normalize_sequence(origin2, input.ndim)
    # First erosion: foreground pattern
    tmp1 = _binary_erosion(input, structure1, 1, None, None, 0, origin1, 0, False)
    inplace = isinstance(output, numpy.ndarray)
    # Second erosion: note the invert flag (second-to-last arg) is 1
    result = _binary_erosion(input, structure2, 1, None, output, 0, origin2, 1, False)
    if inplace:
        # output holds the second erosion - negate and AND in place
        numpy.logical_not(output, output)
        numpy.logical_and(tmp1, output, output)
    else:
        numpy.logical_not(result, result)
        return numpy.logical_and(tmp1, result)
[ "def", "binary_hit_or_miss", "(", "input", ",", "structure1", "=", "None", ",", "structure2", "=", "None", ",", "output", "=", "None", ",", "origin1", "=", "0", ",", "origin2", "=", "None", ")", ":", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "if", "(", "structure1", "is", "None", ")", ":", "structure1", "=", "generate_binary_structure", "(", "input", ".", "ndim", ",", "1", ")", "if", "(", "structure2", "is", "None", ")", ":", "structure2", "=", "numpy", ".", "logical_not", "(", "structure1", ")", "origin1", "=", "_ni_support", ".", "_normalize_sequence", "(", "origin1", ",", "input", ".", "ndim", ")", "if", "(", "origin2", "is", "None", ")", ":", "origin2", "=", "origin1", "else", ":", "origin2", "=", "_ni_support", ".", "_normalize_sequence", "(", "origin2", ",", "input", ".", "ndim", ")", "tmp1", "=", "_binary_erosion", "(", "input", ",", "structure1", ",", "1", ",", "None", ",", "None", ",", "0", ",", "origin1", ",", "0", ",", "False", ")", "inplace", "=", "isinstance", "(", "output", ",", "numpy", ".", "ndarray", ")", "result", "=", "_binary_erosion", "(", "input", ",", "structure2", ",", "1", ",", "None", ",", "output", ",", "0", ",", "origin2", ",", "1", ",", "False", ")", "if", "inplace", ":", "numpy", ".", "logical_not", "(", "output", ",", "output", ")", "numpy", ".", "logical_and", "(", "tmp1", ",", "output", ",", "output", ")", "else", ":", "numpy", ".", "logical_not", "(", "result", ",", "result", ")", "return", "numpy", ".", "logical_and", "(", "tmp1", ",", "result", ")" ]
multi-dimensional binary hit-or-miss transform .
train
false
36,911
def ascii85decode(data):
    """Decode data in Ascii85 encoding (as used in PDF streams).

    NOTE: Python 2 code - it concatenates str with struct.pack()
    results, which would fail under Python 3.

    Handles the 'z' shortcut for four zero bytes and the '~'
    end-of-data marker with partial-group padding.
    """
    n = b = 0
    out = ''
    for c in data:
        if (('!' <= c) and (c <= 'u')):
            # Regular base-85 digit: fold into the 32-bit accumulator
            n += 1
            b = ((b * 85) + (ord(c) - 33))
            if (n == 5):
                out += struct.pack('>L', b)
                n = b = 0
        elif (c == 'z'):
            # 'z' is only legal on a group boundary; expands to 4 NULs
            assert (n == 0)
            out += '\x00\x00\x00\x00'
        elif (c == '~'):
            # End marker: pad partial group with max digit (84) and
            # keep only the n-1 meaningful decoded bytes
            if n:
                for _ in range((5 - n)):
                    b = ((b * 85) + 84)
                out += struct.pack('>L', b)[:(n - 1)]
            break
    return out
[ "def", "ascii85decode", "(", "data", ")", ":", "n", "=", "b", "=", "0", "out", "=", "''", "for", "c", "in", "data", ":", "if", "(", "(", "'!'", "<=", "c", ")", "and", "(", "c", "<=", "'u'", ")", ")", ":", "n", "+=", "1", "b", "=", "(", "(", "b", "*", "85", ")", "+", "(", "ord", "(", "c", ")", "-", "33", ")", ")", "if", "(", "n", "==", "5", ")", ":", "out", "+=", "struct", ".", "pack", "(", "'>L'", ",", "b", ")", "n", "=", "b", "=", "0", "elif", "(", "c", "==", "'z'", ")", ":", "assert", "(", "n", "==", "0", ")", "out", "+=", "'\\x00\\x00\\x00\\x00'", "elif", "(", "c", "==", "'~'", ")", ":", "if", "n", ":", "for", "_", "in", "range", "(", "(", "5", "-", "n", ")", ")", ":", "b", "=", "(", "(", "b", "*", "85", ")", "+", "84", ")", "out", "+=", "struct", ".", "pack", "(", "'>L'", ",", "b", ")", "[", ":", "(", "n", "-", "1", ")", "]", "break", "return", "out" ]
in ascii85 encoding .
train
false
36,912
def slot():
    """RESTful CRUD controller for slots."""
    controller_response = s3_rest_controller()
    return controller_response
[ "def", "slot", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
36,914
def sorted_groupby(seq, key):
    """Sort *seq* by *key*, then group consecutive equal keys.

    Unlike bare ``itertools.groupby``, this sorts first so each
    distinct key yields exactly one group.

    :param seq: any iterable
    :param key: a callable, or an attribute name (string) which is
        converted with ``operator.attrgetter``
    :return: iterator of ``(key_value, group_iterator)`` pairs
    """
    if not callable(key):
        # Allow passing an attribute name instead of a key function
        key = operator.attrgetter(key)
    return itertools.groupby(sorted(seq, key=key), key=key)
[ "def", "sorted_groupby", "(", "seq", ",", "key", ")", ":", "if", "(", "not", "hasattr", "(", "key", ",", "'__call__'", ")", ")", ":", "key", "=", "operator", ".", "attrgetter", "(", "key", ")", "return", "itertools", ".", "groupby", "(", "sorted", "(", "seq", ",", "key", "=", "key", ")", ",", "key", "=", "key", ")" ]
given a sequence .
train
false
36,915
def update_sink(sink_name, filter_):
    """Changes a sink's filter.

    NOTE: Python 2 code (print statement). Reloads the sink from the
    API, replaces its filter expression, and pushes the update back.

    :param sink_name: name of an existing logging sink
    :param filter_: the new filter expression string
    """
    logging_client = logging.Client()
    sink = logging_client.sink(sink_name)
    # Fetch the current server-side state before mutating
    sink.reload()
    sink.filter_ = filter_
    # NOTE(review): the message is printed before update() is sent
    print 'Updated sink {}'.format(sink.name)
    sink.update()
[ "def", "update_sink", "(", "sink_name", ",", "filter_", ")", ":", "logging_client", "=", "logging", ".", "Client", "(", ")", "sink", "=", "logging_client", ".", "sink", "(", "sink_name", ")", "sink", ".", "reload", "(", ")", "sink", ".", "filter_", "=", "filter_", "print", "'Updated sink {}'", ".", "format", "(", "sink", ".", "name", ")", "sink", ".", "update", "(", ")" ]
changes a sinks filter .
train
false
36,916
def _iterKeywords(results): for key in results: (yield (key.id, key.keyword))
[ "def", "_iterKeywords", "(", "results", ")", ":", "for", "key", "in", "results", ":", "(", "yield", "(", "key", ".", "id", ",", "key", ".", "keyword", ")", ")" ]
iterate over columns of a selection of the keyword table .
train
false
36,918
def parse_openshift_version(output):
    """Extract the openshift version number from version-command output.

    Args:
        output: multi-line output of `openshift version`
    Returns:
        the bare version string ('' when not found), with any
        '-qualifier' suffix stripped
    """
    pairs = (line.split(' v') for line in output.splitlines() if ' v' in line)
    versions = dict(pairs)
    raw_version = versions.get('openshift', '')
    return raw_version.split('-')[0]
[ "def", "parse_openshift_version", "(", "output", ")", ":", "versions", "=", "dict", "(", "(", "e", ".", "split", "(", "' v'", ")", "for", "e", "in", "output", ".", "splitlines", "(", ")", "if", "(", "' v'", "in", "e", ")", ")", ")", "ver", "=", "versions", ".", "get", "(", "'openshift'", ",", "''", ")", "ver", "=", "ver", ".", "split", "(", "'-'", ")", "[", "0", "]", "return", "ver" ]
apply provider facts to supplied facts dict args: string: output of openshift version returns: string: the version number .
train
false
36,919
@register.filter
@stringfilter
def url_target_blank(text):
    """Set target="_blank" on every hyperlink in *text*.

    Rewrites each '<a ' opening tag and marks the result safe for
    template output.
    """
    blanked = text.replace('<a ', '<a target="_blank" ')
    return mark_safe(blanked)
[ "@", "register", ".", "filter", "@", "stringfilter", "def", "url_target_blank", "(", "text", ")", ":", "return", "mark_safe", "(", "text", ".", "replace", "(", "'<a '", ",", "'<a target=\"_blank\" '", ")", ")" ]
sets the target="_blank" for hyperlinks .
train
false
36,921
def fit_image(width, height, pwidth, pheight):
    """Fit an image into a box of pwidth x pheight.

    Shrinks proportionally, first by height then by width (re-checking
    height after the width correction).

    Returns (scaled, new_width, new_height), where *scaled* says
    whether any shrinking was needed.
    """
    scaled = height > pheight or width > pwidth
    if height > pheight:
        ratio = pheight / float(height)
        width, height = floor(ratio * width), pheight
    if width > pwidth:
        ratio = pwidth / float(width)
        width, height = pwidth, floor(ratio * height)
    if height > pheight:
        # Width correction can interact with rounding; re-cap height
        ratio = pheight / float(height)
        width, height = floor(ratio * width), pheight
    return (scaled, int(width), int(height))
[ "def", "fit_image", "(", "width", ",", "height", ",", "pwidth", ",", "pheight", ")", ":", "scaled", "=", "(", "(", "height", ">", "pheight", ")", "or", "(", "width", ">", "pwidth", ")", ")", "if", "(", "height", ">", "pheight", ")", ":", "corrf", "=", "(", "pheight", "/", "float", "(", "height", ")", ")", "(", "width", ",", "height", ")", "=", "(", "floor", "(", "(", "corrf", "*", "width", ")", ")", ",", "pheight", ")", "if", "(", "width", ">", "pwidth", ")", ":", "corrf", "=", "(", "pwidth", "/", "float", "(", "width", ")", ")", "(", "width", ",", "height", ")", "=", "(", "pwidth", ",", "floor", "(", "(", "corrf", "*", "height", ")", ")", ")", "if", "(", "height", ">", "pheight", ")", ":", "corrf", "=", "(", "pheight", "/", "float", "(", "height", ")", ")", "(", "width", ",", "height", ")", "=", "(", "floor", "(", "(", "corrf", "*", "width", ")", ")", ",", "pheight", ")", "return", "(", "scaled", ",", "int", "(", "width", ")", ",", "int", "(", "height", ")", ")" ]
fit image in box of width pwidth and height pheight .
train
false
36,922
def mod_inverse(a, m):
    """Return the number c such that (a * c) % m == 1, if it exists.

    Integer inputs use the extended Euclidean algorithm (igcdex);
    inputs that fail as_int fall back to symbolic handling, where the
    inverse of a number a is 1/a (m > 1 must be decidable).

    Raises ValueError when the inverse does not exist or m > 1 cannot
    be evaluated, TypeError for non-number symbolic arguments.
    """
    c = None
    try:
        (a, m) = (as_int(a), as_int(m))
        if (m > 1):
            (x, y, g) = igcdex(a, m)
            if (g == 1):
                # x is a's inverse mod m; normalize into [0, m)
                c = (x % m)
            if (a < 0):
                c -= m
    except ValueError:
        # as_int failed: retry symbolically via sympify
        (a, m) = (sympify(a), sympify(m))
        if (not (a.is_number and m.is_number)):
            raise TypeError(filldedent('\n Expected numbers for arguments; symbolic `mod_inverse`\n is not implemented\n but symbolic expressions can be handled with the\n similar function,\n sympy.polys.polytools.invert'))
        big = (m > 1)
        if (not ((big is S.true) or (big is S.false))):
            raise ValueError(('m > 1 did not evaluate; try to simplify %s' % m))
        elif big:
            c = (1 / a)
    if (c is None):
        raise ValueError(('inverse of %s (mod %s) does not exist' % (a, m)))
    return c
[ "def", "mod_inverse", "(", "a", ",", "m", ")", ":", "c", "=", "None", "try", ":", "(", "a", ",", "m", ")", "=", "(", "as_int", "(", "a", ")", ",", "as_int", "(", "m", ")", ")", "if", "(", "m", ">", "1", ")", ":", "(", "x", ",", "y", ",", "g", ")", "=", "igcdex", "(", "a", ",", "m", ")", "if", "(", "g", "==", "1", ")", ":", "c", "=", "(", "x", "%", "m", ")", "if", "(", "a", "<", "0", ")", ":", "c", "-=", "m", "except", "ValueError", ":", "(", "a", ",", "m", ")", "=", "(", "sympify", "(", "a", ")", ",", "sympify", "(", "m", ")", ")", "if", "(", "not", "(", "a", ".", "is_number", "and", "m", ".", "is_number", ")", ")", ":", "raise", "TypeError", "(", "filldedent", "(", "'\\n Expected numbers for arguments; symbolic `mod_inverse`\\n is not implemented\\n but symbolic expressions can be handled with the\\n similar function,\\n sympy.polys.polytools.invert'", ")", ")", "big", "=", "(", "m", ">", "1", ")", "if", "(", "not", "(", "(", "big", "is", "S", ".", "true", ")", "or", "(", "big", "is", "S", ".", "false", ")", ")", ")", ":", "raise", "ValueError", "(", "(", "'m > 1 did not evaluate; try to simplify %s'", "%", "m", ")", ")", "elif", "big", ":", "c", "=", "(", "1", "/", "a", ")", "if", "(", "c", "is", "None", ")", ":", "raise", "ValueError", "(", "(", "'inverse of %s (mod %s) does not exist'", "%", "(", "a", ",", "m", ")", ")", ")", "return", "c" ]
return the number c such that .
train
false
36,923
def image_meta_delete(image_id=None, name=None, keys=None, profile=None):
    """Delete key=value pairs from the metadata for an image.

    Authenticates against the given profile and forwards the request
    to the cloud connection.
    """
    connection = _auth(profile)
    return connection.image_meta_delete(image_id, name, keys)
[ "def", "image_meta_delete", "(", "image_id", "=", "None", ",", "name", "=", "None", ",", "keys", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "image_meta_delete", "(", "image_id", ",", "name", ",", "keys", ")" ]
delete a key=value pair from the metadata for an image cli examples: .
train
true
36,924
def test_merge_once_only(merge_log_err):
    """Test merging for components that may occur only once.

    The packages duplicate 'homeassistant', 'mqtt' and 'api' entries
    already present in the base config, so merging must log one error
    per duplicate and leave the config size unchanged.
    """
    packages = {'pack_1': {'homeassistant': {}}, 'pack_2': {'mqtt': {}, 'api': {}}}
    config = {config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages}, 'mqtt': {}, 'api': {}}
    config_util.merge_packages_config(config, packages)
    # One logged error per duplicated single-occurrence component
    assert (merge_log_err.call_count == 3)
    # No new top-level keys were merged in
    assert (len(config) == 3)
[ "def", "test_merge_once_only", "(", "merge_log_err", ")", ":", "packages", "=", "{", "'pack_1'", ":", "{", "'homeassistant'", ":", "{", "}", "}", ",", "'pack_2'", ":", "{", "'mqtt'", ":", "{", "}", ",", "'api'", ":", "{", "}", "}", "}", "config", "=", "{", "config_util", ".", "CONF_CORE", ":", "{", "config_util", ".", "CONF_PACKAGES", ":", "packages", "}", ",", "'mqtt'", ":", "{", "}", ",", "'api'", ":", "{", "}", "}", "config_util", ".", "merge_packages_config", "(", "config", ",", "packages", ")", "assert", "(", "merge_log_err", ".", "call_count", "==", "3", ")", "assert", "(", "len", "(", "config", ")", "==", "3", ")" ]
test if we have a merge for a comp that may occur only once .
train
false
36,925
def permission_required_or_404(perm, *args, **kwargs):
    """Wrapper for the permission_required decorator that 404s on denial.

    Forces the return_404 flag and delegates everything else.
    """
    kwargs.update({u'return_404': True})
    return permission_required(perm, *args, **kwargs)
[ "def", "permission_required_or_404", "(", "perm", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "u'return_404'", "]", "=", "True", "return", "permission_required", "(", "perm", ",", "*", "args", ",", "**", "kwargs", ")" ]
simple wrapper for permission_required decorator .
train
false
36,926
def risk():
    """REST controller for vulnerability risks."""
    header_builder = s3db.vulnerability_rheader
    return s3_rest_controller(rheader=header_builder)
[ "def", "risk", "(", ")", ":", "return", "s3_rest_controller", "(", "rheader", "=", "s3db", ".", "vulnerability_rheader", ")" ]
rest controller .
train
false
36,927
def _fastq_convert_qual(in_handle, out_handle, mapping):
    """FASTQ helper function for QUAL output.

    Writes each record's title as a FASTA-style header, then the
    quality string mapped through *mapping* (ASCII char -> quality
    value string), space-joined and wrapped at <= 60 chars per line.

    :param in_handle: FASTQ input handle
    :param out_handle: QUAL output handle
    :param mapping: dict from quality character to value string
    :return: number of records written
    :raises ValueError: on a quality character missing from *mapping*
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    count = 0
    for (title, seq, qual) in FastqGeneralIterator(in_handle):
        count += 1
        out_handle.write(('>%s\n' % title))
        try:
            qualities_strs = [mapping[ascii] for ascii in qual]
        except KeyError:
            raise ValueError('Invalid character in quality string')
        data = ' '.join(qualities_strs)
        while (len(data) > 60):
            # Break on a space at or just before column 60; since each
            # value is at most a few digits plus one space, one of
            # positions 58-60 must be a space.
            if (data[60] == ' '):
                out_handle.write((data[:60] + '\n'))
                data = data[61:]
            elif (data[59] == ' '):
                out_handle.write((data[:59] + '\n'))
                data = data[60:]
            else:
                assert (data[58] == ' '), 'Internal logic failure in wrapping'
                out_handle.write((data[:58] + '\n'))
                data = data[59:]
        out_handle.write((data + '\n'))
    return count
[ "def", "_fastq_convert_qual", "(", "in_handle", ",", "out_handle", ",", "mapping", ")", ":", "from", "Bio", ".", "SeqIO", ".", "QualityIO", "import", "FastqGeneralIterator", "count", "=", "0", "for", "(", "title", ",", "seq", ",", "qual", ")", "in", "FastqGeneralIterator", "(", "in_handle", ")", ":", "count", "+=", "1", "out_handle", ".", "write", "(", "(", "'>%s\\n'", "%", "title", ")", ")", "try", ":", "qualities_strs", "=", "[", "mapping", "[", "ascii", "]", "for", "ascii", "in", "qual", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Invalid character in quality string'", ")", "data", "=", "' '", ".", "join", "(", "qualities_strs", ")", "while", "(", "len", "(", "data", ")", ">", "60", ")", ":", "if", "(", "data", "[", "60", "]", "==", "' '", ")", ":", "out_handle", ".", "write", "(", "(", "data", "[", ":", "60", "]", "+", "'\\n'", ")", ")", "data", "=", "data", "[", "61", ":", "]", "elif", "(", "data", "[", "59", "]", "==", "' '", ")", ":", "out_handle", ".", "write", "(", "(", "data", "[", ":", "59", "]", "+", "'\\n'", ")", ")", "data", "=", "data", "[", "60", ":", "]", "else", ":", "assert", "(", "data", "[", "58", "]", "==", "' '", ")", ",", "'Internal logic failure in wrapping'", "out_handle", ".", "write", "(", "(", "data", "[", ":", "58", "]", "+", "'\\n'", ")", ")", "data", "=", "data", "[", "59", ":", "]", "out_handle", ".", "write", "(", "(", "data", "+", "'\\n'", ")", ")", "return", "count" ]
fastq helper function for qual output .
train
false
36,928
def skip_if(predicate, reason):
    """Decorator factory: skip the test when *predicate* is true."""
    def _inner(fun):
        if predicate:
            # Preserve the original truthiness fallback: a falsy skip
            # wrapper falls back to the undecorated function.
            return skip(reason)(fun) or fun
        return fun
    return _inner
[ "def", "skip_if", "(", "predicate", ",", "reason", ")", ":", "def", "_inner", "(", "fun", ")", ":", "return", "(", "(", "predicate", "and", "skip", "(", "reason", ")", "(", "fun", ")", ")", "or", "fun", ")", "return", "_inner" ]
decorator to skip test if condition is true .
train
false
36,929
def generate_login_token(user):
    """Generate a unique login token for the specified user.

    Serializes the user's id with the security login serializer.
    """
    payload = [str(user.id)]
    return _security.login_serializer.dumps(payload)
[ "def", "generate_login_token", "(", "user", ")", ":", "return", "_security", ".", "login_serializer", ".", "dumps", "(", "[", "str", "(", "user", ".", "id", ")", "]", ")" ]
generates a unique login token for the specified user .
train
false
36,930
def strip_language_code(request):
    """Return request.path with the active language prefix removed.

    Applies only when i18n is enabled and the request carries a
    LANGUAGE_CODE (django i18n_patterns case); otherwise the path is
    returned unchanged.
    """
    path = request.path
    if not (settings.USE_I18N and hasattr(request, 'LANGUAGE_CODE')):
        return path
    prefix_pattern = '^/%s/' % request.LANGUAGE_CODE
    return re.sub(prefix_pattern, '/', path)
[ "def", "strip_language_code", "(", "request", ")", ":", "path", "=", "request", ".", "path", "if", "(", "settings", ".", "USE_I18N", "and", "hasattr", "(", "request", ",", "'LANGUAGE_CODE'", ")", ")", ":", "return", "re", ".", "sub", "(", "(", "'^/%s/'", "%", "request", ".", "LANGUAGE_CODE", ")", ",", "'/'", ",", "path", ")", "return", "path" ]
when using djangos i18n_patterns .
train
false
36,931
def _bytes_chr_py2(i): return chr(i)
[ "def", "_bytes_chr_py2", "(", "i", ")", ":", "return", "chr", "(", "i", ")" ]
returns a byte string of length 1 whose ordinal value is i in python 2 .
train
false
36,932
def test_hist_append(hist, xonsh_builtins):
    """Verify appending to the history works.

    Disables HISTCONTROL so duplicate entries are kept, appends two
    entries with the same input, and checks items()/all_items()
    reflect both in order.
    """
    xonsh_builtins.__xonsh_env__['HISTCONTROL'] = set()
    hf = hist.append({'inp': 'still alive', 'rtn': 1})
    # append() is expected to return None here (no flush handle)
    assert (hf is None)
    items = list(hist.items())
    assert (len(items) == 1)
    assert ('still alive' == items[0]['inp'])
    assert (1 == items[0]['rtn'])
    hist.append({'inp': 'still alive', 'rtn': 0})
    items = list(hist.items())
    assert (len(items) == 2)
    assert ('still alive' == items[1]['inp'])
    assert (0 == items[1]['rtn'])
    # all_items() must agree with items() for this in-memory history
    assert (list(hist.all_items()) == items)
[ "def", "test_hist_append", "(", "hist", ",", "xonsh_builtins", ")", ":", "xonsh_builtins", ".", "__xonsh_env__", "[", "'HISTCONTROL'", "]", "=", "set", "(", ")", "hf", "=", "hist", ".", "append", "(", "{", "'inp'", ":", "'still alive'", ",", "'rtn'", ":", "1", "}", ")", "assert", "(", "hf", "is", "None", ")", "items", "=", "list", "(", "hist", ".", "items", "(", ")", ")", "assert", "(", "len", "(", "items", ")", "==", "1", ")", "assert", "(", "'still alive'", "==", "items", "[", "0", "]", "[", "'inp'", "]", ")", "assert", "(", "1", "==", "items", "[", "0", "]", "[", "'rtn'", "]", ")", "hist", ".", "append", "(", "{", "'inp'", ":", "'still alive'", ",", "'rtn'", ":", "0", "}", ")", "items", "=", "list", "(", "hist", ".", "items", "(", ")", ")", "assert", "(", "len", "(", "items", ")", "==", "2", ")", "assert", "(", "'still alive'", "==", "items", "[", "1", "]", "[", "'inp'", "]", ")", "assert", "(", "0", "==", "items", "[", "1", "]", "[", "'rtn'", "]", ")", "assert", "(", "list", "(", "hist", ".", "all_items", "(", ")", ")", "==", "items", ")" ]
verify appending to the history works .
train
false
36,933
def get_kdbg(addr_space):
    """A function designed to return the KDBG structure from an address space.

    Tries three strategies in order:
      1. a profile-supplied KPCR address, following its KDBG pointer
      2. KDBG signature-scan suggestions from the profile magic
      3. (32-bit or version <= 6.1 profiles only) scanning suggested
         KPCR offsets and following each one's KDBG pointer

    Returns a NoneObject when nothing valid is found.
    """
    if obj.VolMagic(addr_space).KPCR.value:
        # Strategy 1: explicit KPCR address from the profile
        kpcr = obj.Object('_KPCR', offset=obj.VolMagic(addr_space).KPCR.value, vm=addr_space)
        kdbg = kpcr.get_kdbg()
        if kdbg.is_valid():
            return kdbg
    # Strategy 2: KDBG signature suggestions
    kdbg_magic = obj.VolMagic(addr_space).KDBG
    for kdbg in kdbg_magic.get_suggestions():
        if kdbg.is_valid():
            return kdbg
    memmode = addr_space.profile.metadata.get('memory_model', '32bit')
    version = (addr_space.profile.metadata.get('major', 0), addr_space.profile.metadata.get('minor', 0))
    if ((memmode == '32bit') or (version <= (6, 1))):
        # Strategy 3: KPCR offset scan, gated to 32-bit or older
        # (<= 6.1) profiles
        for kpcr_off in obj.VolMagic(addr_space).KPCR.get_suggestions():
            kpcr = obj.Object('_KPCR', offset=kpcr_off, vm=addr_space)
            kdbg = kpcr.get_kdbg()
            if kdbg.is_valid():
                return kdbg
    return obj.NoneObject('KDDEBUGGER structure not found using either KDBG signature or KPCR pointer')
[ "def", "get_kdbg", "(", "addr_space", ")", ":", "if", "obj", ".", "VolMagic", "(", "addr_space", ")", ".", "KPCR", ".", "value", ":", "kpcr", "=", "obj", ".", "Object", "(", "'_KPCR'", ",", "offset", "=", "obj", ".", "VolMagic", "(", "addr_space", ")", ".", "KPCR", ".", "value", ",", "vm", "=", "addr_space", ")", "kdbg", "=", "kpcr", ".", "get_kdbg", "(", ")", "if", "kdbg", ".", "is_valid", "(", ")", ":", "return", "kdbg", "kdbg_magic", "=", "obj", ".", "VolMagic", "(", "addr_space", ")", ".", "KDBG", "for", "kdbg", "in", "kdbg_magic", ".", "get_suggestions", "(", ")", ":", "if", "kdbg", ".", "is_valid", "(", ")", ":", "return", "kdbg", "memmode", "=", "addr_space", ".", "profile", ".", "metadata", ".", "get", "(", "'memory_model'", ",", "'32bit'", ")", "version", "=", "(", "addr_space", ".", "profile", ".", "metadata", ".", "get", "(", "'major'", ",", "0", ")", ",", "addr_space", ".", "profile", ".", "metadata", ".", "get", "(", "'minor'", ",", "0", ")", ")", "if", "(", "(", "memmode", "==", "'32bit'", ")", "or", "(", "version", "<=", "(", "6", ",", "1", ")", ")", ")", ":", "for", "kpcr_off", "in", "obj", ".", "VolMagic", "(", "addr_space", ")", ".", "KPCR", ".", "get_suggestions", "(", ")", ":", "kpcr", "=", "obj", ".", "Object", "(", "'_KPCR'", ",", "offset", "=", "kpcr_off", ",", "vm", "=", "addr_space", ")", "kdbg", "=", "kpcr", ".", "get_kdbg", "(", ")", "if", "kdbg", ".", "is_valid", "(", ")", ":", "return", "kdbg", "return", "obj", ".", "NoneObject", "(", "'KDDEBUGGER structure not found using either KDBG signature or KPCR pointer'", ")" ]
a function designed to return the kdbg structure from an address space .
train
false
36,934
def SwitchPlacer(name, *args, **params):
    """Custom Switch() constructor which places switches on servers.

    Switches listed in remoteSwitches are created on the remote
    server; all others are created with default placement.
    """
    if name not in remoteSwitches:
        return RemoteOVSSwitch(name, *args, **params)
    return RemoteOVSSwitch(name, server=remoteServer, *args, **params)
[ "def", "SwitchPlacer", "(", "name", ",", "*", "args", ",", "**", "params", ")", ":", "if", "(", "name", "in", "remoteSwitches", ")", ":", "return", "RemoteOVSSwitch", "(", "name", ",", "server", "=", "remoteServer", ",", "*", "args", ",", "**", "params", ")", "else", ":", "return", "RemoteOVSSwitch", "(", "name", ",", "*", "args", ",", "**", "params", ")" ]
custom switch() constructor which places switches on servers .
train
false
36,935
def componentFactory(componentid, password):
    """XML stream factory for external server-side components."""
    authenticator = ConnectComponentAuthenticator(componentid, password)
    return xmlstream.XmlStreamFactory(authenticator)
[ "def", "componentFactory", "(", "componentid", ",", "password", ")", ":", "a", "=", "ConnectComponentAuthenticator", "(", "componentid", ",", "password", ")", "return", "xmlstream", ".", "XmlStreamFactory", "(", "a", ")" ]
xml stream factory for external server-side components .
train
false
36,936
def get_ec2_instance_id_by_uuid(context, instance_id):
    """Get the EC2 id for a uuid from the instance_id_mappings table."""
    ec2_id = IMPL.get_ec2_instance_id_by_uuid(context, instance_id)
    return ec2_id
[ "def", "get_ec2_instance_id_by_uuid", "(", "context", ",", "instance_id", ")", ":", "return", "IMPL", ".", "get_ec2_instance_id_by_uuid", "(", "context", ",", "instance_id", ")" ]
get ec2 id through uuid from instance_id_mappings table .
train
false
36,937
def safe_issubclass(*args):
    """Like issubclass, but returns False instead of raising TypeError."""
    try:
        return bool(issubclass(*args))
    except TypeError:
        return False
[ "def", "safe_issubclass", "(", "*", "args", ")", ":", "try", ":", "if", "issubclass", "(", "*", "args", ")", ":", "return", "True", "except", "TypeError", ":", "pass", "return", "False" ]
like issubclass .
train
false
36,939
@hook.command('steamid', 'sid', 'steamuser', 'su')
def steamid(text):
    """steamid <username> -- gets the steam id of <username>."""
    try:
        data = get_data(text)
    except SteamError as err:
        return '{}'.format(err)
    template = '{name} ({state}): \x02ID64:\x02 {id_64}, \x02ID32:\x02 {id_32}, \x02ID3:\x02 {id_3}'
    return template.format(**data)
[ "@", "hook", ".", "command", "(", "'steamid'", ",", "'sid'", ",", "'steamuser'", ",", "'su'", ")", "def", "steamid", "(", "text", ")", ":", "try", ":", "data", "=", "get_data", "(", "text", ")", "except", "SteamError", "as", "e", ":", "return", "'{}'", ".", "format", "(", "e", ")", "return", "'{name} ({state}): \\x02ID64:\\x02 {id_64}, \\x02ID32:\\x02 {id_32}, \\x02ID3:\\x02 {id_3}'", ".", "format", "(", "**", "data", ")" ]
steamid <username> -- gets the steam id of <username> .
train
false
36,940
def unquote_etag(etag):
    """Unquote a single etag.

    Returns (tag, weak): the etag with any weak marker ('W/'/'w/') and
    surrounding quotes stripped, plus a flag telling whether it was a
    weak etag. A falsy input yields (None, None).
    """
    if not etag:
        return (None, None)
    etag = etag.strip()
    weak = etag[:2] in ('w/', 'W/')
    if weak:
        etag = etag[2:]
    if etag[:1] == etag[-1:] == '"':
        etag = etag[1:-1]
    return (etag, weak)
[ "def", "unquote_etag", "(", "etag", ")", ":", "if", "(", "not", "etag", ")", ":", "return", "(", "None", ",", "None", ")", "etag", "=", "etag", ".", "strip", "(", ")", "weak", "=", "False", "if", "(", "etag", "[", ":", "2", "]", "in", "(", "'w/'", ",", "'W/'", ")", ")", ":", "weak", "=", "True", "etag", "=", "etag", "[", "2", ":", "]", "if", "(", "etag", "[", ":", "1", "]", "==", "etag", "[", "(", "-", "1", ")", ":", "]", "==", "'\"'", ")", ":", "etag", "=", "etag", "[", "1", ":", "(", "-", "1", ")", "]", "return", "(", "etag", ",", "weak", ")" ]
unquote a single etag: .
train
true
36,941
def debris_basin():
    """RESTful CRUD controller for debris basins."""
    controller_response = s3_rest_controller()
    return controller_response
[ "def", "debris_basin", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
debris basins .
train
false
36,943
def get_attrname(name):
    """Return the mangled name of the attribute's underlying storage."""
    return ''.join(('_obj_', name))
[ "def", "get_attrname", "(", "name", ")", ":", "return", "(", "'_obj_'", "+", "name", ")" ]
return the mangled name of the attributes underlying storage .
train
false
36,944
@task(base=BaseInstructorTask) def calculate_students_features_csv(entry_id, xmodule_instance_args): action_name = ugettext_noop('generated') task_fn = partial(upload_students_csv, xmodule_instance_args) return run_main_task(entry_id, task_fn, action_name)
[ "@", "task", "(", "base", "=", "BaseInstructorTask", ")", "def", "calculate_students_features_csv", "(", "entry_id", ",", "xmodule_instance_args", ")", ":", "action_name", "=", "ugettext_noop", "(", "'generated'", ")", "task_fn", "=", "partial", "(", "upload_students_csv", ",", "xmodule_instance_args", ")", "return", "run_main_task", "(", "entry_id", ",", "task_fn", ",", "action_name", ")" ]
compute student profile information for a course and upload the csv to an s3 bucket for download .
train
false
36,947
def requires_good_connection(fun): def _requires_good_connection(*args, **kwargs): try: return fun(*args, **kwargs) except ConnectionError: log.error('Either your ElasticSearch process is not quite ready to rumble, is not running at all, or ES_URLSis set wrong in your .env file.') return _requires_good_connection
[ "def", "requires_good_connection", "(", "fun", ")", ":", "def", "_requires_good_connection", "(", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "fun", "(", "*", "args", ",", "**", "kwargs", ")", "except", "ConnectionError", ":", "log", ".", "error", "(", "'Either your ElasticSearch process is not quite ready to rumble, is not running at all, or ES_URLSis set wrong in your .env file.'", ")", "return", "_requires_good_connection" ]
decorator that logs an error on connection issues 9 out of 10 doctors say that connection errors are usually because es_urls is set wrong .
train
false
36,948
def account_ids_by_ip(ip, after=None, before=None, limit=1000): ips = tup(ip) results = [] flattened_accounts = {} for ip in ips: if (before and (not after)): account_ip = AccountsByIP.get(ip, column_start=before, column_count=(limit + 1), column_reversed=False) account_ip = sorted(account_ip, reverse=True) else: account_ip = AccountsByIP.get(ip, column_start=after, column_count=limit) flattened_account_ip = [j for i in account_ip for j in i.iteritems()] flattened_accounts[ip] = flattened_account_ip for (ip, flattened_account_ip) in flattened_accounts.iteritems(): for (last_visit, account) in flattened_account_ip: results.append((account, last_visit, [ip])) return results
[ "def", "account_ids_by_ip", "(", "ip", ",", "after", "=", "None", ",", "before", "=", "None", ",", "limit", "=", "1000", ")", ":", "ips", "=", "tup", "(", "ip", ")", "results", "=", "[", "]", "flattened_accounts", "=", "{", "}", "for", "ip", "in", "ips", ":", "if", "(", "before", "and", "(", "not", "after", ")", ")", ":", "account_ip", "=", "AccountsByIP", ".", "get", "(", "ip", ",", "column_start", "=", "before", ",", "column_count", "=", "(", "limit", "+", "1", ")", ",", "column_reversed", "=", "False", ")", "account_ip", "=", "sorted", "(", "account_ip", ",", "reverse", "=", "True", ")", "else", ":", "account_ip", "=", "AccountsByIP", ".", "get", "(", "ip", ",", "column_start", "=", "after", ",", "column_count", "=", "limit", ")", "flattened_account_ip", "=", "[", "j", "for", "i", "in", "account_ip", "for", "j", "in", "i", ".", "iteritems", "(", ")", "]", "flattened_accounts", "[", "ip", "]", "=", "flattened_account_ip", "for", "(", "ip", ",", "flattened_account_ip", ")", "in", "flattened_accounts", ".", "iteritems", "(", ")", ":", "for", "(", "last_visit", ",", "account", ")", "in", "flattened_account_ip", ":", "results", ".", "append", "(", "(", "account", ",", "last_visit", ",", "[", "ip", "]", ")", ")", "return", "results" ]
get a list of account ids that an ip has accessed .
train
false
36,949
def read_config(options, args, arglist, parser): config = RawConfigParser() cli_conf = options.config local_dir = os.curdir if (USER_CONFIG and os.path.isfile(USER_CONFIG)): if options.verbose: print ('user configuration: %s' % USER_CONFIG) config.read(USER_CONFIG) parent = tail = (args and os.path.abspath(os.path.commonprefix(args))) while tail: if config.read((os.path.join(parent, fn) for fn in PROJECT_CONFIG)): local_dir = parent if options.verbose: print ('local configuration: in %s' % parent) break (parent, tail) = os.path.split(parent) if (cli_conf and os.path.isfile(cli_conf)): if options.verbose: print ('cli configuration: %s' % cli_conf) config.read(cli_conf) pep8_section = parser.prog if config.has_section(pep8_section): option_list = dict([(o.dest, (o.type or o.action)) for o in parser.option_list]) (new_options, __) = parser.parse_args([]) for opt in config.options(pep8_section): if (opt.replace('_', '-') not in parser.config_options): print (" unknown option '%s' ignored" % opt) continue if (options.verbose > 1): print (' %s = %s' % (opt, config.get(pep8_section, opt))) normalized_opt = opt.replace('-', '_') opt_type = option_list[normalized_opt] if (opt_type in ('int', 'count')): value = config.getint(pep8_section, opt) elif (opt_type == 'string'): value = config.get(pep8_section, opt) if (normalized_opt == 'exclude'): value = normalize_paths(value, local_dir) else: assert (opt_type in ('store_true', 'store_false')) value = config.getboolean(pep8_section, opt) setattr(new_options, normalized_opt, value) (options, __) = parser.parse_args(arglist, values=new_options) options.doctest = options.testsuite = False return options
[ "def", "read_config", "(", "options", ",", "args", ",", "arglist", ",", "parser", ")", ":", "config", "=", "RawConfigParser", "(", ")", "cli_conf", "=", "options", ".", "config", "local_dir", "=", "os", ".", "curdir", "if", "(", "USER_CONFIG", "and", "os", ".", "path", ".", "isfile", "(", "USER_CONFIG", ")", ")", ":", "if", "options", ".", "verbose", ":", "print", "(", "'user configuration: %s'", "%", "USER_CONFIG", ")", "config", ".", "read", "(", "USER_CONFIG", ")", "parent", "=", "tail", "=", "(", "args", "and", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "commonprefix", "(", "args", ")", ")", ")", "while", "tail", ":", "if", "config", ".", "read", "(", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "fn", ")", "for", "fn", "in", "PROJECT_CONFIG", ")", ")", ":", "local_dir", "=", "parent", "if", "options", ".", "verbose", ":", "print", "(", "'local configuration: in %s'", "%", "parent", ")", "break", "(", "parent", ",", "tail", ")", "=", "os", ".", "path", ".", "split", "(", "parent", ")", "if", "(", "cli_conf", "and", "os", ".", "path", ".", "isfile", "(", "cli_conf", ")", ")", ":", "if", "options", ".", "verbose", ":", "print", "(", "'cli configuration: %s'", "%", "cli_conf", ")", "config", ".", "read", "(", "cli_conf", ")", "pep8_section", "=", "parser", ".", "prog", "if", "config", ".", "has_section", "(", "pep8_section", ")", ":", "option_list", "=", "dict", "(", "[", "(", "o", ".", "dest", ",", "(", "o", ".", "type", "or", "o", ".", "action", ")", ")", "for", "o", "in", "parser", ".", "option_list", "]", ")", "(", "new_options", ",", "__", ")", "=", "parser", ".", "parse_args", "(", "[", "]", ")", "for", "opt", "in", "config", ".", "options", "(", "pep8_section", ")", ":", "if", "(", "opt", ".", "replace", "(", "'_'", ",", "'-'", ")", "not", "in", "parser", ".", "config_options", ")", ":", "print", "(", "\" unknown option '%s' ignored\"", "%", "opt", ")", "continue", "if", "(", "options", ".", "verbose", ">", "1", ")", ":", 
"print", "(", "' %s = %s'", "%", "(", "opt", ",", "config", ".", "get", "(", "pep8_section", ",", "opt", ")", ")", ")", "normalized_opt", "=", "opt", ".", "replace", "(", "'-'", ",", "'_'", ")", "opt_type", "=", "option_list", "[", "normalized_opt", "]", "if", "(", "opt_type", "in", "(", "'int'", ",", "'count'", ")", ")", ":", "value", "=", "config", ".", "getint", "(", "pep8_section", ",", "opt", ")", "elif", "(", "opt_type", "==", "'string'", ")", ":", "value", "=", "config", ".", "get", "(", "pep8_section", ",", "opt", ")", "if", "(", "normalized_opt", "==", "'exclude'", ")", ":", "value", "=", "normalize_paths", "(", "value", ",", "local_dir", ")", "else", ":", "assert", "(", "opt_type", "in", "(", "'store_true'", ",", "'store_false'", ")", ")", "value", "=", "config", ".", "getboolean", "(", "pep8_section", ",", "opt", ")", "setattr", "(", "new_options", ",", "normalized_opt", ",", "value", ")", "(", "options", ",", "__", ")", "=", "parser", ".", "parse_args", "(", "arglist", ",", "values", "=", "new_options", ")", "options", ".", "doctest", "=", "options", ".", "testsuite", "=", "False", "return", "options" ]
reads pylintrc config onto native configparser object .
train
true
36,953
def check_array_additions(evaluator, array): if (array.type not in ('list', 'set')): return set() is_list = (array.type == 'list') try: current_module = array.atom.get_parent_until() except AttributeError: return set() return _check_array_additions(evaluator, array, current_module, is_list)
[ "def", "check_array_additions", "(", "evaluator", ",", "array", ")", ":", "if", "(", "array", ".", "type", "not", "in", "(", "'list'", ",", "'set'", ")", ")", ":", "return", "set", "(", ")", "is_list", "=", "(", "array", ".", "type", "==", "'list'", ")", "try", ":", "current_module", "=", "array", ".", "atom", ".", "get_parent_until", "(", ")", "except", "AttributeError", ":", "return", "set", "(", ")", "return", "_check_array_additions", "(", "evaluator", ",", "array", ",", "current_module", ",", "is_list", ")" ]
just a mapper function for the internal _check_array_additions .
train
false
36,955
def task_created(request): if (not test_user_authenticated(request)): return login(request, next='/cobbler_web/task_created', expired=True) t = get_template('task_created.tmpl') html = t.render(RequestContext(request, {'version': remote.extended_version(request.session['token'])['version'], 'username': username})) return HttpResponse(html)
[ "def", "task_created", "(", "request", ")", ":", "if", "(", "not", "test_user_authenticated", "(", "request", ")", ")", ":", "return", "login", "(", "request", ",", "next", "=", "'/cobbler_web/task_created'", ",", "expired", "=", "True", ")", "t", "=", "get_template", "(", "'task_created.tmpl'", ")", "html", "=", "t", ".", "render", "(", "RequestContext", "(", "request", ",", "{", "'version'", ":", "remote", ".", "extended_version", "(", "request", ".", "session", "[", "'token'", "]", ")", "[", "'version'", "]", ",", "'username'", ":", "username", "}", ")", ")", "return", "HttpResponse", "(", "html", ")" ]
lets the user know what to expect for event updates .
train
false
36,956
def _show_warning(message, category, filename, lineno, file=None, line=None): if (file is None): file = sys.stderr try: file.write(formatwarning(message, category, filename, lineno, line)) except IOError: pass
[ "def", "_show_warning", "(", "message", ",", "category", ",", "filename", ",", "lineno", ",", "file", "=", "None", ",", "line", "=", "None", ")", ":", "if", "(", "file", "is", "None", ")", ":", "file", "=", "sys", ".", "stderr", "try", ":", "file", ".", "write", "(", "formatwarning", "(", "message", ",", "category", ",", "filename", ",", "lineno", ",", "line", ")", ")", "except", "IOError", ":", "pass" ]
hook to write a warning to a file; replace if you like .
train
true
36,957
def get_pkg_info(pkg_path): (src, hsh) = pkg_commit_hash(pkg_path) import networkx import nibabel import numpy import scipy import traits return dict(pkg_path=pkg_path, commit_source=src, commit_hash=hsh, sys_version=sys.version, sys_executable=sys.executable, sys_platform=sys.platform, numpy_version=numpy.__version__, scipy_version=scipy.__version__, networkx_version=networkx.__version__, nibabel_version=nibabel.__version__, traits_version=traits.__version__)
[ "def", "get_pkg_info", "(", "pkg_path", ")", ":", "(", "src", ",", "hsh", ")", "=", "pkg_commit_hash", "(", "pkg_path", ")", "import", "networkx", "import", "nibabel", "import", "numpy", "import", "scipy", "import", "traits", "return", "dict", "(", "pkg_path", "=", "pkg_path", ",", "commit_source", "=", "src", ",", "commit_hash", "=", "hsh", ",", "sys_version", "=", "sys", ".", "version", ",", "sys_executable", "=", "sys", ".", "executable", ",", "sys_platform", "=", "sys", ".", "platform", ",", "numpy_version", "=", "numpy", ".", "__version__", ",", "scipy_version", "=", "scipy", ".", "__version__", ",", "networkx_version", "=", "networkx", ".", "__version__", ",", "nibabel_version", "=", "nibabel", ".", "__version__", ",", "traits_version", "=", "traits", ".", "__version__", ")" ]
return dict describing the context of this package parameters pkg_path : str path containing __init__ .
train
false
36,958
def get_outgoing_email_account(raise_exception_not_set=True, append_to=None): if (not getattr(frappe.local, u'outgoing_email_account', None)): frappe.local.outgoing_email_account = {} if (not frappe.local.outgoing_email_account.get((append_to or u'default'))): email_account = None if append_to: email_account = _get_email_account({u'enable_outgoing': 1, u'enable_incoming': 1, u'append_to': append_to}) if (not email_account): email_account = get_default_outgoing_email_account(raise_exception_not_set=raise_exception_not_set) if ((not email_account) and raise_exception_not_set): frappe.throw(_(u'Please setup default Email Account from Setup > Email > Email Account'), frappe.OutgoingEmailError) if email_account: if (email_account.enable_outgoing and (not getattr(email_account, u'from_site_config', False))): email_account.password = email_account.get_password() email_account.default_sender = email.utils.formataddr((email_account.name, email_account.get(u'email_id'))) frappe.local.outgoing_email_account[(append_to or u'default')] = email_account return frappe.local.outgoing_email_account[(append_to or u'default')]
[ "def", "get_outgoing_email_account", "(", "raise_exception_not_set", "=", "True", ",", "append_to", "=", "None", ")", ":", "if", "(", "not", "getattr", "(", "frappe", ".", "local", ",", "u'outgoing_email_account'", ",", "None", ")", ")", ":", "frappe", ".", "local", ".", "outgoing_email_account", "=", "{", "}", "if", "(", "not", "frappe", ".", "local", ".", "outgoing_email_account", ".", "get", "(", "(", "append_to", "or", "u'default'", ")", ")", ")", ":", "email_account", "=", "None", "if", "append_to", ":", "email_account", "=", "_get_email_account", "(", "{", "u'enable_outgoing'", ":", "1", ",", "u'enable_incoming'", ":", "1", ",", "u'append_to'", ":", "append_to", "}", ")", "if", "(", "not", "email_account", ")", ":", "email_account", "=", "get_default_outgoing_email_account", "(", "raise_exception_not_set", "=", "raise_exception_not_set", ")", "if", "(", "(", "not", "email_account", ")", "and", "raise_exception_not_set", ")", ":", "frappe", ".", "throw", "(", "_", "(", "u'Please setup default Email Account from Setup > Email > Email Account'", ")", ",", "frappe", ".", "OutgoingEmailError", ")", "if", "email_account", ":", "if", "(", "email_account", ".", "enable_outgoing", "and", "(", "not", "getattr", "(", "email_account", ",", "u'from_site_config'", ",", "False", ")", ")", ")", ":", "email_account", ".", "password", "=", "email_account", ".", "get_password", "(", ")", "email_account", ".", "default_sender", "=", "email", ".", "utils", ".", "formataddr", "(", "(", "email_account", ".", "name", ",", "email_account", ".", "get", "(", "u'email_id'", ")", ")", ")", "frappe", ".", "local", ".", "outgoing_email_account", "[", "(", "append_to", "or", "u'default'", ")", "]", "=", "email_account", "return", "frappe", ".", "local", ".", "outgoing_email_account", "[", "(", "append_to", "or", "u'default'", ")", "]" ]
returns outgoing email account based on append_to or the default outgoing account .
train
false
36,960
def _groovy_common_scriptSource(data): scriptSource = XML.Element('scriptSource') if (('command' in data) and ('file' in data)): raise JenkinsJobsException("Use just one of 'command' or 'file'") if ('command' in data): command = XML.SubElement(scriptSource, 'command') command.text = str(data['command']) scriptSource.set('class', 'hudson.plugins.groovy.StringScriptSource') elif ('file' in data): scriptFile = XML.SubElement(scriptSource, 'scriptFile') scriptFile.text = str(data['file']) scriptSource.set('class', 'hudson.plugins.groovy.FileScriptSource') else: raise JenkinsJobsException('A groovy command or file is required') return scriptSource
[ "def", "_groovy_common_scriptSource", "(", "data", ")", ":", "scriptSource", "=", "XML", ".", "Element", "(", "'scriptSource'", ")", "if", "(", "(", "'command'", "in", "data", ")", "and", "(", "'file'", "in", "data", ")", ")", ":", "raise", "JenkinsJobsException", "(", "\"Use just one of 'command' or 'file'\"", ")", "if", "(", "'command'", "in", "data", ")", ":", "command", "=", "XML", ".", "SubElement", "(", "scriptSource", ",", "'command'", ")", "command", ".", "text", "=", "str", "(", "data", "[", "'command'", "]", ")", "scriptSource", ".", "set", "(", "'class'", ",", "'hudson.plugins.groovy.StringScriptSource'", ")", "elif", "(", "'file'", "in", "data", ")", ":", "scriptFile", "=", "XML", ".", "SubElement", "(", "scriptSource", ",", "'scriptFile'", ")", "scriptFile", ".", "text", "=", "str", "(", "data", "[", "'file'", "]", ")", "scriptSource", ".", "set", "(", "'class'", ",", "'hudson.plugins.groovy.FileScriptSource'", ")", "else", ":", "raise", "JenkinsJobsException", "(", "'A groovy command or file is required'", ")", "return", "scriptSource" ]
helper function to generate the xml element common to groovy builders .
train
false
36,961
def cmd_help(args, opts): print 'Available commands:' for (_, func) in sorted(get_commands().items()): print ' ', func.__doc__
[ "def", "cmd_help", "(", "args", ",", "opts", ")", ":", "print", "'Available commands:'", "for", "(", "_", ",", "func", ")", "in", "sorted", "(", "get_commands", "(", ")", ".", "items", "(", ")", ")", ":", "print", "' '", ",", "func", ".", "__doc__" ]
help - list available commands .
train
false
36,962
@when(u'I run "{command}"') @when(u'I run `{command}`') def step_i_run_command(context, command): command_util.ensure_workdir_exists(context) context.command_result = command_shell.run(command, cwd=context.workdir) command_util.workdir_save_coverage_files(context.workdir) if (False and DEBUG): print(u'run_command: {0}'.format(command)) print(u'run_command.output {0}'.format(context.command_result.output))
[ "@", "when", "(", "u'I run \"{command}\"'", ")", "@", "when", "(", "u'I run `{command}`'", ")", "def", "step_i_run_command", "(", "context", ",", "command", ")", ":", "command_util", ".", "ensure_workdir_exists", "(", "context", ")", "context", ".", "command_result", "=", "command_shell", ".", "run", "(", "command", ",", "cwd", "=", "context", ".", "workdir", ")", "command_util", ".", "workdir_save_coverage_files", "(", "context", ".", "workdir", ")", "if", "(", "False", "and", "DEBUG", ")", ":", "print", "(", "u'run_command: {0}'", ".", "format", "(", "command", ")", ")", "print", "(", "u'run_command.output {0}'", ".", "format", "(", "context", ".", "command_result", ".", "output", ")", ")" ]
run a command as subprocess .
train
false
36,963
@pytest.fixture def empty_history(webpage): hist = webpage.history() assert (hist.count() == 0) return hist
[ "@", "pytest", ".", "fixture", "def", "empty_history", "(", "webpage", ")", ":", "hist", "=", "webpage", ".", "history", "(", ")", "assert", "(", "hist", ".", "count", "(", ")", "==", "0", ")", "return", "hist" ]
fixture providing an empty qwebhistory .
train
false
36,964
def HostNameCheck(host): if (not HostNameValid(host)): raise ValueError((_('Invalid hostname: %s') % host)) return str(host).lower()
[ "def", "HostNameCheck", "(", "host", ")", ":", "if", "(", "not", "HostNameValid", "(", "host", ")", ")", ":", "raise", "ValueError", "(", "(", "_", "(", "'Invalid hostname: %s'", ")", "%", "host", ")", ")", "return", "str", "(", "host", ")", ".", "lower", "(", ")" ]
verify that a string is a valid host-name .
train
false
36,965
def skewness(X, condition=None, **kwargs): return smoment(X, 3, condition, **kwargs)
[ "def", "skewness", "(", "X", ",", "condition", "=", "None", ",", "**", "kwargs", ")", ":", "return", "smoment", "(", "X", ",", "3", ",", "condition", ",", "**", "kwargs", ")" ]
measure of the asymmetry of the probability distribution positive skew indicates that most of the values lie to the right of the mean skewness(x) = e( ((x - e(x))/sigma)**3 ) examples .
train
false
36,968
def gauss(m, v, x): if (len(x.shape) == 1): (n, d) = (1, x.shape[0]) else: (n, d) = x.shape S = diag((1 / v)) x = (x - m) y = exp(((-0.5) * diag(dot(x, dot(S, x.T))))) return ((y * ((2 * pi) ** ((- d) / 2.0))) / (sqrt(prod(v)) + 1e-06))
[ "def", "gauss", "(", "m", ",", "v", ",", "x", ")", ":", "if", "(", "len", "(", "x", ".", "shape", ")", "==", "1", ")", ":", "(", "n", ",", "d", ")", "=", "(", "1", ",", "x", ".", "shape", "[", "0", "]", ")", "else", ":", "(", "n", ",", "d", ")", "=", "x", ".", "shape", "S", "=", "diag", "(", "(", "1", "/", "v", ")", ")", "x", "=", "(", "x", "-", "m", ")", "y", "=", "exp", "(", "(", "(", "-", "0.5", ")", "*", "diag", "(", "dot", "(", "x", ",", "dot", "(", "S", ",", "x", ".", "T", ")", ")", ")", ")", ")", "return", "(", "(", "y", "*", "(", "(", "2", "*", "pi", ")", "**", "(", "(", "-", "d", ")", "/", "2.0", ")", ")", ")", "/", "(", "sqrt", "(", "prod", "(", "v", ")", ")", "+", "1e-06", ")", ")" ]
evaluate gaussian in d-dimensions with independent mean m and variance v at the points in x .
train
false
36,969
def _normalize_configuration_objs(configurations): for c in configurations: if (not hasattr(c, 'properties')): c.properties = [] if hasattr(c, 'configurations'): if (not c.configurations): del c.configurations else: _normalize_configuration_objs(c.configurations)
[ "def", "_normalize_configuration_objs", "(", "configurations", ")", ":", "for", "c", "in", "configurations", ":", "if", "(", "not", "hasattr", "(", "c", ",", "'properties'", ")", ")", ":", "c", ".", "properties", "=", "[", "]", "if", "hasattr", "(", "c", ",", "'configurations'", ")", ":", "if", "(", "not", "c", ".", "configurations", ")", ":", "del", "c", ".", "configurations", "else", ":", "_normalize_configuration_objs", "(", "c", ".", "configurations", ")" ]
the api will return an empty properties list for configurations without properties set .
train
false
36,970
def do_batch(value, linecount, fill_with=None): result = [] tmp = [] for item in value: if (len(tmp) == linecount): (yield tmp) tmp = [] tmp.append(item) if tmp: if ((fill_with is not None) and (len(tmp) < linecount)): tmp += ([fill_with] * (linecount - len(tmp))) (yield tmp)
[ "def", "do_batch", "(", "value", ",", "linecount", ",", "fill_with", "=", "None", ")", ":", "result", "=", "[", "]", "tmp", "=", "[", "]", "for", "item", "in", "value", ":", "if", "(", "len", "(", "tmp", ")", "==", "linecount", ")", ":", "(", "yield", "tmp", ")", "tmp", "=", "[", "]", "tmp", ".", "append", "(", "item", ")", "if", "tmp", ":", "if", "(", "(", "fill_with", "is", "not", "None", ")", "and", "(", "len", "(", "tmp", ")", "<", "linecount", ")", ")", ":", "tmp", "+=", "(", "[", "fill_with", "]", "*", "(", "linecount", "-", "len", "(", "tmp", ")", ")", ")", "(", "yield", "tmp", ")" ]
a filter that batches items .
train
true
36,972
def cov_hc1(results): het_scale = ((results.nobs / results.df_resid) * (results.resid ** 2)) cov_hc1 = _HCCM(results, het_scale) return cov_hc1
[ "def", "cov_hc1", "(", "results", ")", ":", "het_scale", "=", "(", "(", "results", ".", "nobs", "/", "results", ".", "df_resid", ")", "*", "(", "results", ".", "resid", "**", "2", ")", ")", "cov_hc1", "=", "_HCCM", "(", "results", ",", "het_scale", ")", "return", "cov_hc1" ]
see statsmodels .
train
false
36,974
def check_sized_string(result, func, cargs): if (not result): raise GEOSException(('Invalid string pointer returned by GEOS C function "%s"' % func.__name__)) s = string_at(result, last_arg_byref(cargs)) free(result) return s
[ "def", "check_sized_string", "(", "result", ",", "func", ",", "cargs", ")", ":", "if", "(", "not", "result", ")", ":", "raise", "GEOSException", "(", "(", "'Invalid string pointer returned by GEOS C function \"%s\"'", "%", "func", ".", "__name__", ")", ")", "s", "=", "string_at", "(", "result", ",", "last_arg_byref", "(", "cargs", ")", ")", "free", "(", "result", ")", "return", "s" ]
error checking for routines that return explicitly sized strings .
train
false
36,976
def directoryPath(filepath): retVal = filepath if filepath: retVal = (ntpath.dirname(filepath) if isWindowsDriveLetterPath(filepath) else posixpath.dirname(filepath)) return retVal
[ "def", "directoryPath", "(", "filepath", ")", ":", "retVal", "=", "filepath", "if", "filepath", ":", "retVal", "=", "(", "ntpath", ".", "dirname", "(", "filepath", ")", "if", "isWindowsDriveLetterPath", "(", "filepath", ")", "else", "posixpath", ".", "dirname", "(", "filepath", ")", ")", "return", "retVal" ]
returns directory path for a given filepath .
train
false
36,977
def package_activity_list_html(context, data_dict): activity_stream = package_activity_list(context, data_dict) offset = int(data_dict.get('offset', 0)) extra_vars = {'controller': 'package', 'action': 'activity', 'id': data_dict['id'], 'offset': offset} return activity_streams.activity_list_to_html(context, activity_stream, extra_vars)
[ "def", "package_activity_list_html", "(", "context", ",", "data_dict", ")", ":", "activity_stream", "=", "package_activity_list", "(", "context", ",", "data_dict", ")", "offset", "=", "int", "(", "data_dict", ".", "get", "(", "'offset'", ",", "0", ")", ")", "extra_vars", "=", "{", "'controller'", ":", "'package'", ",", "'action'", ":", "'activity'", ",", "'id'", ":", "data_dict", "[", "'id'", "]", ",", "'offset'", ":", "offset", "}", "return", "activity_streams", ".", "activity_list_to_html", "(", "context", ",", "activity_stream", ",", "extra_vars", ")" ]
return a packages activity stream as html .
train
false
36,978
@importorskip('pkg_resources') def test_pkg_resources_importable(pyi_builder): pyi_builder.test_source('\n import pkg_resources\n pkg_resources.working_set.require()\n ')
[ "@", "importorskip", "(", "'pkg_resources'", ")", "def", "test_pkg_resources_importable", "(", "pyi_builder", ")", ":", "pyi_builder", ".", "test_source", "(", "'\\n import pkg_resources\\n pkg_resources.working_set.require()\\n '", ")" ]
check that a trivial example using pkg_resources does build .
train
false
36,979
def bytes_to_skip(record_size, range_start): return ((record_size - (range_start % record_size)) % record_size)
[ "def", "bytes_to_skip", "(", "record_size", ",", "range_start", ")", ":", "return", "(", "(", "record_size", "-", "(", "range_start", "%", "record_size", ")", ")", "%", "record_size", ")" ]
assume an object is composed of n records .
train
false
36,980
def trunc_string(text, max_length, suffix='...'): if (len(text) <= max_length): return text else: hard_truncated = text[:(max_length - len(suffix))] word_truncated = hard_truncated.rsplit(' ', 1)[0] return (word_truncated + suffix)
[ "def", "trunc_string", "(", "text", ",", "max_length", ",", "suffix", "=", "'...'", ")", ":", "if", "(", "len", "(", "text", ")", "<=", "max_length", ")", ":", "return", "text", "else", ":", "hard_truncated", "=", "text", "[", ":", "(", "max_length", "-", "len", "(", "suffix", ")", ")", "]", "word_truncated", "=", "hard_truncated", ".", "rsplit", "(", "' '", ",", "1", ")", "[", "0", "]", "return", "(", "word_truncated", "+", "suffix", ")" ]
truncate a string .
train
false
36,981
def get_value(name): try: metrics = get_metrics()[0] name = name.lstrip('ipmi_') result = metrics['data'][name] except Exception: result = 0 return result
[ "def", "get_value", "(", "name", ")", ":", "try", ":", "metrics", "=", "get_metrics", "(", ")", "[", "0", "]", "name", "=", "name", ".", "lstrip", "(", "'ipmi_'", ")", "result", "=", "metrics", "[", "'data'", "]", "[", "name", "]", "except", "Exception", ":", "result", "=", "0", "return", "result" ]
get value for name format: gmond_prefix_ + fs_ + metric .
train
false
36,983
def CheckForBadCharacters(filename, lines, error): for (linenum, line) in enumerate(lines): if (unicode_escape_decode('\\ufffd') in line): error(filename, linenum, 'readability/utf8', 5, 'Line contains invalid UTF-8 (or Unicode replacement character).') if ('\x00' in line): error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
[ "def", "CheckForBadCharacters", "(", "filename", ",", "lines", ",", "error", ")", ":", "for", "(", "linenum", ",", "line", ")", "in", "enumerate", "(", "lines", ")", ":", "if", "(", "unicode_escape_decode", "(", "'\\\\ufffd'", ")", "in", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/utf8'", ",", "5", ",", "'Line contains invalid UTF-8 (or Unicode replacement character).'", ")", "if", "(", "'\\x00'", "in", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/nul'", ",", "5", ",", "'Line contains NUL byte.'", ")" ]
logs an error for each line containing bad characters .
train
true
36,984
@require_GET def activate_watch(request, watch_id, secret): watch = get_object_or_404(Watch, pk=watch_id) question = watch.content_object if ((watch.secret == secret) and isinstance(question, Question)): watch.activate().save() statsd.incr('questions.watches.activate') return render(request, 'questions/activate_watch.html', {'question': question, 'unsubscribe_url': reverse('questions.unsubscribe', args=[watch_id, secret]), 'is_active': watch.is_active})
[ "@", "require_GET", "def", "activate_watch", "(", "request", ",", "watch_id", ",", "secret", ")", ":", "watch", "=", "get_object_or_404", "(", "Watch", ",", "pk", "=", "watch_id", ")", "question", "=", "watch", ".", "content_object", "if", "(", "(", "watch", ".", "secret", "==", "secret", ")", "and", "isinstance", "(", "question", ",", "Question", ")", ")", ":", "watch", ".", "activate", "(", ")", ".", "save", "(", ")", "statsd", ".", "incr", "(", "'questions.watches.activate'", ")", "return", "render", "(", "request", ",", "'questions/activate_watch.html'", ",", "{", "'question'", ":", "question", ",", "'unsubscribe_url'", ":", "reverse", "(", "'questions.unsubscribe'", ",", "args", "=", "[", "watch_id", ",", "secret", "]", ")", ",", "'is_active'", ":", "watch", ".", "is_active", "}", ")" ]
activate watching a question .
train
false
36,986
def find_r_to_boundary_of_closedpath(inside_closedpath, xy, cos_t, sin_t, rmin=0.0, rmax=1.0, tolerence=0.01): (cx, cy) = xy def _f(r): return (((cos_t * r) + cx), ((sin_t * r) + cy)) find_bezier_t_intersecting_with_closedpath(_f, inside_closedpath, t0=rmin, t1=rmax, tolerence=tolerence)
[ "def", "find_r_to_boundary_of_closedpath", "(", "inside_closedpath", ",", "xy", ",", "cos_t", ",", "sin_t", ",", "rmin", "=", "0.0", ",", "rmax", "=", "1.0", ",", "tolerence", "=", "0.01", ")", ":", "(", "cx", ",", "cy", ")", "=", "xy", "def", "_f", "(", "r", ")", ":", "return", "(", "(", "(", "cos_t", "*", "r", ")", "+", "cx", ")", ",", "(", "(", "sin_t", "*", "r", ")", "+", "cy", ")", ")", "find_bezier_t_intersecting_with_closedpath", "(", "_f", ",", "inside_closedpath", ",", "t0", "=", "rmin", ",", "t1", "=", "rmax", ",", "tolerence", "=", "tolerence", ")" ]
find a radius r between *rmin* and *rmax* at which it intersect with the path .
train
false
36,988
def _parse_volumes(data_list): if data_list: return [MountedDataset(dataset_id=UUID(data[u'dataset_id']), mountpoint=data[u'mountpoint']) for data in data_list] else: return None
[ "def", "_parse_volumes", "(", "data_list", ")", ":", "if", "data_list", ":", "return", "[", "MountedDataset", "(", "dataset_id", "=", "UUID", "(", "data", "[", "u'dataset_id'", "]", ")", ",", "mountpoint", "=", "data", "[", "u'mountpoint'", "]", ")", "for", "data", "in", "data_list", "]", "else", ":", "return", "None" ]
parse a list of volume configuration .
train
false
36,989
def auth_allow_anonymous_access(action): @functools.wraps(action) def wrapper(context, data_dict): return action(context, data_dict) wrapper.auth_allow_anonymous_access = True return wrapper
[ "def", "auth_allow_anonymous_access", "(", "action", ")", ":", "@", "functools", ".", "wraps", "(", "action", ")", "def", "wrapper", "(", "context", ",", "data_dict", ")", ":", "return", "action", "(", "context", ",", "data_dict", ")", "wrapper", ".", "auth_allow_anonymous_access", "=", "True", "return", "wrapper" ]
flag an auth function as not requiring a logged in user this means that check_access wont automatically raise a notauthorized exception if an authenticated user is not provided in the context .
train
false
36,990
def update_jsi18n_file(code='en'): translation.activate(code) output_dir = os.path.join(settings.CONTENT_ROOT, 'locale', 'js', 'i18n') ensure_dir(output_dir) output_file = os.path.join(output_dir, ('%s.js' % code)) request = HttpRequest() request.path = output_file request.session = {settings.LANGUAGE_COOKIE_NAME: code} response = javascript_catalog(request, packages=('ka-lite.locale',), domain='djangojs') icu_js = '' for path in settings.LOCALE_PATHS: try: icu_js = open(os.path.join(path, code, ('%s_icu.js' % code)), 'r').read() except IOError: logging.warn('No {code}_icu.js file found in locale_path {path}'.format(code=code, path=path)) output_js = ((response.content + '\n') + icu_js) logging.info('Writing i18nized js file to {0}'.format(output_file)) with open(output_file, 'w') as fp: fp.write(output_js) translation.deactivate()
[ "def", "update_jsi18n_file", "(", "code", "=", "'en'", ")", ":", "translation", ".", "activate", "(", "code", ")", "output_dir", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "CONTENT_ROOT", ",", "'locale'", ",", "'js'", ",", "'i18n'", ")", "ensure_dir", "(", "output_dir", ")", "output_file", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "(", "'%s.js'", "%", "code", ")", ")", "request", "=", "HttpRequest", "(", ")", "request", ".", "path", "=", "output_file", "request", ".", "session", "=", "{", "settings", ".", "LANGUAGE_COOKIE_NAME", ":", "code", "}", "response", "=", "javascript_catalog", "(", "request", ",", "packages", "=", "(", "'ka-lite.locale'", ",", ")", ",", "domain", "=", "'djangojs'", ")", "icu_js", "=", "''", "for", "path", "in", "settings", ".", "LOCALE_PATHS", ":", "try", ":", "icu_js", "=", "open", "(", "os", ".", "path", ".", "join", "(", "path", ",", "code", ",", "(", "'%s_icu.js'", "%", "code", ")", ")", ",", "'r'", ")", ".", "read", "(", ")", "except", "IOError", ":", "logging", ".", "warn", "(", "'No {code}_icu.js file found in locale_path {path}'", ".", "format", "(", "code", "=", "code", ",", "path", "=", "path", ")", ")", "output_js", "=", "(", "(", "response", ".", "content", "+", "'\\n'", ")", "+", "icu_js", ")", "logging", ".", "info", "(", "'Writing i18nized js file to {0}'", ".", "format", "(", "output_file", ")", ")", "with", "open", "(", "output_file", ",", "'w'", ")", "as", "fp", ":", "fp", ".", "write", "(", "output_js", ")", "translation", ".", "deactivate", "(", ")" ]
for efficienys sake .
train
false
36,991
def get_perms_for(roles, perm_doctype=u'DocPerm'): return frappe.db.sql(u'select * from `tab{doctype}` where docstatus=0\n DCTB DCTB and ifnull(permlevel,0)=0\n DCTB DCTB and role in ({roles})'.format(doctype=perm_doctype, roles=u', '.join(([u'%s'] * len(roles)))), tuple(roles), as_dict=1)
[ "def", "get_perms_for", "(", "roles", ",", "perm_doctype", "=", "u'DocPerm'", ")", ":", "return", "frappe", ".", "db", ".", "sql", "(", "u'select * from `tab{doctype}` where docstatus=0\\n DCTB DCTB and ifnull(permlevel,0)=0\\n DCTB DCTB and role in ({roles})'", ".", "format", "(", "doctype", "=", "perm_doctype", ",", "roles", "=", "u', '", ".", "join", "(", "(", "[", "u'%s'", "]", "*", "len", "(", "roles", ")", ")", ")", ")", ",", "tuple", "(", "roles", ")", ",", "as_dict", "=", "1", ")" ]
get perms for given roles .
train
false
36,993
def lms2rgb(lms_Nx3, conversionMatrix=None): lms_3xN = numpy.transpose(lms_Nx3) if (conversionMatrix is None): cones_to_rgb = numpy.asarray([[4.97068857, (-4.14354132), 0.17285275], [(-0.90913894), 2.15671326, (-0.24757432)], [(-0.03976551), (-0.14253782), 1.18230333]]) logging.warning('This monitor has not been color-calibrated. Using default LMS conversion matrix.') else: cones_to_rgb = conversionMatrix rgb = numpy.dot(cones_to_rgb, lms_3xN) return numpy.transpose(rgb)
[ "def", "lms2rgb", "(", "lms_Nx3", ",", "conversionMatrix", "=", "None", ")", ":", "lms_3xN", "=", "numpy", ".", "transpose", "(", "lms_Nx3", ")", "if", "(", "conversionMatrix", "is", "None", ")", ":", "cones_to_rgb", "=", "numpy", ".", "asarray", "(", "[", "[", "4.97068857", ",", "(", "-", "4.14354132", ")", ",", "0.17285275", "]", ",", "[", "(", "-", "0.90913894", ")", ",", "2.15671326", ",", "(", "-", "0.24757432", ")", "]", ",", "[", "(", "-", "0.03976551", ")", ",", "(", "-", "0.14253782", ")", ",", "1.18230333", "]", "]", ")", "logging", ".", "warning", "(", "'This monitor has not been color-calibrated. Using default LMS conversion matrix.'", ")", "else", ":", "cones_to_rgb", "=", "conversionMatrix", "rgb", "=", "numpy", ".", "dot", "(", "cones_to_rgb", ",", "lms_3xN", ")", "return", "numpy", ".", "transpose", "(", "rgb", ")" ]
convert from cone space to rgb .
train
false
36,994
def data2groupcont(x1, x2): if (x2.ndim == 1): x2 = x2[:, None] dummy = data2dummy(x1, returnall=True) return (dummy * x2)
[ "def", "data2groupcont", "(", "x1", ",", "x2", ")", ":", "if", "(", "x2", ".", "ndim", "==", "1", ")", ":", "x2", "=", "x2", "[", ":", ",", "None", "]", "dummy", "=", "data2dummy", "(", "x1", ",", "returnall", "=", "True", ")", "return", "(", "dummy", "*", "x2", ")" ]
create dummy continuous variable parameters x1 : 1d array label or group array x2 : 1d array continuous variable notes useful for group specific slope coefficients in regression .
train
false
36,995
def _bind_module_members(module, data, section): members = {} for (key, val) in module.__dict__.iteritems(): if (not isinstance(val, Config)): continue members[key] = val module.__dict__[key] = val.bind(data, prefix=section) return members
[ "def", "_bind_module_members", "(", "module", ",", "data", ",", "section", ")", ":", "members", "=", "{", "}", "for", "(", "key", ",", "val", ")", "in", "module", ".", "__dict__", ".", "iteritems", "(", ")", ":", "if", "(", "not", "isinstance", "(", "val", ",", "Config", ")", ")", ":", "continue", "members", "[", "key", "]", "=", "val", "module", ".", "__dict__", "[", "key", "]", "=", "val", ".", "bind", "(", "data", ",", "prefix", "=", "section", ")", "return", "members" ]
bind all config instances found inside the given module to the given data .
train
false
36,996
def walk_revctrl(dirname=''): for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): for item in ep.load()(dirname): (yield item)
[ "def", "walk_revctrl", "(", "dirname", "=", "''", ")", ":", "for", "ep", "in", "pkg_resources", ".", "iter_entry_points", "(", "'setuptools.file_finders'", ")", ":", "for", "item", "in", "ep", ".", "load", "(", ")", "(", "dirname", ")", ":", "(", "yield", "item", ")" ]
find all files under revision control .
train
true
36,997
def directive_fun(match, directive): directive_to_alert = dict(note='info', warning='danger') return '<div class="alert alert-{0}"><h4>{1}</h4><p>{2}</p></div>'.format(directive_to_alert[directive], directive.capitalize(), match.group(1).strip())
[ "def", "directive_fun", "(", "match", ",", "directive", ")", ":", "directive_to_alert", "=", "dict", "(", "note", "=", "'info'", ",", "warning", "=", "'danger'", ")", "return", "'<div class=\"alert alert-{0}\"><h4>{1}</h4><p>{2}</p></div>'", ".", "format", "(", "directive_to_alert", "[", "directive", "]", ",", "directive", ".", "capitalize", "(", ")", ",", "match", ".", "group", "(", "1", ")", ".", "strip", "(", ")", ")" ]
helper to fill in directives .
train
true
36,998
def delegate_manager(method): @functools.wraps(method) def wrapped(self, *args, **kwargs): if self._base_manager: return getattr(self._base_manager, method.__name__)(*args, **kwargs) return method(self, *args, **kwargs) return wrapped
[ "def", "delegate_manager", "(", "method", ")", ":", "@", "functools", ".", "wraps", "(", "method", ")", "def", "wrapped", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "self", ".", "_base_manager", ":", "return", "getattr", "(", "self", ".", "_base_manager", ",", "method", ".", "__name__", ")", "(", "*", "args", ",", "**", "kwargs", ")", "return", "method", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped" ]
delegate method calls to base manager .
train
false
36,999
def _auto_fn(name): if ('.' in name): (dialect, driver) = name.split('.') else: dialect = name driver = 'base' if (dialect in _translates): translated = _translates[dialect] util.warn_deprecated(("The '%s' dialect name has been renamed to '%s'" % (dialect, translated))) dialect = translated try: module = __import__(('sqlalchemy.dialects.%s' % (dialect,))).dialects except ImportError: return None module = getattr(module, dialect) if hasattr(module, driver): module = getattr(module, driver) return (lambda : module.dialect) else: return None
[ "def", "_auto_fn", "(", "name", ")", ":", "if", "(", "'.'", "in", "name", ")", ":", "(", "dialect", ",", "driver", ")", "=", "name", ".", "split", "(", "'.'", ")", "else", ":", "dialect", "=", "name", "driver", "=", "'base'", "if", "(", "dialect", "in", "_translates", ")", ":", "translated", "=", "_translates", "[", "dialect", "]", "util", ".", "warn_deprecated", "(", "(", "\"The '%s' dialect name has been renamed to '%s'\"", "%", "(", "dialect", ",", "translated", ")", ")", ")", "dialect", "=", "translated", "try", ":", "module", "=", "__import__", "(", "(", "'sqlalchemy.dialects.%s'", "%", "(", "dialect", ",", ")", ")", ")", ".", "dialects", "except", "ImportError", ":", "return", "None", "module", "=", "getattr", "(", "module", ",", "dialect", ")", "if", "hasattr", "(", "module", ",", "driver", ")", ":", "module", "=", "getattr", "(", "module", ",", "driver", ")", "return", "(", "lambda", ":", "module", ".", "dialect", ")", "else", ":", "return", "None" ]
default dialect importer .
train
false
37,001
def read_datasource(jboss_config, name, profile=None): log.debug('======================== MODULE FUNCTION: jboss7.read_datasource, name=%s', name) return __read_datasource(jboss_config, name, profile)
[ "def", "read_datasource", "(", "jboss_config", ",", "name", ",", "profile", "=", "None", ")", ":", "log", ".", "debug", "(", "'======================== MODULE FUNCTION: jboss7.read_datasource, name=%s'", ",", "name", ")", "return", "__read_datasource", "(", "jboss_config", ",", "name", ",", "profile", ")" ]
read datasource properties in the running jboss instance .
train
true
37,002
def lazify_task(task, start=True): if (not istask(task)): return task (head, tail) = (task[0], task[1:]) if ((not start) and (head in (list, reify))): task = task[1] return lazify_task(start=False, *tail) else: return ((head,) + tuple([lazify_task(arg, False) for arg in tail]))
[ "def", "lazify_task", "(", "task", ",", "start", "=", "True", ")", ":", "if", "(", "not", "istask", "(", "task", ")", ")", ":", "return", "task", "(", "head", ",", "tail", ")", "=", "(", "task", "[", "0", "]", ",", "task", "[", "1", ":", "]", ")", "if", "(", "(", "not", "start", ")", "and", "(", "head", "in", "(", "list", ",", "reify", ")", ")", ")", ":", "task", "=", "task", "[", "1", "]", "return", "lazify_task", "(", "start", "=", "False", ",", "*", "tail", ")", "else", ":", "return", "(", "(", "head", ",", ")", "+", "tuple", "(", "[", "lazify_task", "(", "arg", ",", "False", ")", "for", "arg", "in", "tail", "]", ")", ")" ]
given a task .
train
false
37,003
def project_download_media(request, project_slug, type_, version_slug): version = get_object_or_404(Version.objects.public(user=request.user), project__slug=project_slug, slug=version_slug) privacy_level = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public') if ((privacy_level == 'public') or settings.DEBUG): path = os.path.join(settings.MEDIA_URL, type_, project_slug, version_slug, ('%s.%s' % (project_slug, type_.replace('htmlzip', 'zip')))) return HttpResponseRedirect(path) else: path = version.project.get_production_media_path(type_=type_, version_slug=version_slug).replace(settings.PRODUCTION_ROOT, '/prod_artifacts') (content_type, encoding) = mimetypes.guess_type(path) content_type = (content_type or 'application/octet-stream') response = HttpResponse(content_type=content_type) if encoding: response['Content-Encoding'] = encoding response['X-Accel-Redirect'] = path filename = ('%s-%s.%s' % (project_slug, version_slug, path.split('.')[(-1)])) response['Content-Disposition'] = ('filename=%s' % filename) return response
[ "def", "project_download_media", "(", "request", ",", "project_slug", ",", "type_", ",", "version_slug", ")", ":", "version", "=", "get_object_or_404", "(", "Version", ".", "objects", ".", "public", "(", "user", "=", "request", ".", "user", ")", ",", "project__slug", "=", "project_slug", ",", "slug", "=", "version_slug", ")", "privacy_level", "=", "getattr", "(", "settings", ",", "'DEFAULT_PRIVACY_LEVEL'", ",", "'public'", ")", "if", "(", "(", "privacy_level", "==", "'public'", ")", "or", "settings", ".", "DEBUG", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_URL", ",", "type_", ",", "project_slug", ",", "version_slug", ",", "(", "'%s.%s'", "%", "(", "project_slug", ",", "type_", ".", "replace", "(", "'htmlzip'", ",", "'zip'", ")", ")", ")", ")", "return", "HttpResponseRedirect", "(", "path", ")", "else", ":", "path", "=", "version", ".", "project", ".", "get_production_media_path", "(", "type_", "=", "type_", ",", "version_slug", "=", "version_slug", ")", ".", "replace", "(", "settings", ".", "PRODUCTION_ROOT", ",", "'/prod_artifacts'", ")", "(", "content_type", ",", "encoding", ")", "=", "mimetypes", ".", "guess_type", "(", "path", ")", "content_type", "=", "(", "content_type", "or", "'application/octet-stream'", ")", "response", "=", "HttpResponse", "(", "content_type", "=", "content_type", ")", "if", "encoding", ":", "response", "[", "'Content-Encoding'", "]", "=", "encoding", "response", "[", "'X-Accel-Redirect'", "]", "=", "path", "filename", "=", "(", "'%s-%s.%s'", "%", "(", "project_slug", ",", "version_slug", ",", "path", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", ")", ")", "response", "[", "'Content-Disposition'", "]", "=", "(", "'filename=%s'", "%", "filename", ")", "return", "response" ]
download a specific piece of media .
train
false
37,004
def in6_and(a1, a2): return _in6_bitops(a1, a2, 1)
[ "def", "in6_and", "(", "a1", ",", "a2", ")", ":", "return", "_in6_bitops", "(", "a1", ",", "a2", ",", "1", ")" ]
provides a bit to bit and of provided addresses .
train
false
37,005
def find_sequence(fields, doing_specialization): for f in fields: if f.seq: if (not doing_specialization): return True if (str(f.type) not in _SPECIALIZED_SEQUENCES): return True return False
[ "def", "find_sequence", "(", "fields", ",", "doing_specialization", ")", ":", "for", "f", "in", "fields", ":", "if", "f", ".", "seq", ":", "if", "(", "not", "doing_specialization", ")", ":", "return", "True", "if", "(", "str", "(", "f", ".", "type", ")", "not", "in", "_SPECIALIZED_SEQUENCES", ")", ":", "return", "True", "return", "False" ]
return true if any field uses a sequence .
train
false
37,006
def subfiles(d): r = [] stack = [([], d)] while stack: (p, n) = stack.pop() if os.path.isdir(n): for s in os.listdir(n): if (s[:1] != '.'): stack.append(((copy(p) + [s]), os.path.join(n, s))) else: r.append((p, n)) return r
[ "def", "subfiles", "(", "d", ")", ":", "r", "=", "[", "]", "stack", "=", "[", "(", "[", "]", ",", "d", ")", "]", "while", "stack", ":", "(", "p", ",", "n", ")", "=", "stack", ".", "pop", "(", ")", "if", "os", ".", "path", ".", "isdir", "(", "n", ")", ":", "for", "s", "in", "os", ".", "listdir", "(", "n", ")", ":", "if", "(", "s", "[", ":", "1", "]", "!=", "'.'", ")", ":", "stack", ".", "append", "(", "(", "(", "copy", "(", "p", ")", "+", "[", "s", "]", ")", ",", "os", ".", "path", ".", "join", "(", "n", ",", "s", ")", ")", ")", "else", ":", "r", ".", "append", "(", "(", "p", ",", "n", ")", ")", "return", "r" ]
return list of tuples for all the files in directory d .
train
false
37,008
def convert_image_unsafe(source, dest, out_format, run_as_root=False): _convert_image(source, dest, None, out_format, run_as_root)
[ "def", "convert_image_unsafe", "(", "source", ",", "dest", ",", "out_format", ",", "run_as_root", "=", "False", ")", ":", "_convert_image", "(", "source", ",", "dest", ",", "None", ",", "out_format", ",", "run_as_root", ")" ]
convert image to other format .
train
false
37,009
def get_instance_pci_devs(inst, request_id=None): pci_devices = inst.pci_devices if (pci_devices is None): return [] return [device for device in pci_devices if ((device.request_id == request_id) or (request_id == 'all'))]
[ "def", "get_instance_pci_devs", "(", "inst", ",", "request_id", "=", "None", ")", ":", "pci_devices", "=", "inst", ".", "pci_devices", "if", "(", "pci_devices", "is", "None", ")", ":", "return", "[", "]", "return", "[", "device", "for", "device", "in", "pci_devices", "if", "(", "(", "device", ".", "request_id", "==", "request_id", ")", "or", "(", "request_id", "==", "'all'", ")", ")", "]" ]
get the devices allocated to one or all requests for an instance .
train
false
37,010
def is_hashable(arg): try: hash(arg) except TypeError: return False else: return True
[ "def", "is_hashable", "(", "arg", ")", ":", "try", ":", "hash", "(", "arg", ")", "except", "TypeError", ":", "return", "False", "else", ":", "return", "True" ]
return true if hash will succeed .
train
false
37,012
def addPointOnPathIfFree(path, pathIndex, pixelTable, point, pointIndex, width): if isAddedPointOnPathFree(path, pixelTable, point, pointIndex, width): addPointOnPath(path, pathIndex, pixelTable, point, pointIndex, width)
[ "def", "addPointOnPathIfFree", "(", "path", ",", "pathIndex", ",", "pixelTable", ",", "point", ",", "pointIndex", ",", "width", ")", ":", "if", "isAddedPointOnPathFree", "(", "path", ",", "pixelTable", ",", "point", ",", "pointIndex", ",", "width", ")", ":", "addPointOnPath", "(", "path", ",", "pathIndex", ",", "pixelTable", ",", "point", ",", "pointIndex", ",", "width", ")" ]
add the closest point to a path .
train
false
37,016
def mulled_tags_for(namespace, image): tags = quay_versions(namespace, image) tags = version_sorted(tags) return tags
[ "def", "mulled_tags_for", "(", "namespace", ",", "image", ")", ":", "tags", "=", "quay_versions", "(", "namespace", ",", "image", ")", "tags", "=", "version_sorted", "(", "tags", ")", "return", "tags" ]
fetch remote tags available for supplied image name .
train
false
37,019
def set_tax(request, tax_type, tax_total): request.session[u'tax_type'] = _str(tax_type) request.session[u'tax_total'] = _str(tax_total)
[ "def", "set_tax", "(", "request", ",", "tax_type", ",", "tax_total", ")", ":", "request", ".", "session", "[", "u'tax_type'", "]", "=", "_str", "(", "tax_type", ")", "request", ".", "session", "[", "u'tax_total'", "]", "=", "_str", "(", "tax_total", ")" ]
stores the tax type and total in the session .
train
false
37,020
def logError(err): log.failure(None, err) return err
[ "def", "logError", "(", "err", ")", ":", "log", ".", "failure", "(", "None", ",", "err", ")", "return", "err" ]
log and return failure .
train
false
37,021
def unsorted_unique(lista): return list(set(lista))
[ "def", "unsorted_unique", "(", "lista", ")", ":", "return", "list", "(", "set", "(", "lista", ")", ")" ]
removes duplicates from lista neglecting its initial ordering .
train
false
37,022
def get_required_version(namespace): global _versions return _versions.get(namespace, None)
[ "def", "get_required_version", "(", "namespace", ")", ":", "global", "_versions", "return", "_versions", ".", "get", "(", "namespace", ",", "None", ")" ]
returns the version string for the namespace that was previously required through require_version or none .
train
false
37,023
@bdd.then(bdd.parsers.parse('the cookie {name} should be set to {value}')) def check_cookie(quteproc, name, value): content = quteproc.get_content() data = json.loads(content) print data assert (data['cookies'][name] == value)
[ "@", "bdd", ".", "then", "(", "bdd", ".", "parsers", ".", "parse", "(", "'the cookie {name} should be set to {value}'", ")", ")", "def", "check_cookie", "(", "quteproc", ",", "name", ",", "value", ")", ":", "content", "=", "quteproc", ".", "get_content", "(", ")", "data", "=", "json", ".", "loads", "(", "content", ")", "print", "data", "assert", "(", "data", "[", "'cookies'", "]", "[", "name", "]", "==", "value", ")" ]
check if a given cookie is set correctly .
train
false
37,024
def compute_min_alignment_length(seqs_f, fraction=0.75): med_length = median([len(s) for (_, s) in parse_fasta(seqs_f)]) return int((med_length * fraction))
[ "def", "compute_min_alignment_length", "(", "seqs_f", ",", "fraction", "=", "0.75", ")", ":", "med_length", "=", "median", "(", "[", "len", "(", "s", ")", "for", "(", "_", ",", "s", ")", "in", "parse_fasta", "(", "seqs_f", ")", "]", ")", "return", "int", "(", "(", "med_length", "*", "fraction", ")", ")" ]
compute the min alignment length as n standard deviations below the mean .
train
false
37,026
def open_input_file(filename): if (filename is None): return sys.stdin else: try: return open(filename, 'rb') except IOError as err: fatal_error(str(err))
[ "def", "open_input_file", "(", "filename", ")", ":", "if", "(", "filename", "is", "None", ")", ":", "return", "sys", ".", "stdin", "else", ":", "try", ":", "return", "open", "(", "filename", ",", "'rb'", ")", "except", "IOError", "as", "err", ":", "fatal_error", "(", "str", "(", "err", ")", ")" ]
open file for reading .
train
false
37,027
def remove_port_from_tool_shed_url(tool_shed_url): try: if (tool_shed_url.find(':') > 0): new_tool_shed_url = tool_shed_url.split(':')[0] else: new_tool_shed_url = tool_shed_url return new_tool_shed_url.rstrip('/') except Exception as e: if (tool_shed_url is not None): log.exception('Handled exception removing the port from Tool Shed URL %s:\n%s', str(tool_shed_url), e) return tool_shed_url
[ "def", "remove_port_from_tool_shed_url", "(", "tool_shed_url", ")", ":", "try", ":", "if", "(", "tool_shed_url", ".", "find", "(", "':'", ")", ">", "0", ")", ":", "new_tool_shed_url", "=", "tool_shed_url", ".", "split", "(", "':'", ")", "[", "0", "]", "else", ":", "new_tool_shed_url", "=", "tool_shed_url", "return", "new_tool_shed_url", ".", "rstrip", "(", "'/'", ")", "except", "Exception", "as", "e", ":", "if", "(", "tool_shed_url", "is", "not", "None", ")", ":", "log", ".", "exception", "(", "'Handled exception removing the port from Tool Shed URL %s:\\n%s'", ",", "str", "(", "tool_shed_url", ")", ",", "e", ")", "return", "tool_shed_url" ]
return a partial tool shed url .
train
false
37,028
def notify_usage_exists(context, volume_ref, current_period=False): (begin, end) = utils.last_completed_audit_period() if current_period: audit_start = end audit_end = timeutils.utcnow() else: audit_start = begin audit_end = end extra_usage_info = dict(audit_period_beginning=str(audit_start), audit_period_ending=str(audit_end)) notify_about_volume_usage(context, volume_ref, 'exists', extra_usage_info=extra_usage_info)
[ "def", "notify_usage_exists", "(", "context", ",", "volume_ref", ",", "current_period", "=", "False", ")", ":", "(", "begin", ",", "end", ")", "=", "utils", ".", "last_completed_audit_period", "(", ")", "if", "current_period", ":", "audit_start", "=", "end", "audit_end", "=", "timeutils", ".", "utcnow", "(", ")", "else", ":", "audit_start", "=", "begin", "audit_end", "=", "end", "extra_usage_info", "=", "dict", "(", "audit_period_beginning", "=", "str", "(", "audit_start", ")", ",", "audit_period_ending", "=", "str", "(", "audit_end", ")", ")", "notify_about_volume_usage", "(", "context", ",", "volume_ref", ",", "'exists'", ",", "extra_usage_info", "=", "extra_usage_info", ")" ]
generates exists notification for an instance for usage auditing purposes .
train
false
37,029
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
37,031
def _get_buildout_ver(directory='.'): directory = os.path.abspath(directory) buildoutver = 2 try: files = _find_cfgs(directory) for f in files: with salt.utils.fopen(f) as fic: buildout1re = re.compile('^zc\\.buildout\\s*=\\s*1', RE_F) dfic = fic.read() if (('buildout.dumppick' in dfic) or buildout1re.search(dfic)): buildoutver = 1 bcontent = _get_bootstrap_content(directory) if (('--download-base' in bcontent) or ('--setup-source' in bcontent) or ('--distribute' in bcontent)): buildoutver = 1 except (OSError, IOError): pass return buildoutver
[ "def", "_get_buildout_ver", "(", "directory", "=", "'.'", ")", ":", "directory", "=", "os", ".", "path", ".", "abspath", "(", "directory", ")", "buildoutver", "=", "2", "try", ":", "files", "=", "_find_cfgs", "(", "directory", ")", "for", "f", "in", "files", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "f", ")", "as", "fic", ":", "buildout1re", "=", "re", ".", "compile", "(", "'^zc\\\\.buildout\\\\s*=\\\\s*1'", ",", "RE_F", ")", "dfic", "=", "fic", ".", "read", "(", ")", "if", "(", "(", "'buildout.dumppick'", "in", "dfic", ")", "or", "buildout1re", ".", "search", "(", "dfic", ")", ")", ":", "buildoutver", "=", "1", "bcontent", "=", "_get_bootstrap_content", "(", "directory", ")", "if", "(", "(", "'--download-base'", "in", "bcontent", ")", "or", "(", "'--setup-source'", "in", "bcontent", ")", "or", "(", "'--distribute'", "in", "bcontent", ")", ")", ":", "buildoutver", "=", "1", "except", "(", "OSError", ",", "IOError", ")", ":", "pass", "return", "buildoutver" ]
check for buildout versions .
train
true
37,032
def _csd_from_mt(x_mt, y_mt, weights_x, weights_y): csd = np.sum(((weights_x * x_mt) * (weights_y * y_mt).conj()), axis=(-2)) denom = (np.sqrt((weights_x * weights_x.conj()).real.sum(axis=(-2))) * np.sqrt((weights_y * weights_y.conj()).real.sum(axis=(-2)))) csd *= (2 / denom) return csd
[ "def", "_csd_from_mt", "(", "x_mt", ",", "y_mt", ",", "weights_x", ",", "weights_y", ")", ":", "csd", "=", "np", ".", "sum", "(", "(", "(", "weights_x", "*", "x_mt", ")", "*", "(", "weights_y", "*", "y_mt", ")", ".", "conj", "(", ")", ")", ",", "axis", "=", "(", "-", "2", ")", ")", "denom", "=", "(", "np", ".", "sqrt", "(", "(", "weights_x", "*", "weights_x", ".", "conj", "(", ")", ")", ".", "real", ".", "sum", "(", "axis", "=", "(", "-", "2", ")", ")", ")", "*", "np", ".", "sqrt", "(", "(", "weights_y", "*", "weights_y", ".", "conj", "(", ")", ")", ".", "real", ".", "sum", "(", "axis", "=", "(", "-", "2", ")", ")", ")", ")", "csd", "*=", "(", "2", "/", "denom", ")", "return", "csd" ]
compute csd from tapered spectra .
train
false