id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
45,288
def guess_pygments_highlighter(filename): try: from pygments.lexers import get_lexer_for_filename, get_lexer_by_name from pygments.util import ClassNotFound except ImportError: return TextSH (root, ext) = os.path.splitext(filename) if (ext in custom_extension_lexer_mapping): lexer = get_lexer_by_name(custom_extension_lexer_mapping[ext]) else: try: lexer = get_lexer_for_filename(filename) except ClassNotFound: return TextSH class GuessedPygmentsSH(PygmentsSH, ): _lexer = lexer return GuessedPygmentsSH
[ "def", "guess_pygments_highlighter", "(", "filename", ")", ":", "try", ":", "from", "pygments", ".", "lexers", "import", "get_lexer_for_filename", ",", "get_lexer_by_name", "from", "pygments", ".", "util", "import", "ClassNotFound", "except", "ImportError", ":", "return", "TextSH", "(", "root", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "if", "(", "ext", "in", "custom_extension_lexer_mapping", ")", ":", "lexer", "=", "get_lexer_by_name", "(", "custom_extension_lexer_mapping", "[", "ext", "]", ")", "else", ":", "try", ":", "lexer", "=", "get_lexer_for_filename", "(", "filename", ")", "except", "ClassNotFound", ":", "return", "TextSH", "class", "GuessedPygmentsSH", "(", "PygmentsSH", ",", ")", ":", "_lexer", "=", "lexer", "return", "GuessedPygmentsSH" ]
factory to generate syntax highlighter for the given filename .
train
true
45,289
def is_provider_configured(opts, provider, required_keys=()): if (':' in provider): (alias, driver) = provider.split(':') if (alias not in opts['providers']): return False if (driver not in opts['providers'][alias]): return False for key in required_keys: if (opts['providers'][alias][driver].get(key, None) is None): log.warning("The required '{0}' configuration setting is missing from the '{1}' driver, which is configured under the '{2}' alias.".format(key, provider, alias)) return False return opts['providers'][alias][driver] for (alias, drivers) in six.iteritems(opts['providers']): for (driver, provider_details) in six.iteritems(drivers): if (driver != provider): continue skip_provider = False for key in required_keys: if (provider_details.get(key, None) is None): log.warning("The required '{0}' configuration setting is missing from the '{1}' driver, which is configured under the '{2}' alias.".format(key, provider, alias)) skip_provider = True break if skip_provider: continue return provider_details return False
[ "def", "is_provider_configured", "(", "opts", ",", "provider", ",", "required_keys", "=", "(", ")", ")", ":", "if", "(", "':'", "in", "provider", ")", ":", "(", "alias", ",", "driver", ")", "=", "provider", ".", "split", "(", "':'", ")", "if", "(", "alias", "not", "in", "opts", "[", "'providers'", "]", ")", ":", "return", "False", "if", "(", "driver", "not", "in", "opts", "[", "'providers'", "]", "[", "alias", "]", ")", ":", "return", "False", "for", "key", "in", "required_keys", ":", "if", "(", "opts", "[", "'providers'", "]", "[", "alias", "]", "[", "driver", "]", ".", "get", "(", "key", ",", "None", ")", "is", "None", ")", ":", "log", ".", "warning", "(", "\"The required '{0}' configuration setting is missing from the '{1}' driver, which is configured under the '{2}' alias.\"", ".", "format", "(", "key", ",", "provider", ",", "alias", ")", ")", "return", "False", "return", "opts", "[", "'providers'", "]", "[", "alias", "]", "[", "driver", "]", "for", "(", "alias", ",", "drivers", ")", "in", "six", ".", "iteritems", "(", "opts", "[", "'providers'", "]", ")", ":", "for", "(", "driver", ",", "provider_details", ")", "in", "six", ".", "iteritems", "(", "drivers", ")", ":", "if", "(", "driver", "!=", "provider", ")", ":", "continue", "skip_provider", "=", "False", "for", "key", "in", "required_keys", ":", "if", "(", "provider_details", ".", "get", "(", "key", ",", "None", ")", "is", "None", ")", ":", "log", ".", "warning", "(", "\"The required '{0}' configuration setting is missing from the '{1}' driver, which is configured under the '{2}' alias.\"", ".", "format", "(", "key", ",", "provider", ",", "alias", ")", ")", "skip_provider", "=", "True", "break", "if", "skip_provider", ":", "continue", "return", "provider_details", "return", "False" ]
check and return the first matching and fully configured cloud provider configuration .
train
true
45,291
def compile_plugin_translations(plugin): plugin_folder = os.path.join(current_app.config['PLUGINS_FOLDER'], plugin) translations_folder = os.path.join(plugin_folder, 'translations') subprocess.call(['pybabel', 'compile', '-d', translations_folder])
[ "def", "compile_plugin_translations", "(", "plugin", ")", ":", "plugin_folder", "=", "os", ".", "path", ".", "join", "(", "current_app", ".", "config", "[", "'PLUGINS_FOLDER'", "]", ",", "plugin", ")", "translations_folder", "=", "os", ".", "path", ".", "join", "(", "plugin_folder", ",", "'translations'", ")", "subprocess", ".", "call", "(", "[", "'pybabel'", ",", "'compile'", ",", "'-d'", ",", "translations_folder", "]", ")" ]
compile the plugin translations .
train
false
45,292
@pytest.mark.parametrize('fast_reader', [True, False, 'force']) def test_overlapping_names(fast_reader): t = ascii.read(['a b', '1 2'], names=['b', 'a'], fast_reader=fast_reader) assert (t.colnames == ['b', 'a'])
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'fast_reader'", ",", "[", "True", ",", "False", ",", "'force'", "]", ")", "def", "test_overlapping_names", "(", "fast_reader", ")", ":", "t", "=", "ascii", ".", "read", "(", "[", "'a b'", ",", "'1 2'", "]", ",", "names", "=", "[", "'b'", ",", "'a'", "]", ",", "fast_reader", "=", "fast_reader", ")", "assert", "(", "t", ".", "colnames", "==", "[", "'b'", ",", "'a'", "]", ")" ]
check that the names argument list can overlap with the existing column names .
train
false
45,293
def dump_certificate(type, cert): bio = _new_mem_buf() if (type == FILETYPE_PEM): result_code = _lib.PEM_write_bio_X509(bio, cert._x509) elif (type == FILETYPE_ASN1): result_code = _lib.i2d_X509_bio(bio, cert._x509) elif (type == FILETYPE_TEXT): result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0) else: raise ValueError('type argument must be FILETYPE_PEM, FILETYPE_ASN1, or FILETYPE_TEXT') return _bio_to_string(bio)
[ "def", "dump_certificate", "(", "type", ",", "cert", ")", ":", "bio", "=", "_new_mem_buf", "(", ")", "if", "(", "type", "==", "FILETYPE_PEM", ")", ":", "result_code", "=", "_lib", ".", "PEM_write_bio_X509", "(", "bio", ",", "cert", ".", "_x509", ")", "elif", "(", "type", "==", "FILETYPE_ASN1", ")", ":", "result_code", "=", "_lib", ".", "i2d_X509_bio", "(", "bio", ",", "cert", ".", "_x509", ")", "elif", "(", "type", "==", "FILETYPE_TEXT", ")", ":", "result_code", "=", "_lib", ".", "X509_print_ex", "(", "bio", ",", "cert", ".", "_x509", ",", "0", ",", "0", ")", "else", ":", "raise", "ValueError", "(", "'type argument must be FILETYPE_PEM, FILETYPE_ASN1, or FILETYPE_TEXT'", ")", "return", "_bio_to_string", "(", "bio", ")" ]
dump a certificate to a buffer .
train
true
45,294
def replace_surrogate_encode(mystring): decoded = [] for ch in mystring: code = ord(ch) if (not (55296 <= code <= 56575)): raise exc if (56320 <= code <= 56447): decoded.append(_unichr((code - 56320))) elif (code <= 56575): decoded.append(_unichr((code - 56320))) else: raise NotASurrogateError return str().join(decoded)
[ "def", "replace_surrogate_encode", "(", "mystring", ")", ":", "decoded", "=", "[", "]", "for", "ch", "in", "mystring", ":", "code", "=", "ord", "(", "ch", ")", "if", "(", "not", "(", "55296", "<=", "code", "<=", "56575", ")", ")", ":", "raise", "exc", "if", "(", "56320", "<=", "code", "<=", "56447", ")", ":", "decoded", ".", "append", "(", "_unichr", "(", "(", "code", "-", "56320", ")", ")", ")", "elif", "(", "code", "<=", "56575", ")", ":", "decoded", ".", "append", "(", "_unichr", "(", "(", "code", "-", "56320", ")", ")", ")", "else", ":", "raise", "NotASurrogateError", "return", "str", "(", ")", ".", "join", "(", "decoded", ")" ]
returns a string .
train
true
45,296
def base_decode(v, length, base): if (base == 58): chars = __b58chars elif (base == 43): chars = __b43chars long_value = 0L for (i, c) in enumerate(v[::(-1)]): long_value += (chars.find(c) * (base ** i)) result = '' while (long_value >= 256): (div, mod) = divmod(long_value, 256) result = (chr(mod) + result) long_value = div result = (chr(long_value) + result) nPad = 0 for c in v: if (c == chars[0]): nPad += 1 else: break result = ((chr(0) * nPad) + result) if ((length is not None) and (len(result) != length)): return None return result
[ "def", "base_decode", "(", "v", ",", "length", ",", "base", ")", ":", "if", "(", "base", "==", "58", ")", ":", "chars", "=", "__b58chars", "elif", "(", "base", "==", "43", ")", ":", "chars", "=", "__b43chars", "long_value", "=", "0", "L", "for", "(", "i", ",", "c", ")", "in", "enumerate", "(", "v", "[", ":", ":", "(", "-", "1", ")", "]", ")", ":", "long_value", "+=", "(", "chars", ".", "find", "(", "c", ")", "*", "(", "base", "**", "i", ")", ")", "result", "=", "''", "while", "(", "long_value", ">=", "256", ")", ":", "(", "div", ",", "mod", ")", "=", "divmod", "(", "long_value", ",", "256", ")", "result", "=", "(", "chr", "(", "mod", ")", "+", "result", ")", "long_value", "=", "div", "result", "=", "(", "chr", "(", "long_value", ")", "+", "result", ")", "nPad", "=", "0", "for", "c", "in", "v", ":", "if", "(", "c", "==", "chars", "[", "0", "]", ")", ":", "nPad", "+=", "1", "else", ":", "break", "result", "=", "(", "(", "chr", "(", "0", ")", "*", "nPad", ")", "+", "result", ")", "if", "(", "(", "length", "is", "not", "None", ")", "and", "(", "len", "(", "result", ")", "!=", "length", ")", ")", ":", "return", "None", "return", "result" ]
decode v into a string of len bytes .
train
false
45,297
def i18n_url(url, translate=TRANSLATED_URLS): if translate: return ugettext_lazy(url) return url
[ "def", "i18n_url", "(", "url", ",", "translate", "=", "TRANSLATED_URLS", ")", ":", "if", "translate", ":", "return", "ugettext_lazy", "(", "url", ")", "return", "url" ]
translate or not an url part .
train
false
45,298
def serialize_tag(tag): out = '<' if (tag.tag_type == HtmlTagType.CLOSE_TAG): out += '/' out += tag.tag attributes = [] for (key, val) in tag.attributes.items(): aout = key if (val is not None): aout += ('=' + _quotify(val)) attributes.append(aout) if attributes: out += (' ' + ' '.join(attributes)) if (tag.tag_type == HtmlTagType.UNPAIRED_TAG): out += '/' return (out + '>')
[ "def", "serialize_tag", "(", "tag", ")", ":", "out", "=", "'<'", "if", "(", "tag", ".", "tag_type", "==", "HtmlTagType", ".", "CLOSE_TAG", ")", ":", "out", "+=", "'/'", "out", "+=", "tag", ".", "tag", "attributes", "=", "[", "]", "for", "(", "key", ",", "val", ")", "in", "tag", ".", "attributes", ".", "items", "(", ")", ":", "aout", "=", "key", "if", "(", "val", "is", "not", "None", ")", ":", "aout", "+=", "(", "'='", "+", "_quotify", "(", "val", ")", ")", "attributes", ".", "append", "(", "aout", ")", "if", "attributes", ":", "out", "+=", "(", "' '", "+", "' '", ".", "join", "(", "attributes", ")", ")", "if", "(", "tag", ".", "tag_type", "==", "HtmlTagType", ".", "UNPAIRED_TAG", ")", ":", "out", "+=", "'/'", "return", "(", "out", "+", "'>'", ")" ]
converts a tag into a string when a slice [tag .
train
false
45,299
def last(seq): return tail(1, seq)[0]
[ "def", "last", "(", "seq", ")", ":", "return", "tail", "(", "1", ",", "seq", ")", "[", "0", "]" ]
the last element in a sequence .
train
false
45,300
def _push_status(data, item): status = item['status'].lower() if ('id' in item): if (('already pushed' in status) or ('already exists' in status)): already_pushed = data.setdefault('Layers', {}).setdefault('Already_Pushed', []) already_pushed.append(item['id']) elif (('successfully pushed' in status) or (status == 'pushed')): pushed = data.setdefault('Layers', {}).setdefault('Pushed', []) pushed.append(item['id'])
[ "def", "_push_status", "(", "data", ",", "item", ")", ":", "status", "=", "item", "[", "'status'", "]", ".", "lower", "(", ")", "if", "(", "'id'", "in", "item", ")", ":", "if", "(", "(", "'already pushed'", "in", "status", ")", "or", "(", "'already exists'", "in", "status", ")", ")", ":", "already_pushed", "=", "data", ".", "setdefault", "(", "'Layers'", ",", "{", "}", ")", ".", "setdefault", "(", "'Already_Pushed'", ",", "[", "]", ")", "already_pushed", ".", "append", "(", "item", "[", "'id'", "]", ")", "elif", "(", "(", "'successfully pushed'", "in", "status", ")", "or", "(", "status", "==", "'pushed'", ")", ")", ":", "pushed", "=", "data", ".", "setdefault", "(", "'Layers'", ",", "{", "}", ")", ".", "setdefault", "(", "'Pushed'", ",", "[", "]", ")", "pushed", ".", "append", "(", "item", "[", "'id'", "]", ")" ]
process a status update from a docker push .
train
true
45,301
def talkerIndication(): a = TpPd(pd=6) b = MessageType(mesType=17) c = MobileStationClassmark2() d = MobileId() packet = (((a / b) / c) / d) return packet
[ "def", "talkerIndication", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "6", ")", "b", "=", "MessageType", "(", "mesType", "=", "17", ")", "c", "=", "MobileStationClassmark2", "(", ")", "d", "=", "MobileId", "(", ")", "packet", "=", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "return", "packet" ]
talker indication section 9 .
train
true
45,302
def stde_median(data, axis=None): def _stdemed_1D(data): data = np.sort(data.compressed()) n = len(data) z = 2.5758293035489004 k = int(np.round((((n + 1) / 2.0) - (z * np.sqrt((n / 4.0)))), 0)) return ((data[(n - k)] - data[(k - 1)]) / (2.0 * z)) data = ma.array(data, copy=False, subok=True) if (axis is None): return _stdemed_1D(data) else: if (data.ndim > 2): raise ValueError(("Array 'data' must be at most two dimensional, but got data.ndim = %d" % data.ndim)) return ma.apply_along_axis(_stdemed_1D, axis, data)
[ "def", "stde_median", "(", "data", ",", "axis", "=", "None", ")", ":", "def", "_stdemed_1D", "(", "data", ")", ":", "data", "=", "np", ".", "sort", "(", "data", ".", "compressed", "(", ")", ")", "n", "=", "len", "(", "data", ")", "z", "=", "2.5758293035489004", "k", "=", "int", "(", "np", ".", "round", "(", "(", "(", "(", "n", "+", "1", ")", "/", "2.0", ")", "-", "(", "z", "*", "np", ".", "sqrt", "(", "(", "n", "/", "4.0", ")", ")", ")", ")", ",", "0", ")", ")", "return", "(", "(", "data", "[", "(", "n", "-", "k", ")", "]", "-", "data", "[", "(", "k", "-", "1", ")", "]", ")", "/", "(", "2.0", "*", "z", ")", ")", "data", "=", "ma", ".", "array", "(", "data", ",", "copy", "=", "False", ",", "subok", "=", "True", ")", "if", "(", "axis", "is", "None", ")", ":", "return", "_stdemed_1D", "(", "data", ")", "else", ":", "if", "(", "data", ".", "ndim", ">", "2", ")", ":", "raise", "ValueError", "(", "(", "\"Array 'data' must be at most two dimensional, but got data.ndim = %d\"", "%", "data", ".", "ndim", ")", ")", "return", "ma", ".", "apply_along_axis", "(", "_stdemed_1D", ",", "axis", ",", "data", ")" ]
returns the mckean-schrader estimate of the standard error of the sample median along the given axis .
train
false
45,303
def ip_for_event(event): eth = dpid_to_str(event.dpid, True).split('|')[0].replace('-', ':') return EthAddr(eth)
[ "def", "ip_for_event", "(", "event", ")", ":", "eth", "=", "dpid_to_str", "(", "event", ".", "dpid", ",", "True", ")", ".", "split", "(", "'|'", ")", "[", "0", "]", ".", "replace", "(", "'-'", ",", "':'", ")", "return", "EthAddr", "(", "eth", ")" ]
use a switchs dpid as an ethaddr .
train
false
45,304
@register.filter def bootstrap_setting(value): return get_bootstrap_setting(value)
[ "@", "register", ".", "filter", "def", "bootstrap_setting", "(", "value", ")", ":", "return", "get_bootstrap_setting", "(", "value", ")" ]
a simple way to read bootstrap settings in a template .
train
false
45,305
def order_recursive(data): if isinstance(data, dict): return collections.OrderedDict(sorted(((key, order_recursive(value)) for (key, value) in data.items()), key=(lambda item: item[0]))) if isinstance(data, list): return [order_recursive(value) for value in data] return data
[ "def", "order_recursive", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "dict", ")", ":", "return", "collections", ".", "OrderedDict", "(", "sorted", "(", "(", "(", "key", ",", "order_recursive", "(", "value", ")", ")", "for", "(", "key", ",", "value", ")", "in", "data", ".", "items", "(", ")", ")", ",", "key", "=", "(", "lambda", "item", ":", "item", "[", "0", "]", ")", ")", ")", "if", "isinstance", "(", "data", ",", "list", ")", ":", "return", "[", "order_recursive", "(", "value", ")", "for", "value", "in", "data", "]", "return", "data" ]
recursively sort keys of input data and all its nested dictionaries .
train
false
45,306
def make_form_entry(parent, label, borderwidth=None): frame = Frame(parent) frame.pack(fill=X) label = Label(frame, text=label) label.pack(side=LEFT) if (borderwidth is None): entry = Entry(frame, relief=SUNKEN) else: entry = Entry(frame, relief=SUNKEN, borderwidth=borderwidth) entry.pack(side=LEFT, fill=X, expand=1) return (entry, frame)
[ "def", "make_form_entry", "(", "parent", ",", "label", ",", "borderwidth", "=", "None", ")", ":", "frame", "=", "Frame", "(", "parent", ")", "frame", ".", "pack", "(", "fill", "=", "X", ")", "label", "=", "Label", "(", "frame", ",", "text", "=", "label", ")", "label", ".", "pack", "(", "side", "=", "LEFT", ")", "if", "(", "borderwidth", "is", "None", ")", ":", "entry", "=", "Entry", "(", "frame", ",", "relief", "=", "SUNKEN", ")", "else", ":", "entry", "=", "Entry", "(", "frame", ",", "relief", "=", "SUNKEN", ",", "borderwidth", "=", "borderwidth", ")", "entry", ".", "pack", "(", "side", "=", "LEFT", ",", "fill", "=", "X", ",", "expand", "=", "1", ")", "return", "(", "entry", ",", "frame", ")" ]
subroutine to create a form entry .
train
false
45,307
def webserver_for_test(test, url_path, response_content): app = Klein() @app.route(url_path) def _respond(request): return response_content factory = Site(app.resource()) endpoint = serverFromString(reactor, 'tcp:0') listening = endpoint.listen(factory) def stop_port(port): test.addCleanup(port.stopListening) return port listening.addCallback(stop_port) return listening
[ "def", "webserver_for_test", "(", "test", ",", "url_path", ",", "response_content", ")", ":", "app", "=", "Klein", "(", ")", "@", "app", ".", "route", "(", "url_path", ")", "def", "_respond", "(", "request", ")", ":", "return", "response_content", "factory", "=", "Site", "(", "app", ".", "resource", "(", ")", ")", "endpoint", "=", "serverFromString", "(", "reactor", ",", "'tcp:0'", ")", "listening", "=", "endpoint", ".", "listen", "(", "factory", ")", "def", "stop_port", "(", "port", ")", ":", "test", ".", "addCleanup", "(", "port", ".", "stopListening", ")", "return", "port", "listening", ".", "addCallback", "(", "stop_port", ")", "return", "listening" ]
create a webserver that serves response_content from url_path .
train
false
45,308
def circle_perimeter(r, c, radius, method='bresenham', shape=None): return _circle_perimeter(r, c, radius, method, shape)
[ "def", "circle_perimeter", "(", "r", ",", "c", ",", "radius", ",", "method", "=", "'bresenham'", ",", "shape", "=", "None", ")", ":", "return", "_circle_perimeter", "(", "r", ",", "c", ",", "radius", ",", "method", ",", "shape", ")" ]
generate circle perimeter coordinates .
train
false
45,309
def parse_kwargs(parser): name = parser.stream.expect('name').value parser.stream.expect('assign') if parser.stream.current.test('string'): value = parser.parse_expression() else: value = nodes.Const(next(parser.stream).value) return (name, value)
[ "def", "parse_kwargs", "(", "parser", ")", ":", "name", "=", "parser", ".", "stream", ".", "expect", "(", "'name'", ")", ".", "value", "parser", ".", "stream", ".", "expect", "(", "'assign'", ")", "if", "parser", ".", "stream", ".", "current", ".", "test", "(", "'string'", ")", ":", "value", "=", "parser", ".", "parse_expression", "(", ")", "else", ":", "value", "=", "nodes", ".", "Const", "(", "next", "(", "parser", ".", "stream", ")", ".", "value", ")", "return", "(", "name", ",", "value", ")" ]
parses keyword arguments in tags .
train
false
45,311
def run_python_job(job): log('Running python job.\n') sys.path.append(os.path.realpath(job.expt_dir)) params = {} for param in job.param: dbl_vals = param.dbl_val._values int_vals = param.int_val._values str_vals = param.str_val._values if (len(dbl_vals) > 0): params[param.name] = np.array(dbl_vals) elif (len(int_vals) > 0): params[param.name] = np.array(int_vals, dtype=int) elif (len(str_vals) > 0): params[param.name] = str_vals else: raise Exception('Unknown parameter type.') module = __import__(job.name) result = module.main(job.id, params) log(('Got result %f\n' % result)) job.value = result save_job(job)
[ "def", "run_python_job", "(", "job", ")", ":", "log", "(", "'Running python job.\\n'", ")", "sys", ".", "path", ".", "append", "(", "os", ".", "path", ".", "realpath", "(", "job", ".", "expt_dir", ")", ")", "params", "=", "{", "}", "for", "param", "in", "job", ".", "param", ":", "dbl_vals", "=", "param", ".", "dbl_val", ".", "_values", "int_vals", "=", "param", ".", "int_val", ".", "_values", "str_vals", "=", "param", ".", "str_val", ".", "_values", "if", "(", "len", "(", "dbl_vals", ")", ">", "0", ")", ":", "params", "[", "param", ".", "name", "]", "=", "np", ".", "array", "(", "dbl_vals", ")", "elif", "(", "len", "(", "int_vals", ")", ">", "0", ")", ":", "params", "[", "param", ".", "name", "]", "=", "np", ".", "array", "(", "int_vals", ",", "dtype", "=", "int", ")", "elif", "(", "len", "(", "str_vals", ")", ">", "0", ")", ":", "params", "[", "param", ".", "name", "]", "=", "str_vals", "else", ":", "raise", "Exception", "(", "'Unknown parameter type.'", ")", "module", "=", "__import__", "(", "job", ".", "name", ")", "result", "=", "module", ".", "main", "(", "job", ".", "id", ",", "params", ")", "log", "(", "(", "'Got result %f\\n'", "%", "result", ")", ")", "job", ".", "value", "=", "result", "save_job", "(", "job", ")" ]
run a python function .
train
false
45,315
def _write_to_lmdb(db, key, value): success = False while (not success): txn = db.begin(write=True) try: txn.put(key, value) txn.commit() success = True except lmdb.MapFullError: txn.abort() curr_limit = db.info()['map_size'] new_limit = (curr_limit * 2) db.set_mapsize(new_limit)
[ "def", "_write_to_lmdb", "(", "db", ",", "key", ",", "value", ")", ":", "success", "=", "False", "while", "(", "not", "success", ")", ":", "txn", "=", "db", ".", "begin", "(", "write", "=", "True", ")", "try", ":", "txn", ".", "put", "(", "key", ",", "value", ")", "txn", ".", "commit", "(", ")", "success", "=", "True", "except", "lmdb", ".", "MapFullError", ":", "txn", ".", "abort", "(", ")", "curr_limit", "=", "db", ".", "info", "(", ")", "[", "'map_size'", "]", "new_limit", "=", "(", "curr_limit", "*", "2", ")", "db", ".", "set_mapsize", "(", "new_limit", ")" ]
write to db .
train
false
45,316
@print_duration def asyncio_run(urls): pass
[ "@", "print_duration", "def", "asyncio_run", "(", "urls", ")", ":", "pass" ]
download a bunch of urls via async io .
train
false
45,318
def test_mpl_preserve_width(): f = create_figure() (width, height) = f.canvas.get_width_height() s = mplhooks.figure_to_tight_array(f, (0.5 * width), (0.5 * height), True) exp = width (newwidth, newheight) = f.canvas.get_width_height() obs = newwidth plt.close(f) assert (exp == obs)
[ "def", "test_mpl_preserve_width", "(", ")", ":", "f", "=", "create_figure", "(", ")", "(", "width", ",", "height", ")", "=", "f", ".", "canvas", ".", "get_width_height", "(", ")", "s", "=", "mplhooks", ".", "figure_to_tight_array", "(", "f", ",", "(", "0.5", "*", "width", ")", ",", "(", "0.5", "*", "height", ")", ",", "True", ")", "exp", "=", "width", "(", "newwidth", ",", "newheight", ")", "=", "f", ".", "canvas", ".", "get_width_height", "(", ")", "obs", "=", "newwidth", "plt", ".", "close", "(", "f", ")", "assert", "(", "exp", "==", "obs", ")" ]
make sure that the figure preserves width settings .
train
false
45,319
def mergeorder(items, working_dir): if (len(items) < 2): return items middle = (len(items) / 2) left = mergeorder(items[:middle], working_dir) right = mergeorder(items[middle:], working_dir) return mergetree(left, right, working_dir)
[ "def", "mergeorder", "(", "items", ",", "working_dir", ")", ":", "if", "(", "len", "(", "items", ")", "<", "2", ")", ":", "return", "items", "middle", "=", "(", "len", "(", "items", ")", "/", "2", ")", "left", "=", "mergeorder", "(", "items", "[", ":", "middle", "]", ",", "working_dir", ")", "right", "=", "mergeorder", "(", "items", "[", "middle", ":", "]", ",", "working_dir", ")", "return", "mergetree", "(", "left", ",", "right", ",", "working_dir", ")" ]
code taken from URL .
train
false
45,320
@register(u'prefix-meta') def prefix_meta(event): event.cli.input_processor.feed(KeyPress(Keys.Escape))
[ "@", "register", "(", "u'prefix-meta'", ")", "def", "prefix_meta", "(", "event", ")", ":", "event", ".", "cli", ".", "input_processor", ".", "feed", "(", "KeyPress", "(", "Keys", ".", "Escape", ")", ")" ]
metafy the next character typed .
train
false
45,322
def get_component_review_by_repository_review_id_component_id(app, repository_review_id, component_id): sa_session = app.model.context.current return sa_session.query(app.model.ComponentReview).filter(and_((app.model.ComponentReview.table.c.repository_review_id == app.security.decode_id(repository_review_id)), (app.model.ComponentReview.table.c.component_id == app.security.decode_id(component_id)))).first()
[ "def", "get_component_review_by_repository_review_id_component_id", "(", "app", ",", "repository_review_id", ",", "component_id", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "ComponentReview", ")", ".", "filter", "(", "and_", "(", "(", "app", ".", "model", ".", "ComponentReview", ".", "table", ".", "c", ".", "repository_review_id", "==", "app", ".", "security", ".", "decode_id", "(", "repository_review_id", ")", ")", ",", "(", "app", ".", "model", ".", "ComponentReview", ".", "table", ".", "c", ".", "component_id", "==", "app", ".", "security", ".", "decode_id", "(", "component_id", ")", ")", ")", ")", ".", "first", "(", ")" ]
get a component_review from the database via repository_review_id and component_id .
train
false
45,323
def validate_auth_info(self, context, user_ref, tenant_ref): if (not user_ref.get('enabled', True)): msg = ('User is disabled: %s' % user_ref['id']) LOG.warning(msg) raise exception.Unauthorized(msg) user_domain_ref = self.identity_api.get_domain(context, user_ref['domain_id']) if (user_domain_ref and (not user_domain_ref.get('enabled', True))): msg = ('Domain is disabled: %s' % user_domain_ref['id']) LOG.warning(msg) raise exception.Unauthorized(msg) if tenant_ref: if (not tenant_ref.get('enabled', True)): msg = ('Tenant is disabled: %s' % tenant_ref['id']) LOG.warning(msg) raise exception.Unauthorized(msg) project_domain_ref = self.identity_api.get_domain(context, tenant_ref['domain_id']) if (project_domain_ref and (not project_domain_ref.get('enabled', True))): msg = ('Domain is disabled: %s' % project_domain_ref['id']) LOG.warning(msg) raise exception.Unauthorized(msg)
[ "def", "validate_auth_info", "(", "self", ",", "context", ",", "user_ref", ",", "tenant_ref", ")", ":", "if", "(", "not", "user_ref", ".", "get", "(", "'enabled'", ",", "True", ")", ")", ":", "msg", "=", "(", "'User is disabled: %s'", "%", "user_ref", "[", "'id'", "]", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "exception", ".", "Unauthorized", "(", "msg", ")", "user_domain_ref", "=", "self", ".", "identity_api", ".", "get_domain", "(", "context", ",", "user_ref", "[", "'domain_id'", "]", ")", "if", "(", "user_domain_ref", "and", "(", "not", "user_domain_ref", ".", "get", "(", "'enabled'", ",", "True", ")", ")", ")", ":", "msg", "=", "(", "'Domain is disabled: %s'", "%", "user_domain_ref", "[", "'id'", "]", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "exception", ".", "Unauthorized", "(", "msg", ")", "if", "tenant_ref", ":", "if", "(", "not", "tenant_ref", ".", "get", "(", "'enabled'", ",", "True", ")", ")", ":", "msg", "=", "(", "'Tenant is disabled: %s'", "%", "tenant_ref", "[", "'id'", "]", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "exception", ".", "Unauthorized", "(", "msg", ")", "project_domain_ref", "=", "self", ".", "identity_api", ".", "get_domain", "(", "context", ",", "tenant_ref", "[", "'domain_id'", "]", ")", "if", "(", "project_domain_ref", "and", "(", "not", "project_domain_ref", ".", "get", "(", "'enabled'", ",", "True", ")", ")", ")", ":", "msg", "=", "(", "'Domain is disabled: %s'", "%", "project_domain_ref", "[", "'id'", "]", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "exception", ".", "Unauthorized", "(", "msg", ")" ]
validate user and tenant auth info .
train
false
45,324
def generate_password_hash(password, rounds=None): if (rounds is None): rounds = settings.BCRYPT_LOG_ROUNDS if (not password): raise ValueError('Password must be non-empty.') pw_hash = bcrypt.hashpw(unicode(password).encode('utf-8'), bcrypt.gensalt(rounds)) return pw_hash
[ "def", "generate_password_hash", "(", "password", ",", "rounds", "=", "None", ")", ":", "if", "(", "rounds", "is", "None", ")", ":", "rounds", "=", "settings", ".", "BCRYPT_LOG_ROUNDS", "if", "(", "not", "password", ")", ":", "raise", "ValueError", "(", "'Password must be non-empty.'", ")", "pw_hash", "=", "bcrypt", ".", "hashpw", "(", "unicode", "(", "password", ")", ".", "encode", "(", "'utf-8'", ")", ",", "bcrypt", ".", "gensalt", "(", "rounds", ")", ")", "return", "pw_hash" ]
generates a password hash using bcrypt .
train
false
45,325
def get_temp_filename(): with NamedTemporaryFile() as tempfile: return tempfile.name
[ "def", "get_temp_filename", "(", ")", ":", "with", "NamedTemporaryFile", "(", ")", "as", "tempfile", ":", "return", "tempfile", ".", "name" ]
get a unique .
train
false
45,326
def start_http_server(listen_port): _verify_environment() logging.info("HTTP server is starting, port: {}, test-UUID: '{}'".format(listen_port, os.environ[TEST_UUID_VARNAME])) test_server = ThreadingSimpleServer(('', listen_port), TestHTTPRequestHandler) def sigterm_handler(_signo, _stack_frame): test_server.server_close() logging.info('HTTP server is terminating') sys.exit(0) signal.signal(signal.SIGTERM, sigterm_handler) signal.signal(signal.SIGINT, sigterm_handler) test_server.serve_forever()
[ "def", "start_http_server", "(", "listen_port", ")", ":", "_verify_environment", "(", ")", "logging", ".", "info", "(", "\"HTTP server is starting, port: {}, test-UUID: '{}'\"", ".", "format", "(", "listen_port", ",", "os", ".", "environ", "[", "TEST_UUID_VARNAME", "]", ")", ")", "test_server", "=", "ThreadingSimpleServer", "(", "(", "''", ",", "listen_port", ")", ",", "TestHTTPRequestHandler", ")", "def", "sigterm_handler", "(", "_signo", ",", "_stack_frame", ")", ":", "test_server", ".", "server_close", "(", ")", "logging", ".", "info", "(", "'HTTP server is terminating'", ")", "sys", ".", "exit", "(", "0", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "sigterm_handler", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "sigterm_handler", ")", "test_server", ".", "serve_forever", "(", ")" ]
start the test server this function makes sure that the environment is sane and signals are properly handled .
train
false
45,327
def _get_unicode_checker(): if hasattr(_get_unicode_checker, 'UnicodeOutputChecker'): return _get_unicode_checker.UnicodeOutputChecker() import doctest import re class UnicodeOutputChecker(doctest.OutputChecker, ): '\n Copied from doctest_nose_plugin.py from the nltk project:\n https://github.com/nltk/nltk\n ' _literal_re = re.compile('(\\W|^)[uU]([rR]?[\\\'\\"])', re.UNICODE) def check_output(self, want, got, optionflags): res = doctest.OutputChecker.check_output(self, want, got, optionflags) if res: return True if (not (optionflags & _get_allow_unicode_flag())): return False else: def remove_u_prefixes(txt): return re.sub(self._literal_re, '\\1\\2', txt) want = remove_u_prefixes(want) got = remove_u_prefixes(got) res = doctest.OutputChecker.check_output(self, want, got, optionflags) return res _get_unicode_checker.UnicodeOutputChecker = UnicodeOutputChecker return _get_unicode_checker.UnicodeOutputChecker()
[ "def", "_get_unicode_checker", "(", ")", ":", "if", "hasattr", "(", "_get_unicode_checker", ",", "'UnicodeOutputChecker'", ")", ":", "return", "_get_unicode_checker", ".", "UnicodeOutputChecker", "(", ")", "import", "doctest", "import", "re", "class", "UnicodeOutputChecker", "(", "doctest", ".", "OutputChecker", ",", ")", ":", "_literal_re", "=", "re", ".", "compile", "(", "'(\\\\W|^)[uU]([rR]?[\\\\\\'\\\\\"])'", ",", "re", ".", "UNICODE", ")", "def", "check_output", "(", "self", ",", "want", ",", "got", ",", "optionflags", ")", ":", "res", "=", "doctest", ".", "OutputChecker", ".", "check_output", "(", "self", ",", "want", ",", "got", ",", "optionflags", ")", "if", "res", ":", "return", "True", "if", "(", "not", "(", "optionflags", "&", "_get_allow_unicode_flag", "(", ")", ")", ")", ":", "return", "False", "else", ":", "def", "remove_u_prefixes", "(", "txt", ")", ":", "return", "re", ".", "sub", "(", "self", ".", "_literal_re", ",", "'\\\\1\\\\2'", ",", "txt", ")", "want", "=", "remove_u_prefixes", "(", "want", ")", "got", "=", "remove_u_prefixes", "(", "got", ")", "res", "=", "doctest", ".", "OutputChecker", ".", "check_output", "(", "self", ",", "want", ",", "got", ",", "optionflags", ")", "return", "res", "_get_unicode_checker", ".", "UnicodeOutputChecker", "=", "UnicodeOutputChecker", "return", "_get_unicode_checker", ".", "UnicodeOutputChecker", "(", ")" ]
returns a doctest .
train
false
45,328
def update_item(name, id_, field=None, value=None, postdata=None): if (field and value): if postdata: raise SaltInvocationError('Either a field and a value, or a chunk of POST data, may be specified, but not both.') postdata = {name.title(): {field: value}} if (postdata is None): raise SaltInvocationError('Either a field and a value, or a chunk of POST data must be specified.') (status, result) = _query(action=name, command=id_, method='POST', data=json.dumps(postdata)) return result
[ "def", "update_item", "(", "name", ",", "id_", ",", "field", "=", "None", ",", "value", "=", "None", ",", "postdata", "=", "None", ")", ":", "if", "(", "field", "and", "value", ")", ":", "if", "postdata", ":", "raise", "SaltInvocationError", "(", "'Either a field and a value, or a chunk of POST data, may be specified, but not both.'", ")", "postdata", "=", "{", "name", ".", "title", "(", ")", ":", "{", "field", ":", "value", "}", "}", "if", "(", "postdata", "is", "None", ")", ":", "raise", "SaltInvocationError", "(", "'Either a field and a value, or a chunk of POST data must be specified.'", ")", "(", "status", ",", "result", ")", "=", "_query", "(", "action", "=", "name", ",", "command", "=", "id_", ",", "method", "=", "'POST'", ",", "data", "=", "json", ".", "dumps", "(", "postdata", ")", ")", "return", "result" ]
update an item .
train
true
45,330
def submit_reset_problem_attempts_in_entrance_exam(request, usage_key, student): modulestore().get_item(usage_key) task_type = 'reset_problem_attempts' task_class = reset_problem_attempts (task_input, task_key) = encode_entrance_exam_and_student_input(usage_key, student) return submit_task(request, task_type, task_class, usage_key.course_key, task_input, task_key)
[ "def", "submit_reset_problem_attempts_in_entrance_exam", "(", "request", ",", "usage_key", ",", "student", ")", ":", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", "task_type", "=", "'reset_problem_attempts'", "task_class", "=", "reset_problem_attempts", "(", "task_input", ",", "task_key", ")", "=", "encode_entrance_exam_and_student_input", "(", "usage_key", ",", "student", ")", "return", "submit_task", "(", "request", ",", "task_type", ",", "task_class", ",", "usage_key", ".", "course_key", ",", "task_input", ",", "task_key", ")" ]
request to have attempts reset for a entrance exam as a background task .
train
false
45,331
def user_avatar(request, user, size): user = get_object_or_404(User, username=user) if (user.email == u'noreply@weblate.org'): return redirect(get_fallback_avatar_url(size)) response = HttpResponse(content_type=u'image/png', content=get_avatar_image(request, user, size)) patch_response_headers(response, ((3600 * 24) * 7)) return response
[ "def", "user_avatar", "(", "request", ",", "user", ",", "size", ")", ":", "user", "=", "get_object_or_404", "(", "User", ",", "username", "=", "user", ")", "if", "(", "user", ".", "email", "==", "u'noreply@weblate.org'", ")", ":", "return", "redirect", "(", "get_fallback_avatar_url", "(", "size", ")", ")", "response", "=", "HttpResponse", "(", "content_type", "=", "u'image/png'", ",", "content", "=", "get_avatar_image", "(", "request", ",", "user", ",", "size", ")", ")", "patch_response_headers", "(", "response", ",", "(", "(", "3600", "*", "24", ")", "*", "7", ")", ")", "return", "response" ]
user avatar page .
train
false
45,332
def idrac_general(blade_name, command, idrac_password=None, host=None, admin_username=None, admin_password=None): module_network = network_info(host, admin_username, admin_password, blade_name) if (idrac_password is not None): password = idrac_password else: password = admin_password idrac_ip = module_network['Network']['IP Address'] ret = __execute_ret(command, host=idrac_ip, admin_username='root', admin_password=password) if (ret['retcode'] == 0): return ret['stdout'] else: return ret
[ "def", "idrac_general", "(", "blade_name", ",", "command", ",", "idrac_password", "=", "None", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "module_network", "=", "network_info", "(", "host", ",", "admin_username", ",", "admin_password", ",", "blade_name", ")", "if", "(", "idrac_password", "is", "not", "None", ")", ":", "password", "=", "idrac_password", "else", ":", "password", "=", "admin_password", "idrac_ip", "=", "module_network", "[", "'Network'", "]", "[", "'IP Address'", "]", "ret", "=", "__execute_ret", "(", "command", ",", "host", "=", "idrac_ip", ",", "admin_username", "=", "'root'", ",", "admin_password", "=", "password", ")", "if", "(", "ret", "[", "'retcode'", "]", "==", "0", ")", ":", "return", "ret", "[", "'stdout'", "]", "else", ":", "return", "ret" ]
run a generic racadm command against a particular blade in a chassis .
train
true
45,333
def handle_redirects(func): MAX_REDIRECTS = 5 @functools.wraps(func) def wrapped(self, method, url, body, headers): for _ in xrange(MAX_REDIRECTS): try: return func(self, method, url, body, headers) except exception.RedirectException as redirect: if (redirect.url is None): raise exception.InvalidRedirect() url = redirect.url raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS) return wrapped
[ "def", "handle_redirects", "(", "func", ")", ":", "MAX_REDIRECTS", "=", "5", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "self", ",", "method", ",", "url", ",", "body", ",", "headers", ")", ":", "for", "_", "in", "xrange", "(", "MAX_REDIRECTS", ")", ":", "try", ":", "return", "func", "(", "self", ",", "method", ",", "url", ",", "body", ",", "headers", ")", "except", "exception", ".", "RedirectException", "as", "redirect", ":", "if", "(", "redirect", ".", "url", "is", "None", ")", ":", "raise", "exception", ".", "InvalidRedirect", "(", ")", "url", "=", "redirect", ".", "url", "raise", "exception", ".", "MaxRedirectsExceeded", "(", "redirects", "=", "MAX_REDIRECTS", ")", "return", "wrapped" ]
wrap the _do_request function to handle http redirects .
train
false
45,334
def TCVoltsToTemp(TCType, TCVolts, CJTempK): if (os.name == 'nt'): staticLib = ctypes.windll.LoadLibrary('labjackud') pTCTempK = ctypes.c_double() ec = staticLib.TCVoltsToTemp(ctypes.c_long(TCType), ctypes.c_double(TCVolts), ctypes.c_double(CJTempK), ctypes.byref(pTCTempK)) if (ec != 0): raise LabJackException(ec) return pTCTempK.value else: raise LabJackException(0, 'Function only supported for Windows')
[ "def", "TCVoltsToTemp", "(", "TCType", ",", "TCVolts", ",", "CJTempK", ")", ":", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "staticLib", "=", "ctypes", ".", "windll", ".", "LoadLibrary", "(", "'labjackud'", ")", "pTCTempK", "=", "ctypes", ".", "c_double", "(", ")", "ec", "=", "staticLib", ".", "TCVoltsToTemp", "(", "ctypes", ".", "c_long", "(", "TCType", ")", ",", "ctypes", ".", "c_double", "(", "TCVolts", ")", ",", "ctypes", ".", "c_double", "(", "CJTempK", ")", ",", "ctypes", ".", "byref", "(", "pTCTempK", ")", ")", "if", "(", "ec", "!=", "0", ")", ":", "raise", "LabJackException", "(", "ec", ")", "return", "pTCTempK", ".", "value", "else", ":", "raise", "LabJackException", "(", "0", ",", "'Function only supported for Windows'", ")" ]
converts a thermo couple voltage reading to an appropriate temperature reading .
train
false
45,335
def _throw_no_creds(): msg = 'An infoblox server, username, and password must be specified or configured via pillar' raise SaltInvocationError(msg)
[ "def", "_throw_no_creds", "(", ")", ":", "msg", "=", "'An infoblox server, username, and password must be specified or configured via pillar'", "raise", "SaltInvocationError", "(", "msg", ")" ]
helper function to log no credentials found error .
train
false
45,336
@hook.command() def xkcd(text): return xkcd_search(text)
[ "@", "hook", ".", "command", "(", ")", "def", "xkcd", "(", "text", ")", ":", "return", "xkcd_search", "(", "text", ")" ]
xkcd <search term> - search for xkcd comic matching <search term> .
train
false
45,337
def get_enabled(): return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return a list of service that are enabled on boot cli example: .
train
false
45,338
def compute_memory_extents(context, builder, lower, upper, data): data_ptr_as_int = builder.ptrtoint(data, lower.type) start = builder.add(data_ptr_as_int, lower) end = builder.add(data_ptr_as_int, upper) return (start, end)
[ "def", "compute_memory_extents", "(", "context", ",", "builder", ",", "lower", ",", "upper", ",", "data", ")", ":", "data_ptr_as_int", "=", "builder", ".", "ptrtoint", "(", "data", ",", "lower", ".", "type", ")", "start", "=", "builder", ".", "add", "(", "data_ptr_as_int", ",", "lower", ")", "end", "=", "builder", ".", "add", "(", "data_ptr_as_int", ",", "upper", ")", "return", "(", "start", ",", "end", ")" ]
given [lower .
train
false
45,339
def mutagen_call(action, path, func, *args, **kwargs): try: return func(*args, **kwargs) except mutagen.MutagenError as exc: log.debug(u'%s failed: %s', action, six.text_type(exc)) raise UnreadableFileError(path, six.text_type(exc)) except Exception as exc: log.debug(u'%s', traceback.format_exc()) log.error(u'uncaught Mutagen exception in %s: %s', action, exc) raise MutagenError(path, exc)
[ "def", "mutagen_call", "(", "action", ",", "path", ",", "func", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "except", "mutagen", ".", "MutagenError", "as", "exc", ":", "log", ".", "debug", "(", "u'%s failed: %s'", ",", "action", ",", "six", ".", "text_type", "(", "exc", ")", ")", "raise", "UnreadableFileError", "(", "path", ",", "six", ".", "text_type", "(", "exc", ")", ")", "except", "Exception", "as", "exc", ":", "log", ".", "debug", "(", "u'%s'", ",", "traceback", ".", "format_exc", "(", ")", ")", "log", ".", "error", "(", "u'uncaught Mutagen exception in %s: %s'", ",", "action", ",", "exc", ")", "raise", "MutagenError", "(", "path", ",", "exc", ")" ]
call a mutagen function with appropriate error handling .
train
true
45,340
def validate_image(values): status = values.get('status') status = values.get('status', None) if (not status): msg = 'Image status is required.' raise exception.Invalid(msg) if (status not in STATUSES): msg = ("Invalid image status '%s' for image." % status) raise exception.Invalid(msg) return values
[ "def", "validate_image", "(", "values", ")", ":", "status", "=", "values", ".", "get", "(", "'status'", ")", "status", "=", "values", ".", "get", "(", "'status'", ",", "None", ")", "if", "(", "not", "status", ")", ":", "msg", "=", "'Image status is required.'", "raise", "exception", ".", "Invalid", "(", "msg", ")", "if", "(", "status", "not", "in", "STATUSES", ")", ":", "msg", "=", "(", "\"Invalid image status '%s' for image.\"", "%", "status", ")", "raise", "exception", ".", "Invalid", "(", "msg", ")", "return", "values" ]
validates the incoming data and raises a invalid exception if anything is out of order .
train
false
45,341
def check_job_permission(view_func): def decorate(request, *args, **kwargs): jobid = kwargs['job'] try: job = get_job(request, job_id=jobid) except ApplicationNotRunning as e: LOG.warn(('Job %s has not yet been accepted by the RM, will poll for status.' % jobid)) return job_not_assigned(request, jobid, request.path) if ((not SHARE_JOBS.get()) and (not request.user.is_superuser) and (job.user != request.user.username) and (not can_view_job(request.user.username, job))): raise PopupException((_("You don't have permission to access job %(id)s.") % {'id': jobid})) kwargs['job'] = job return view_func(request, *args, **kwargs) return wraps(view_func)(decorate)
[ "def", "check_job_permission", "(", "view_func", ")", ":", "def", "decorate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "jobid", "=", "kwargs", "[", "'job'", "]", "try", ":", "job", "=", "get_job", "(", "request", ",", "job_id", "=", "jobid", ")", "except", "ApplicationNotRunning", "as", "e", ":", "LOG", ".", "warn", "(", "(", "'Job %s has not yet been accepted by the RM, will poll for status.'", "%", "jobid", ")", ")", "return", "job_not_assigned", "(", "request", ",", "jobid", ",", "request", ".", "path", ")", "if", "(", "(", "not", "SHARE_JOBS", ".", "get", "(", ")", ")", "and", "(", "not", "request", ".", "user", ".", "is_superuser", ")", "and", "(", "job", ".", "user", "!=", "request", ".", "user", ".", "username", ")", "and", "(", "not", "can_view_job", "(", "request", ".", "user", ".", "username", ",", "job", ")", ")", ")", ":", "raise", "PopupException", "(", "(", "_", "(", "\"You don't have permission to access job %(id)s.\"", ")", "%", "{", "'id'", ":", "jobid", "}", ")", ")", "kwargs", "[", "'job'", "]", "=", "job", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wraps", "(", "view_func", ")", "(", "decorate", ")" ]
ensure that the user has access to the job .
train
false
45,343
def create_theme_images(theme, placement, hash_): color = random.choice(ImageColor.colormap.keys()) image = Image.new('RGB', (3000, 200), color) tmp_path = os.path.join(settings.TMP_PATH, 'persona_{placement}'.format(placement=placement)) if (not os.path.exists(tmp_path)): os.makedirs(tmp_path) tmp_loc = os.path.join(tmp_path, hash_) image.save(tmp_loc, 'jpeg') media_path = os.path.join(user_media_path('addons'), str(theme.id)) if (not os.path.exists(media_path)): os.makedirs(media_path) media_loc = os.path.join(media_path, hash_) image.save(media_loc, 'jpeg')
[ "def", "create_theme_images", "(", "theme", ",", "placement", ",", "hash_", ")", ":", "color", "=", "random", ".", "choice", "(", "ImageColor", ".", "colormap", ".", "keys", "(", ")", ")", "image", "=", "Image", ".", "new", "(", "'RGB'", ",", "(", "3000", ",", "200", ")", ",", "color", ")", "tmp_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "TMP_PATH", ",", "'persona_{placement}'", ".", "format", "(", "placement", "=", "placement", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "tmp_path", ")", ")", ":", "os", ".", "makedirs", "(", "tmp_path", ")", "tmp_loc", "=", "os", ".", "path", ".", "join", "(", "tmp_path", ",", "hash_", ")", "image", ".", "save", "(", "tmp_loc", ",", "'jpeg'", ")", "media_path", "=", "os", ".", "path", ".", "join", "(", "user_media_path", "(", "'addons'", ")", ",", "str", "(", "theme", ".", "id", ")", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "media_path", ")", ")", ":", "os", ".", "makedirs", "(", "media_path", ")", "media_loc", "=", "os", ".", "path", ".", "join", "(", "media_path", ",", "hash_", ")", "image", ".", "save", "(", "media_loc", ",", "'jpeg'", ")" ]
generates 2 images .
train
false
45,344
def prepare_params(modeline, fileconfig, options): params = dict(skip=False, ignore=[], select=[], linters=[]) if options: params['ignore'] = list(options.ignore) params['select'] = list(options.select) for config in filter(None, [modeline, fileconfig]): for key in ('ignore', 'select', 'linters'): params[key] += process_value(key, config.get(key, [])) params['skip'] = bool(int(config.get('skip', False))) params['ignore'] = set(params['ignore']) params['select'] = set(params['select']) return params
[ "def", "prepare_params", "(", "modeline", ",", "fileconfig", ",", "options", ")", ":", "params", "=", "dict", "(", "skip", "=", "False", ",", "ignore", "=", "[", "]", ",", "select", "=", "[", "]", ",", "linters", "=", "[", "]", ")", "if", "options", ":", "params", "[", "'ignore'", "]", "=", "list", "(", "options", ".", "ignore", ")", "params", "[", "'select'", "]", "=", "list", "(", "options", ".", "select", ")", "for", "config", "in", "filter", "(", "None", ",", "[", "modeline", ",", "fileconfig", "]", ")", ":", "for", "key", "in", "(", "'ignore'", ",", "'select'", ",", "'linters'", ")", ":", "params", "[", "key", "]", "+=", "process_value", "(", "key", ",", "config", ".", "get", "(", "key", ",", "[", "]", ")", ")", "params", "[", "'skip'", "]", "=", "bool", "(", "int", "(", "config", ".", "get", "(", "'skip'", ",", "False", ")", ")", ")", "params", "[", "'ignore'", "]", "=", "set", "(", "params", "[", "'ignore'", "]", ")", "params", "[", "'select'", "]", "=", "set", "(", "params", "[", "'select'", "]", ")", "return", "params" ]
prepare and merge a params from modelines and configs .
train
true
45,345
def auth_functions_list(): return _AuthFunctions.keys()
[ "def", "auth_functions_list", "(", ")", ":", "return", "_AuthFunctions", ".", "keys", "(", ")" ]
returns a list of the names of the auth functions available .
train
false
45,346
def trend_xor_encode(str): key = 2824028996 ret = '' pad = (4 - (len(str) % 4)) if (pad == 4): pad = 0 str += ('\x00' * pad) while str: dword = struct.unpack('<L', str[:4])[0] str = str[4:] dword ^= key ret += struct.pack('<L', dword) key = dword return ret
[ "def", "trend_xor_encode", "(", "str", ")", ":", "key", "=", "2824028996", "ret", "=", "''", "pad", "=", "(", "4", "-", "(", "len", "(", "str", ")", "%", "4", ")", ")", "if", "(", "pad", "==", "4", ")", ":", "pad", "=", "0", "str", "+=", "(", "'\\x00'", "*", "pad", ")", "while", "str", ":", "dword", "=", "struct", ".", "unpack", "(", "'<L'", ",", "str", "[", ":", "4", "]", ")", "[", "0", "]", "str", "=", "str", "[", "4", ":", "]", "dword", "^=", "key", "ret", "+=", "struct", ".", "pack", "(", "'<L'", ",", "dword", ")", "key", "=", "dword", "return", "ret" ]
simple bidirectional xor "encryption" routine used by this service .
train
false
45,347
def image_and_format_from_data(data): ba = QByteArray(data) buf = QBuffer(ba) buf.open(QBuffer.ReadOnly) r = QImageReader(buf) fmt = bytes(r.format()).decode(u'utf-8') return (r.read(), fmt)
[ "def", "image_and_format_from_data", "(", "data", ")", ":", "ba", "=", "QByteArray", "(", "data", ")", "buf", "=", "QBuffer", "(", "ba", ")", "buf", ".", "open", "(", "QBuffer", ".", "ReadOnly", ")", "r", "=", "QImageReader", "(", "buf", ")", "fmt", "=", "bytes", "(", "r", ".", "format", "(", ")", ")", ".", "decode", "(", "u'utf-8'", ")", "return", "(", "r", ".", "read", "(", ")", ",", "fmt", ")" ]
create an image object from the specified data which should be a bytestring and also return the format of the image .
train
false
45,349
def latest_content(url): try: html = lxml.html.parse(url) res = html.xpath('//div[@id="artibody"]/p') if ct.PY3: sarr = [etree.tostring(node).decode('utf-8') for node in res] else: sarr = [etree.tostring(node) for node in res] sarr = ''.join(sarr).replace('&#12288;', '') html_content = lxml.html.fromstring(sarr) content = html_content.text_content() return content except Exception as er: print str(er)
[ "def", "latest_content", "(", "url", ")", ":", "try", ":", "html", "=", "lxml", ".", "html", ".", "parse", "(", "url", ")", "res", "=", "html", ".", "xpath", "(", "'//div[@id=\"artibody\"]/p'", ")", "if", "ct", ".", "PY3", ":", "sarr", "=", "[", "etree", ".", "tostring", "(", "node", ")", ".", "decode", "(", "'utf-8'", ")", "for", "node", "in", "res", "]", "else", ":", "sarr", "=", "[", "etree", ".", "tostring", "(", "node", ")", "for", "node", "in", "res", "]", "sarr", "=", "''", ".", "join", "(", "sarr", ")", ".", "replace", "(", "'&#12288;'", ",", "''", ")", "html_content", "=", "lxml", ".", "html", ".", "fromstring", "(", "sarr", ")", "content", "=", "html_content", ".", "text_content", "(", ")", "return", "content", "except", "Exception", "as", "er", ":", "print", "str", "(", "er", ")" ]
parameter url:新闻链接 return string:返回新闻的文字内容 .
train
false
45,351
def processor_architecture(): if (architecture() == '32bit'): return 'x86' else: return 'amd64'
[ "def", "processor_architecture", "(", ")", ":", "if", "(", "architecture", "(", ")", "==", "'32bit'", ")", ":", "return", "'x86'", "else", ":", "return", "'amd64'" ]
detect processor architecture for assembly manifest .
train
false
45,352
def schedule_tricks(observer, tricks, pathname, recursive): for trick in tricks: for (name, value) in list(trick.items()): TrickClass = load_class(name) handler = TrickClass(**value) trick_pathname = (getattr(handler, 'source_directory', None) or pathname) observer.schedule(handler, trick_pathname, recursive)
[ "def", "schedule_tricks", "(", "observer", ",", "tricks", ",", "pathname", ",", "recursive", ")", ":", "for", "trick", "in", "tricks", ":", "for", "(", "name", ",", "value", ")", "in", "list", "(", "trick", ".", "items", "(", ")", ")", ":", "TrickClass", "=", "load_class", "(", "name", ")", "handler", "=", "TrickClass", "(", "**", "value", ")", "trick_pathname", "=", "(", "getattr", "(", "handler", ",", "'source_directory'", ",", "None", ")", "or", "pathname", ")", "observer", ".", "schedule", "(", "handler", ",", "trick_pathname", ",", "recursive", ")" ]
schedules tricks with the specified observer and for the given watch path .
train
false
45,353
def should_profile(): if os.environ['SERVER_SOFTWARE'].startswith('Devel'): return _config.should_profile_development() else: return _config.should_profile_production()
[ "def", "should_profile", "(", ")", ":", "if", "os", ".", "environ", "[", "'SERVER_SOFTWARE'", "]", ".", "startswith", "(", "'Devel'", ")", ":", "return", "_config", ".", "should_profile_development", "(", ")", "else", ":", "return", "_config", ".", "should_profile_production", "(", ")" ]
returns true if the current request should be profiles .
train
false
45,355
def parse_fuzzy_item(source, constraints): saved_pos = source.pos try: parse_cost_constraint(source, constraints) except ParseError: source.pos = saved_pos parse_cost_equation(source, constraints)
[ "def", "parse_fuzzy_item", "(", "source", ",", "constraints", ")", ":", "saved_pos", "=", "source", ".", "pos", "try", ":", "parse_cost_constraint", "(", "source", ",", "constraints", ")", "except", "ParseError", ":", "source", ".", "pos", "=", "saved_pos", "parse_cost_equation", "(", "source", ",", "constraints", ")" ]
parses a fuzzy setting item .
train
false
45,356
def _parse_date_hungarian(dateString): m = _hungarian_date_format_re.match(dateString) if (not m): return try: month = _hungarian_months[m.group(2)] day = m.group(3) if (len(day) == 1): day = ('0' + day) hour = m.group(4) if (len(hour) == 1): hour = ('0' + hour) except: return w3dtfdate = ('%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s' % {'year': m.group(1), 'month': month, 'day': day, 'hour': hour, 'minute': m.group(5), 'zonediff': m.group(6)}) if _debug: sys.stderr.write(('Hungarian date parsed as: %s\n' % w3dtfdate)) return _parse_date_w3dtf(w3dtfdate)
[ "def", "_parse_date_hungarian", "(", "dateString", ")", ":", "m", "=", "_hungarian_date_format_re", ".", "match", "(", "dateString", ")", "if", "(", "not", "m", ")", ":", "return", "try", ":", "month", "=", "_hungarian_months", "[", "m", ".", "group", "(", "2", ")", "]", "day", "=", "m", ".", "group", "(", "3", ")", "if", "(", "len", "(", "day", ")", "==", "1", ")", ":", "day", "=", "(", "'0'", "+", "day", ")", "hour", "=", "m", ".", "group", "(", "4", ")", "if", "(", "len", "(", "hour", ")", "==", "1", ")", ":", "hour", "=", "(", "'0'", "+", "hour", ")", "except", ":", "return", "w3dtfdate", "=", "(", "'%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s'", "%", "{", "'year'", ":", "m", ".", "group", "(", "1", ")", ",", "'month'", ":", "month", ",", "'day'", ":", "day", ",", "'hour'", ":", "hour", ",", "'minute'", ":", "m", ".", "group", "(", "5", ")", ",", "'zonediff'", ":", "m", ".", "group", "(", "6", ")", "}", ")", "if", "_debug", ":", "sys", ".", "stderr", ".", "write", "(", "(", "'Hungarian date parsed as: %s\\n'", "%", "w3dtfdate", ")", ")", "return", "_parse_date_w3dtf", "(", "w3dtfdate", ")" ]
parse a string according to a hungarian 8-bit date format .
train
false
45,358
def _do_ros(df, observations, censorship, transform_in, transform_out): cohn = cohn_numbers(df, observations=observations, censorship=censorship) modeled = _ros_sort(df, observations=observations, censorship=censorship) modeled.loc[:, 'det_limit_index'] = modeled[observations].apply(_detection_limit_index, args=(cohn,)) modeled.loc[:, 'rank'] = _ros_group_rank(modeled, 'det_limit_index', censorship) modeled.loc[:, 'plot_pos'] = plotting_positions(modeled, censorship, cohn) modeled.loc[:, 'Zprelim'] = stats.norm.ppf(modeled['plot_pos']) return _impute(modeled, observations, censorship, transform_in, transform_out)
[ "def", "_do_ros", "(", "df", ",", "observations", ",", "censorship", ",", "transform_in", ",", "transform_out", ")", ":", "cohn", "=", "cohn_numbers", "(", "df", ",", "observations", "=", "observations", ",", "censorship", "=", "censorship", ")", "modeled", "=", "_ros_sort", "(", "df", ",", "observations", "=", "observations", ",", "censorship", "=", "censorship", ")", "modeled", ".", "loc", "[", ":", ",", "'det_limit_index'", "]", "=", "modeled", "[", "observations", "]", ".", "apply", "(", "_detection_limit_index", ",", "args", "=", "(", "cohn", ",", ")", ")", "modeled", ".", "loc", "[", ":", ",", "'rank'", "]", "=", "_ros_group_rank", "(", "modeled", ",", "'det_limit_index'", ",", "censorship", ")", "modeled", ".", "loc", "[", ":", ",", "'plot_pos'", "]", "=", "plotting_positions", "(", "modeled", ",", "censorship", ",", "cohn", ")", "modeled", ".", "loc", "[", ":", ",", "'Zprelim'", "]", "=", "stats", ".", "norm", ".", "ppf", "(", "modeled", "[", "'plot_pos'", "]", ")", "return", "_impute", "(", "modeled", ",", "observations", ",", "censorship", ",", "transform_in", ",", "transform_out", ")" ]
dataframe-centric function to impute censored valies with ros .
train
false
45,362
def alphanumeric_key(s): k = [(int(c) if c.isdigit() else c) for c in re.split('([0-9]+)', s)] return k
[ "def", "alphanumeric_key", "(", "s", ")", ":", "k", "=", "[", "(", "int", "(", "c", ")", "if", "c", ".", "isdigit", "(", ")", "else", "c", ")", "for", "c", "in", "re", ".", "split", "(", "'([0-9]+)'", ",", "s", ")", "]", "return", "k" ]
convert string to list of strings and ints that gives intuitive sorting .
train
false
45,364
def xldate_as_datetime(xldate, datemode): if datemode: epoch = epoch_1904 elif (xldate < 60): epoch = epoch_1900 else: epoch = epoch_1900_minus_1 days = int(xldate) fraction = (xldate - days) seconds = int(round((fraction * 86400000.0))) (seconds, milliseconds) = divmod(seconds, 1000) return (epoch + datetime.timedelta(days, seconds, 0, milliseconds))
[ "def", "xldate_as_datetime", "(", "xldate", ",", "datemode", ")", ":", "if", "datemode", ":", "epoch", "=", "epoch_1904", "elif", "(", "xldate", "<", "60", ")", ":", "epoch", "=", "epoch_1900", "else", ":", "epoch", "=", "epoch_1900_minus_1", "days", "=", "int", "(", "xldate", ")", "fraction", "=", "(", "xldate", "-", "days", ")", "seconds", "=", "int", "(", "round", "(", "(", "fraction", "*", "86400000.0", ")", ")", ")", "(", "seconds", ",", "milliseconds", ")", "=", "divmod", "(", "seconds", ",", "1000", ")", "return", "(", "epoch", "+", "datetime", ".", "timedelta", "(", "days", ",", "seconds", ",", "0", ",", "milliseconds", ")", ")" ]
convert an excel date/time number into a :class:datetime .
train
false
45,365
def is_platform_file(path): if ((not os.path.exists(path)) or os.path.islink(path)): return False with open(path, 'rb') as fileobj: bytes = fileobj.read(MAGIC_LEN) if (bytes == FAT_MAGIC_BYTES): fileobj.seek(0) header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>') if (header.nfat_arch < 1): return False arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>') fileobj.seek(arch.offset) bytes = fileobj.read(MAGIC_LEN) for magic in MAGIC: if (bytes == magic): return True return False
[ "def", "is_platform_file", "(", "path", ")", ":", "if", "(", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", "or", "os", ".", "path", ".", "islink", "(", "path", ")", ")", ":", "return", "False", "with", "open", "(", "path", ",", "'rb'", ")", "as", "fileobj", ":", "bytes", "=", "fileobj", ".", "read", "(", "MAGIC_LEN", ")", "if", "(", "bytes", "==", "FAT_MAGIC_BYTES", ")", ":", "fileobj", ".", "seek", "(", "0", ")", "header", "=", "mach_o", ".", "fat_header", ".", "from_fileobj", "(", "fileobj", ",", "_endian_", "=", "'>'", ")", "if", "(", "header", ".", "nfat_arch", "<", "1", ")", ":", "return", "False", "arch", "=", "mach_o", ".", "fat_arch", ".", "from_fileobj", "(", "fileobj", ",", "_endian_", "=", "'>'", ")", "fileobj", ".", "seek", "(", "arch", ".", "offset", ")", "bytes", "=", "fileobj", ".", "read", "(", "MAGIC_LEN", ")", "for", "magic", "in", "MAGIC", ":", "if", "(", "bytes", "==", "magic", ")", ":", "return", "True", "return", "False" ]
return true if the file is mach-o .
train
true
45,366
def _send_inventories(response, resource_provider, inventories): response.status = 200 response.body = encodeutils.to_utf8(jsonutils.dumps(_serialize_inventories(inventories, resource_provider.generation))) response.content_type = 'application/json' return response
[ "def", "_send_inventories", "(", "response", ",", "resource_provider", ",", "inventories", ")", ":", "response", ".", "status", "=", "200", "response", ".", "body", "=", "encodeutils", ".", "to_utf8", "(", "jsonutils", ".", "dumps", "(", "_serialize_inventories", "(", "inventories", ",", "resource_provider", ".", "generation", ")", ")", ")", "response", ".", "content_type", "=", "'application/json'", "return", "response" ]
send a json representation of a list of inventories .
train
false
45,367
def role_exists(role, **kwargs): return (len(tsql_query(query='sp_helprole "{0}"'.format(role), as_dict=True, **kwargs)) == 1)
[ "def", "role_exists", "(", "role", ",", "**", "kwargs", ")", ":", "return", "(", "len", "(", "tsql_query", "(", "query", "=", "'sp_helprole \"{0}\"'", ".", "format", "(", "role", ")", ",", "as_dict", "=", "True", ",", "**", "kwargs", ")", ")", "==", "1", ")" ]
checks if a role exists .
train
true
45,368
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
45,369
def logout_then_login(request, login_url=None, extra_context=_sentinel): if (extra_context is not _sentinel): warnings.warn('The unused `extra_context` parameter to `logout_then_login` is deprecated.', RemovedInDjango21Warning) if (not login_url): login_url = settings.LOGIN_URL login_url = resolve_url(login_url) return LogoutView.as_view(next_page=login_url)(request)
[ "def", "logout_then_login", "(", "request", ",", "login_url", "=", "None", ",", "extra_context", "=", "_sentinel", ")", ":", "if", "(", "extra_context", "is", "not", "_sentinel", ")", ":", "warnings", ".", "warn", "(", "'The unused `extra_context` parameter to `logout_then_login` is deprecated.'", ",", "RemovedInDjango21Warning", ")", "if", "(", "not", "login_url", ")", ":", "login_url", "=", "settings", ".", "LOGIN_URL", "login_url", "=", "resolve_url", "(", "login_url", ")", "return", "LogoutView", ".", "as_view", "(", "next_page", "=", "login_url", ")", "(", "request", ")" ]
logs out the user if he is logged in .
train
false
45,370
def handler_for_name(fq_name): resolved_name = for_name(fq_name) if isinstance(resolved_name, (type, types.ClassType)): return resolved_name() elif isinstance(resolved_name, types.MethodType): return getattr(resolved_name.im_class(), resolved_name.__name__) else: return resolved_name
[ "def", "handler_for_name", "(", "fq_name", ")", ":", "resolved_name", "=", "for_name", "(", "fq_name", ")", "if", "isinstance", "(", "resolved_name", ",", "(", "type", ",", "types", ".", "ClassType", ")", ")", ":", "return", "resolved_name", "(", ")", "elif", "isinstance", "(", "resolved_name", ",", "types", ".", "MethodType", ")", ":", "return", "getattr", "(", "resolved_name", ".", "im_class", "(", ")", ",", "resolved_name", ".", "__name__", ")", "else", ":", "return", "resolved_name" ]
resolves and instantiates handler by fully qualified name .
train
true
45,371
def removedirs(name): rmdir(name) (head, tail) = path.split(name) if (not tail): (head, tail) = path.split(head) while (head and tail): try: rmdir(head) except error: break (head, tail) = path.split(head)
[ "def", "removedirs", "(", "name", ")", ":", "rmdir", "(", "name", ")", "(", "head", ",", "tail", ")", "=", "path", ".", "split", "(", "name", ")", "if", "(", "not", "tail", ")", ":", "(", "head", ",", "tail", ")", "=", "path", ".", "split", "(", "head", ")", "while", "(", "head", "and", "tail", ")", ":", "try", ":", "rmdir", "(", "head", ")", "except", "error", ":", "break", "(", "head", ",", "tail", ")", "=", "path", ".", "split", "(", "head", ")" ]
removedirs super-rmdir; remove a leaf directory and all empty intermediate ones .
train
false
45,372
def _to_utf8(value): if isinstance(value, str): return value return value.encode('utf-8')
[ "def", "_to_utf8", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "return", "value", "return", "value", ".", "encode", "(", "'utf-8'", ")" ]
encodes a unicode value to utf-8 if not yet encoded .
train
false
45,374
@secure_required def signout(request, next_page=userena_settings.USERENA_REDIRECT_ON_SIGNOUT, template_name='userena/signout.html', *args, **kwargs): if (request.user.is_authenticated() and userena_settings.USERENA_USE_MESSAGES): messages.success(request, _('You have been signed out.'), fail_silently=True) userena_signals.account_signout.send(sender=None, user=request.user) return Signout(request, next_page, template_name, *args, **kwargs)
[ "@", "secure_required", "def", "signout", "(", "request", ",", "next_page", "=", "userena_settings", ".", "USERENA_REDIRECT_ON_SIGNOUT", ",", "template_name", "=", "'userena/signout.html'", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "userena_settings", ".", "USERENA_USE_MESSAGES", ")", ":", "messages", ".", "success", "(", "request", ",", "_", "(", "'You have been signed out.'", ")", ",", "fail_silently", "=", "True", ")", "userena_signals", ".", "account_signout", ".", "send", "(", "sender", "=", "None", ",", "user", "=", "request", ".", "user", ")", "return", "Signout", "(", "request", ",", "next_page", ",", "template_name", ",", "*", "args", ",", "**", "kwargs", ")" ]
sign out .
train
true
45,375
def header_table_size(table): return sum((((32 + len(name)) + len(value)) for (name, value) in table))
[ "def", "header_table_size", "(", "table", ")", ":", "return", "sum", "(", "(", "(", "(", "32", "+", "len", "(", "name", ")", ")", "+", "len", "(", "value", ")", ")", "for", "(", "name", ",", "value", ")", "in", "table", ")", ")" ]
calculates the size of the header table as defined by the http/2 specification .
train
false
45,376
def any_unfulfilled_milestones(course_id, user_id): if (not settings.FEATURES.get('MILESTONES_APP')): return False return bool(get_course_milestones_fulfillment_paths(course_id, {'id': user_id}))
[ "def", "any_unfulfilled_milestones", "(", "course_id", ",", "user_id", ")", ":", "if", "(", "not", "settings", ".", "FEATURES", ".", "get", "(", "'MILESTONES_APP'", ")", ")", ":", "return", "False", "return", "bool", "(", "get_course_milestones_fulfillment_paths", "(", "course_id", ",", "{", "'id'", ":", "user_id", "}", ")", ")" ]
returns a boolean if user has any unfulfilled milestones .
train
false
45,377
def getLargestCenterOutsetLoopFromLoopRegardless(loop, radius): global globalDecreasingRadiusMultipliers for decreasingRadiusMultiplier in globalDecreasingRadiusMultipliers: decreasingRadius = (radius * decreasingRadiusMultiplier) largestCenterOutsetLoop = getLargestCenterOutsetLoopFromLoop(loop, decreasingRadius) if (largestCenterOutsetLoop != None): return largestCenterOutsetLoop return CenterOutset(loop, loop)
[ "def", "getLargestCenterOutsetLoopFromLoopRegardless", "(", "loop", ",", "radius", ")", ":", "global", "globalDecreasingRadiusMultipliers", "for", "decreasingRadiusMultiplier", "in", "globalDecreasingRadiusMultipliers", ":", "decreasingRadius", "=", "(", "radius", "*", "decreasingRadiusMultiplier", ")", "largestCenterOutsetLoop", "=", "getLargestCenterOutsetLoopFromLoop", "(", "loop", ",", "decreasingRadius", ")", "if", "(", "largestCenterOutsetLoop", "!=", "None", ")", ":", "return", "largestCenterOutsetLoop", "return", "CenterOutset", "(", "loop", ",", "loop", ")" ]
get the largest circle outset loop from the loop .
train
false
45,378
def beta_from_targets(dataset, **kwargs): return beta_from_design(dataset.y, **kwargs)
[ "def", "beta_from_targets", "(", "dataset", ",", "**", "kwargs", ")", ":", "return", "beta_from_design", "(", "dataset", ".", "y", ",", "**", "kwargs", ")" ]
returns the marginal precision of the targets in a dataset .
train
false
45,379
def get_cluster_cpu_times(reactor, runner, nodes, inits, processes): return gather_deferreds(list((get_node_cpu_times(reactor, runner, node, init, processes) for (node, init) in zip(nodes, inits))))
[ "def", "get_cluster_cpu_times", "(", "reactor", ",", "runner", ",", "nodes", ",", "inits", ",", "processes", ")", ":", "return", "gather_deferreds", "(", "list", "(", "(", "get_node_cpu_times", "(", "reactor", ",", "runner", ",", "node", ",", "init", ",", "processes", ")", "for", "(", "node", ",", "init", ")", "in", "zip", "(", "nodes", ",", "inits", ")", ")", ")", ")" ]
get the cpu times for processes running on a cluster .
train
false
45,380
def delete_objects(context, model, **kwargs): with context.session.begin(subtransactions=True): db_objs = get_objects(context, model, **kwargs) for db_obj in db_objs: context.session.delete(db_obj) return len(db_objs)
[ "def", "delete_objects", "(", "context", ",", "model", ",", "**", "kwargs", ")", ":", "with", "context", ".", "session", ".", "begin", "(", "subtransactions", "=", "True", ")", ":", "db_objs", "=", "get_objects", "(", "context", ",", "model", ",", "**", "kwargs", ")", "for", "db_obj", "in", "db_objs", ":", "context", ".", "session", ".", "delete", "(", "db_obj", ")", "return", "len", "(", "db_objs", ")" ]
delete matching objects .
train
false
45,381
def summer(): rc(u'image', cmap=u'summer') im = gci() if (im is not None): im.set_cmap(cm.summer)
[ "def", "summer", "(", ")", ":", "rc", "(", "u'image'", ",", "cmap", "=", "u'summer'", ")", "im", "=", "gci", "(", ")", "if", "(", "im", "is", "not", "None", ")", ":", "im", ".", "set_cmap", "(", "cm", ".", "summer", ")" ]
set the default colormap to summer and apply to current image if any .
train
false
45,382
def test_find_number_8(): s = 'ryleh -14e7$$!$' r = find_number(s) assert (s[r[0]:r[1]] == '-14e7')
[ "def", "test_find_number_8", "(", ")", ":", "s", "=", "'ryleh -14e7$$!$'", "r", "=", "find_number", "(", "s", ")", "assert", "(", "s", "[", "r", "[", "0", "]", ":", "r", "[", "1", "]", "]", "==", "'-14e7'", ")" ]
tests that we find numbers with exponents and negative signs .
train
false
45,383
def _test_autocast_numpy(): assert (config.cast_policy == 'numpy') def ok(z): assert (tensor.constant(z).dtype == numpy.asarray(z).dtype) for x in (([(2 ** i) for i in xrange(63)] + [0, L(0), L(1), L(((2 ** 63) - 1))]) + [0.0, 1.0, 1.1, 1.5]): n_x = numpy.asarray(x) ok(x) ok((- x)) ok((x - 1)) ok(((- x) + 1)) ok(n_x)
[ "def", "_test_autocast_numpy", "(", ")", ":", "assert", "(", "config", ".", "cast_policy", "==", "'numpy'", ")", "def", "ok", "(", "z", ")", ":", "assert", "(", "tensor", ".", "constant", "(", "z", ")", ".", "dtype", "==", "numpy", ".", "asarray", "(", "z", ")", ".", "dtype", ")", "for", "x", "in", "(", "(", "[", "(", "2", "**", "i", ")", "for", "i", "in", "xrange", "(", "63", ")", "]", "+", "[", "0", ",", "L", "(", "0", ")", ",", "L", "(", "1", ")", ",", "L", "(", "(", "(", "2", "**", "63", ")", "-", "1", ")", ")", "]", ")", "+", "[", "0.0", ",", "1.0", ",", "1.1", ",", "1.5", "]", ")", ":", "n_x", "=", "numpy", ".", "asarray", "(", "x", ")", "ok", "(", "x", ")", "ok", "(", "(", "-", "x", ")", ")", "ok", "(", "(", "x", "-", "1", ")", ")", "ok", "(", "(", "(", "-", "x", ")", "+", "1", ")", ")", "ok", "(", "n_x", ")" ]
called from test_autocast .
train
false
45,384
def comment_was_posted(request): obj = None if request.GET.has_key('c'): (content_type_id, object_id) = request.GET['c'].split(':') try: content_type = ContentType.objects.get(pk=content_type_id) obj = content_type.get_object_for_this_type(pk=object_id) except ObjectDoesNotExist: pass return render_to_response('comments/posted.html', {'object': obj}, context_instance=RequestContext(request))
[ "def", "comment_was_posted", "(", "request", ")", ":", "obj", "=", "None", "if", "request", ".", "GET", ".", "has_key", "(", "'c'", ")", ":", "(", "content_type_id", ",", "object_id", ")", "=", "request", ".", "GET", "[", "'c'", "]", ".", "split", "(", "':'", ")", "try", ":", "content_type", "=", "ContentType", ".", "objects", ".", "get", "(", "pk", "=", "content_type_id", ")", "obj", "=", "content_type", ".", "get_object_for_this_type", "(", "pk", "=", "object_id", ")", "except", "ObjectDoesNotExist", ":", "pass", "return", "render_to_response", "(", "'comments/posted.html'", ",", "{", "'object'", ":", "obj", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")" ]
display "comment was posted" success page templates: comment_posted context: object the object the comment was posted on .
train
false
45,385
def deepvalues(mapping): values = vals_sorted_by_key(mapping) for obj in values: mapping = False try: obj.items except AttributeError: pass else: mapping = True for subobj in deepvalues(obj): (yield subobj) if (not mapping): (yield obj)
[ "def", "deepvalues", "(", "mapping", ")", ":", "values", "=", "vals_sorted_by_key", "(", "mapping", ")", "for", "obj", "in", "values", ":", "mapping", "=", "False", "try", ":", "obj", ".", "items", "except", "AttributeError", ":", "pass", "else", ":", "mapping", "=", "True", "for", "subobj", "in", "deepvalues", "(", "obj", ")", ":", "(", "yield", "subobj", ")", "if", "(", "not", "mapping", ")", ":", "(", "yield", "obj", ")" ]
iterates over nested mapping .
train
true
45,386
def format_docstring(owner_name, docstring, formatters): format_params = {} for (target, doc_for_target) in iteritems(formatters): regex = re.compile(((('^(\\s*)' + '({') + target) + '})$'), re.MULTILINE) matches = regex.findall(docstring) if (not matches): raise ValueError("Couldn't find template for parameter {!r} in docstring for {}.\nParameter name must be alone on a line surrounded by braces.".format(target, owner_name)) elif (len(matches) > 1): raise ValueError("Couldn't found multiple templates for parameter {!r}in docstring for {}.\nParameter should only appear once.".format(target, owner_name)) (leading_whitespace, _) = matches[0] format_params[target] = pad_lines_after_first(leading_whitespace, doc_for_target) return docstring.format(**format_params)
[ "def", "format_docstring", "(", "owner_name", ",", "docstring", ",", "formatters", ")", ":", "format_params", "=", "{", "}", "for", "(", "target", ",", "doc_for_target", ")", "in", "iteritems", "(", "formatters", ")", ":", "regex", "=", "re", ".", "compile", "(", "(", "(", "(", "'^(\\\\s*)'", "+", "'({'", ")", "+", "target", ")", "+", "'})$'", ")", ",", "re", ".", "MULTILINE", ")", "matches", "=", "regex", ".", "findall", "(", "docstring", ")", "if", "(", "not", "matches", ")", ":", "raise", "ValueError", "(", "\"Couldn't find template for parameter {!r} in docstring for {}.\\nParameter name must be alone on a line surrounded by braces.\"", ".", "format", "(", "target", ",", "owner_name", ")", ")", "elif", "(", "len", "(", "matches", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "\"Couldn't found multiple templates for parameter {!r}in docstring for {}.\\nParameter should only appear once.\"", ".", "format", "(", "target", ",", "owner_name", ")", ")", "(", "leading_whitespace", ",", "_", ")", "=", "matches", "[", "0", "]", "format_params", "[", "target", "]", "=", "pad_lines_after_first", "(", "leading_whitespace", ",", "doc_for_target", ")", "return", "docstring", ".", "format", "(", "**", "format_params", ")" ]
template formatters into docstring .
train
true
45,387
def shorten_name(name, max_length): if (len(name) <= max_length): return name (q, r) = divmod((max_length - 3), 2) return ((name[:(q + r)] + '...') + name[(- q):])
[ "def", "shorten_name", "(", "name", ",", "max_length", ")", ":", "if", "(", "len", "(", "name", ")", "<=", "max_length", ")", ":", "return", "name", "(", "q", ",", "r", ")", "=", "divmod", "(", "(", "max_length", "-", "3", ")", ",", "2", ")", "return", "(", "(", "name", "[", ":", "(", "q", "+", "r", ")", "]", "+", "'...'", ")", "+", "name", "[", "(", "-", "q", ")", ":", "]", ")" ]
shortens a name to the given number of characters .
train
false
45,388
def _themes_queue(request, flagged=False, rereview=False): themes = _get_themes(request, request.user, flagged=flagged, rereview=rereview) ThemeReviewFormset = formset_factory(forms.ThemeReviewForm) formset = ThemeReviewFormset(initial=[{'theme': _rereview_to_theme(rereview, theme).id} for theme in themes]) return render(request, 'editors/themes/queue.html', context(**{'actions': get_actions_json(), 'formset': formset, 'flagged': flagged, 'reject_reasons': rvw.THEME_REJECT_REASONS, 'rereview': rereview, 'reviewable': True, 'theme_formsets': zip(themes, formset), 'theme_count': len(themes), 'tab': ('flagged' if flagged else ('rereview' if rereview else 'pending'))}))
[ "def", "_themes_queue", "(", "request", ",", "flagged", "=", "False", ",", "rereview", "=", "False", ")", ":", "themes", "=", "_get_themes", "(", "request", ",", "request", ".", "user", ",", "flagged", "=", "flagged", ",", "rereview", "=", "rereview", ")", "ThemeReviewFormset", "=", "formset_factory", "(", "forms", ".", "ThemeReviewForm", ")", "formset", "=", "ThemeReviewFormset", "(", "initial", "=", "[", "{", "'theme'", ":", "_rereview_to_theme", "(", "rereview", ",", "theme", ")", ".", "id", "}", "for", "theme", "in", "themes", "]", ")", "return", "render", "(", "request", ",", "'editors/themes/queue.html'", ",", "context", "(", "**", "{", "'actions'", ":", "get_actions_json", "(", ")", ",", "'formset'", ":", "formset", ",", "'flagged'", ":", "flagged", ",", "'reject_reasons'", ":", "rvw", ".", "THEME_REJECT_REASONS", ",", "'rereview'", ":", "rereview", ",", "'reviewable'", ":", "True", ",", "'theme_formsets'", ":", "zip", "(", "themes", ",", "formset", ")", ",", "'theme_count'", ":", "len", "(", "themes", ")", ",", "'tab'", ":", "(", "'flagged'", "if", "flagged", "else", "(", "'rereview'", "if", "rereview", "else", "'pending'", ")", ")", "}", ")", ")" ]
themes queue in interactive format .
train
false
45,389
def _is_type(t): return (lambda x: isinstance(x.value, t))
[ "def", "_is_type", "(", "t", ")", ":", "return", "(", "lambda", "x", ":", "isinstance", "(", "x", ".", "value", ",", "t", ")", ")" ]
factory for a type checking function of type t or tuple of types .
train
false
45,390
def hsplit(ary, indices_or_sections): if (ary.ndim == 0): raise ValueError('Cannot hsplit a zero-dimensional array') if (ary.ndim == 1): return split(ary, indices_or_sections, 0) else: return split(ary, indices_or_sections, 1)
[ "def", "hsplit", "(", "ary", ",", "indices_or_sections", ")", ":", "if", "(", "ary", ".", "ndim", "==", "0", ")", ":", "raise", "ValueError", "(", "'Cannot hsplit a zero-dimensional array'", ")", "if", "(", "ary", ".", "ndim", "==", "1", ")", ":", "return", "split", "(", "ary", ",", "indices_or_sections", ",", "0", ")", "else", ":", "return", "split", "(", "ary", ",", "indices_or_sections", ",", "1", ")" ]
splits an array into multiple sub arrays horizontally .
train
false
45,391
def get_account_id(region=None, key=None, keyid=None, profile=None): cache_key = 'boto_iam.account_id' if (cache_key not in __context__): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: ret = conn.get_user() arn = ret['get_user_response']['get_user_result']['user']['arn'] account_id = arn.split(':')[4] except boto.exception.BotoServerError: timeout = boto.config.getfloat('Boto', 'metadata_service_timeout', 1.0) attempts = boto.config.getint('Boto', 'metadata_service_num_attempts', 1) identity = boto.utils.get_instance_identity(timeout=timeout, num_retries=attempts) try: account_id = identity['document']['accountId'] except KeyError: log.error('Failed to get account id from instance_identity in boto_iam.get_account_id.') __context__[cache_key] = account_id return __context__[cache_key]
[ "def", "get_account_id", "(", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "cache_key", "=", "'boto_iam.account_id'", "if", "(", "cache_key", "not", "in", "__context__", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "ret", "=", "conn", ".", "get_user", "(", ")", "arn", "=", "ret", "[", "'get_user_response'", "]", "[", "'get_user_result'", "]", "[", "'user'", "]", "[", "'arn'", "]", "account_id", "=", "arn", ".", "split", "(", "':'", ")", "[", "4", "]", "except", "boto", ".", "exception", ".", "BotoServerError", ":", "timeout", "=", "boto", ".", "config", ".", "getfloat", "(", "'Boto'", ",", "'metadata_service_timeout'", ",", "1.0", ")", "attempts", "=", "boto", ".", "config", ".", "getint", "(", "'Boto'", ",", "'metadata_service_num_attempts'", ",", "1", ")", "identity", "=", "boto", ".", "utils", ".", "get_instance_identity", "(", "timeout", "=", "timeout", ",", "num_retries", "=", "attempts", ")", "try", ":", "account_id", "=", "identity", "[", "'document'", "]", "[", "'accountId'", "]", "except", "KeyError", ":", "log", ".", "error", "(", "'Failed to get account id from instance_identity in boto_iam.get_account_id.'", ")", "__context__", "[", "cache_key", "]", "=", "account_id", "return", "__context__", "[", "cache_key", "]" ]
retrieve the aws account id for the authenticated user or role .
train
true
45,393
@register.filter(name='dateformat') def do_dateformat(value, use_format='c'): try: use_format = get_format(use_format) except AttributeError: pass return dateformat.format(value, use_format)
[ "@", "register", ".", "filter", "(", "name", "=", "'dateformat'", ")", "def", "do_dateformat", "(", "value", ",", "use_format", "=", "'c'", ")", ":", "try", ":", "use_format", "=", "get_format", "(", "use_format", ")", "except", "AttributeError", ":", "pass", "return", "dateformat", ".", "format", "(", "value", ",", "use_format", ")" ]
formats a value date using format .
train
false
45,394
def stftfreq(wsize, sfreq=None): n_freq = ((wsize // 2) + 1) freqs = fftfreq(wsize) freqs = np.abs(freqs[:n_freq]) if (sfreq is not None): freqs *= float(sfreq) return freqs
[ "def", "stftfreq", "(", "wsize", ",", "sfreq", "=", "None", ")", ":", "n_freq", "=", "(", "(", "wsize", "//", "2", ")", "+", "1", ")", "freqs", "=", "fftfreq", "(", "wsize", ")", "freqs", "=", "np", ".", "abs", "(", "freqs", "[", ":", "n_freq", "]", ")", "if", "(", "sfreq", "is", "not", "None", ")", ":", "freqs", "*=", "float", "(", "sfreq", ")", "return", "freqs" ]
frequencies of stft transformation .
train
false
45,395
def _get_required_string(parsed, flags): (req_offset, required) = parsed.get_required_string(bool((flags & REVERSE))) if required: required.required = True if (req_offset >= UNLIMITED): req_offset = (-1) req_flags = required.case_flags if (not (flags & UNICODE)): req_flags &= (~ UNICODE) req_chars = required.folded_characters else: req_offset = 0 req_chars = () req_flags = 0 return (req_offset, req_chars, req_flags)
[ "def", "_get_required_string", "(", "parsed", ",", "flags", ")", ":", "(", "req_offset", ",", "required", ")", "=", "parsed", ".", "get_required_string", "(", "bool", "(", "(", "flags", "&", "REVERSE", ")", ")", ")", "if", "required", ":", "required", ".", "required", "=", "True", "if", "(", "req_offset", ">=", "UNLIMITED", ")", ":", "req_offset", "=", "(", "-", "1", ")", "req_flags", "=", "required", ".", "case_flags", "if", "(", "not", "(", "flags", "&", "UNICODE", ")", ")", ":", "req_flags", "&=", "(", "~", "UNICODE", ")", "req_chars", "=", "required", ".", "folded_characters", "else", ":", "req_offset", "=", "0", "req_chars", "=", "(", ")", "req_flags", "=", "0", "return", "(", "req_offset", ",", "req_chars", ",", "req_flags", ")" ]
gets the required string and related info of a parsed pattern .
train
false
45,396
def chi2_kernel(X, Y=None, gamma=1.0): K = additive_chi2_kernel(X, Y) K *= gamma return np.exp(K, K)
[ "def", "chi2_kernel", "(", "X", ",", "Y", "=", "None", ",", "gamma", "=", "1.0", ")", ":", "K", "=", "additive_chi2_kernel", "(", "X", ",", "Y", ")", "K", "*=", "gamma", "return", "np", ".", "exp", "(", "K", ",", "K", ")" ]
computes the exponential chi-squared kernel x and y .
train
false
45,398
def make_staging_area(sr_path): staging_path = tempfile.mkdtemp(dir=sr_path) return staging_path
[ "def", "make_staging_area", "(", "sr_path", ")", ":", "staging_path", "=", "tempfile", ".", "mkdtemp", "(", "dir", "=", "sr_path", ")", "return", "staging_path" ]
the staging area is a place where we can temporarily store and manipulate vhds .
train
false
45,399
def find_possible_tools_from_path(path, recursive=False, enable_beta_formats=False): possible_tool_files = [] for possible_tool_file in _find_tool_files(path, recursive=recursive, enable_beta_formats=enable_beta_formats): try: does_look_like_a_tool = looks_like_a_tool(possible_tool_file, enable_beta_formats=enable_beta_formats) except IOError: continue if does_look_like_a_tool: possible_tool_files.append(possible_tool_file) return possible_tool_files
[ "def", "find_possible_tools_from_path", "(", "path", ",", "recursive", "=", "False", ",", "enable_beta_formats", "=", "False", ")", ":", "possible_tool_files", "=", "[", "]", "for", "possible_tool_file", "in", "_find_tool_files", "(", "path", ",", "recursive", "=", "recursive", ",", "enable_beta_formats", "=", "enable_beta_formats", ")", ":", "try", ":", "does_look_like_a_tool", "=", "looks_like_a_tool", "(", "possible_tool_file", ",", "enable_beta_formats", "=", "enable_beta_formats", ")", "except", "IOError", ":", "continue", "if", "does_look_like_a_tool", ":", "possible_tool_files", ".", "append", "(", "possible_tool_file", ")", "return", "possible_tool_files" ]
walk a directory and find potential tool files .
train
false
45,402
def new_date(d): return date(d.year, d.month, d.day)
[ "def", "new_date", "(", "d", ")", ":", "return", "date", "(", "d", ".", "year", ",", "d", ".", "month", ",", "d", ".", "day", ")" ]
generate a safe date from a datetime .
train
false
45,403
def _get_full_customization_args(customization_args, ca_specs): for ca_spec in ca_specs: if (ca_spec.name not in customization_args): customization_args[ca_spec.name] = {'value': ca_spec.default_value} return customization_args
[ "def", "_get_full_customization_args", "(", "customization_args", ",", "ca_specs", ")", ":", "for", "ca_spec", "in", "ca_specs", ":", "if", "(", "ca_spec", ".", "name", "not", "in", "customization_args", ")", ":", "customization_args", "[", "ca_spec", ".", "name", "]", "=", "{", "'value'", ":", "ca_spec", ".", "default_value", "}", "return", "customization_args" ]
populates the given customization_args dict with default values if any of the expected customization_args are missing .
train
false
45,404
def uuid3(namespace, name): try: from hashlib import md5 except ImportError: from md5 import md5 hash = md5((namespace.bytes + name)).digest() return UUID(bytes=hash[:16], version=3)
[ "def", "uuid3", "(", "namespace", ",", "name", ")", ":", "try", ":", "from", "hashlib", "import", "md5", "except", "ImportError", ":", "from", "md5", "import", "md5", "hash", "=", "md5", "(", "(", "namespace", ".", "bytes", "+", "name", ")", ")", ".", "digest", "(", ")", "return", "UUID", "(", "bytes", "=", "hash", "[", ":", "16", "]", ",", "version", "=", "3", ")" ]
generate a uuid from the md5 hash of a namespace uuid and a name .
train
true
45,405
def make_cgitb_middleware(app, global_conf, display=NoDefault, logdir=None, context=5, format='html'): from paste.deploy.converters import asbool if (display is not NoDefault): display = asbool(display) if ('debug' in global_conf): global_conf['debug'] = asbool(global_conf['debug']) return CgitbMiddleware(app, global_conf=global_conf, display=display, logdir=logdir, context=context, format=format)
[ "def", "make_cgitb_middleware", "(", "app", ",", "global_conf", ",", "display", "=", "NoDefault", ",", "logdir", "=", "None", ",", "context", "=", "5", ",", "format", "=", "'html'", ")", ":", "from", "paste", ".", "deploy", ".", "converters", "import", "asbool", "if", "(", "display", "is", "not", "NoDefault", ")", ":", "display", "=", "asbool", "(", "display", ")", "if", "(", "'debug'", "in", "global_conf", ")", ":", "global_conf", "[", "'debug'", "]", "=", "asbool", "(", "global_conf", "[", "'debug'", "]", ")", "return", "CgitbMiddleware", "(", "app", ",", "global_conf", "=", "global_conf", ",", "display", "=", "display", ",", "logdir", "=", "logdir", ",", "context", "=", "context", ",", "format", "=", "format", ")" ]
wraps the application in the cgitb error catcher .
train
false
45,406
def nms(dets, thresh, force_cpu=False): if (dets.shape[0] == 0): return [] if (cfg.USE_GPU_NMS and (not force_cpu)): return gpu_nms(dets, thresh, device_id=cfg.GPU_ID) else: return cpu_nms(dets, thresh)
[ "def", "nms", "(", "dets", ",", "thresh", ",", "force_cpu", "=", "False", ")", ":", "if", "(", "dets", ".", "shape", "[", "0", "]", "==", "0", ")", ":", "return", "[", "]", "if", "(", "cfg", ".", "USE_GPU_NMS", "and", "(", "not", "force_cpu", ")", ")", ":", "return", "gpu_nms", "(", "dets", ",", "thresh", ",", "device_id", "=", "cfg", ".", "GPU_ID", ")", "else", ":", "return", "cpu_nms", "(", "dets", ",", "thresh", ")" ]
dispatch to either cpu or gpu nms implementations .
train
false
45,407
def set_cflags(value): return set_var('CFLAGS', value)
[ "def", "set_cflags", "(", "value", ")", ":", "return", "set_var", "(", "'CFLAGS'", ",", "value", ")" ]
set the cflags variable return a dict containing the new value for variable:: {<variable>: {old: <old-value> .
train
false
45,408
def getVector3IfNone(vector3): if (vector3 == None): return Vector3() return vector3
[ "def", "getVector3IfNone", "(", "vector3", ")", ":", "if", "(", "vector3", "==", "None", ")", ":", "return", "Vector3", "(", ")", "return", "vector3" ]
get new vector3 if the original vector3 is none .
train
false
45,409
def LOG_LEVEL(x): with context.local(log_level=x): context.defaults['log_level'] = context.log_level
[ "def", "LOG_LEVEL", "(", "x", ")", ":", "with", "context", ".", "local", "(", "log_level", "=", "x", ")", ":", "context", ".", "defaults", "[", "'log_level'", "]", "=", "context", ".", "log_level" ]
sets the logging verbosity used via context .
train
false
45,411
def test_find_number_7(): s = 'sdglk421.e6' r = find_number(s) assert (s[r[0]:r[1]] == '421.e6'), s[r[0]:r[1]]
[ "def", "test_find_number_7", "(", ")", ":", "s", "=", "'sdglk421.e6'", "r", "=", "find_number", "(", "s", ")", "assert", "(", "s", "[", "r", "[", "0", "]", ":", "r", "[", "1", "]", "]", "==", "'421.e6'", ")", ",", "s", "[", "r", "[", "0", "]", ":", "r", "[", "1", "]", "]" ]
tests that we find decimal numbers with exponents .
train
false