id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
49,379
def openssl_verify(cafile, certificatefile, **kwargs): command = ['openssl', 'verify', '-CAfile', cafile, certificatefile] try: result = run_process(command, **kwargs) return (result.output.strip() == '{}: OK'.format(certificatefile)) except CalledProcessError as e: result = run_process(['openssl', 'x509', '-text', '-in', cafile], **kwargs) cafile_info = result.output result = run_process(['openssl', 'x509', '-text', '-in', certificatefile], **kwargs) certificate_info = result.output error = str(e) error = ((((error + '\n') + cafile_info) + '\n') + certificate_info) Message.new(message_type='flocker.ca.functional:openssl_verify_error', error=error).write(Logger()) return False
[ "def", "openssl_verify", "(", "cafile", ",", "certificatefile", ",", "**", "kwargs", ")", ":", "command", "=", "[", "'openssl'", ",", "'verify'", ",", "'-CAfile'", ",", "cafile", ",", "certificatefile", "]", "try", ":", "result", "=", "run_process", "(", "command", ",", "**", "kwargs", ")", "return", "(", "result", ".", "output", ".", "strip", "(", ")", "==", "'{}: OK'", ".", "format", "(", "certificatefile", ")", ")", "except", "CalledProcessError", "as", "e", ":", "result", "=", "run_process", "(", "[", "'openssl'", ",", "'x509'", ",", "'-text'", ",", "'-in'", ",", "cafile", "]", ",", "**", "kwargs", ")", "cafile_info", "=", "result", ".", "output", "result", "=", "run_process", "(", "[", "'openssl'", ",", "'x509'", ",", "'-text'", ",", "'-in'", ",", "certificatefile", "]", ",", "**", "kwargs", ")", "certificate_info", "=", "result", ".", "output", "error", "=", "str", "(", "e", ")", "error", "=", "(", "(", "(", "(", "error", "+", "'\\n'", ")", "+", "cafile_info", ")", "+", "'\\n'", ")", "+", "certificate_info", ")", "Message", ".", "new", "(", "message_type", "=", "'flocker.ca.functional:openssl_verify_error'", ",", "error", "=", "error", ")", ".", "write", "(", "Logger", "(", ")", ")", "return", "False" ]
use openssl cli to verify a certificate was signed by a given certificate authority .
train
false
49,380
def validate_bitmap(value): if (value is None): return if (value.file.content_type not in ALLOWED_IMAGES): raise ValidationError((_('Not supported image type: %s') % value.file.content_type)) (width, height) = value.file.image.size if ((width > 2000) or (height > 2000)): raise ValidationError(_('Image is too big, please scale it down or crop relevant part!'))
[ "def", "validate_bitmap", "(", "value", ")", ":", "if", "(", "value", "is", "None", ")", ":", "return", "if", "(", "value", ".", "file", ".", "content_type", "not", "in", "ALLOWED_IMAGES", ")", ":", "raise", "ValidationError", "(", "(", "_", "(", "'Not supported image type: %s'", ")", "%", "value", ".", "file", ".", "content_type", ")", ")", "(", "width", ",", "height", ")", "=", "value", ".", "file", ".", "image", ".", "size", "if", "(", "(", "width", ">", "2000", ")", "or", "(", "height", ">", "2000", ")", ")", ":", "raise", "ValidationError", "(", "_", "(", "'Image is too big, please scale it down or crop relevant part!'", ")", ")" ]
validates bitmap .
train
false
49,381
def test_uniform_basic(): (yield (check_uniform_basic, False)) (yield (check_uniform_basic, False, True)) (yield (check_uniform_basic, True))
[ "def", "test_uniform_basic", "(", ")", ":", "(", "yield", "(", "check_uniform_basic", ",", "False", ")", ")", "(", "yield", "(", "check_uniform_basic", ",", "False", ",", "True", ")", ")", "(", "yield", "(", "check_uniform_basic", ",", "True", ")", ")" ]
run the tests for uniform with different settings for the shape tuple passed in .
train
false
49,382
@api_versions.wraps('2.26') @utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) @utils.arg('tags', metavar='<tags>', nargs='+', help=_('Tag(s) to set.')) def do_server_tag_set(cs, args): server = _find_server(cs, args.server) server.set_tags(args.tags)
[ "@", "api_versions", ".", "wraps", "(", "'2.26'", ")", "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'tags'", ",", "metavar", "=", "'<tags>'", ",", "nargs", "=", "'+'", ",", "help", "=", "_", "(", "'Tag(s) to set.'", ")", ")", "def", "do_server_tag_set", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "server", ".", "set_tags", "(", "args", ".", "tags", ")" ]
set list of tags to a server .
train
false
49,383
def makedirs_with_parent_perms(p): p = os.path.abspath(p) parent = os.path.dirname(p) if ((not os.path.exists(p)) and p and (parent != p)): makedirs_with_parent_perms(parent) s = os.stat(parent) os.mkdir(p) s2 = os.stat(p) if ((s.st_uid != s2.st_uid) or (s.st_gid != s2.st_gid)): os.chown(p, s.st_uid, s.st_gid) if (s.st_mode != s2.st_mode): os.chmod(p, s.st_mode)
[ "def", "makedirs_with_parent_perms", "(", "p", ")", ":", "p", "=", "os", ".", "path", ".", "abspath", "(", "p", ")", "parent", "=", "os", ".", "path", ".", "dirname", "(", "p", ")", "if", "(", "(", "not", "os", ".", "path", ".", "exists", "(", "p", ")", ")", "and", "p", "and", "(", "parent", "!=", "p", ")", ")", ":", "makedirs_with_parent_perms", "(", "parent", ")", "s", "=", "os", ".", "stat", "(", "parent", ")", "os", ".", "mkdir", "(", "p", ")", "s2", "=", "os", ".", "stat", "(", "p", ")", "if", "(", "(", "s", ".", "st_uid", "!=", "s2", ".", "st_uid", ")", "or", "(", "s", ".", "st_gid", "!=", "s2", ".", "st_gid", ")", ")", ":", "os", ".", "chown", "(", "p", ",", "s", ".", "st_uid", ",", "s", ".", "st_gid", ")", "if", "(", "s", ".", "st_mode", "!=", "s2", ".", "st_mode", ")", ":", "os", ".", "chmod", "(", "p", ",", "s", ".", "st_mode", ")" ]
create the directory using the permissions of the nearest parent directory .
train
false
49,384
def object_summary_load(self, *args, **kwargs): response = self.meta.client.head_object(Bucket=self.bucket_name, Key=self.key) if ('ContentLength' in response): response['Size'] = response.pop('ContentLength') self.meta.data = response
[ "def", "object_summary_load", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "self", ".", "meta", ".", "client", ".", "head_object", "(", "Bucket", "=", "self", ".", "bucket_name", ",", "Key", "=", "self", ".", "key", ")", "if", "(", "'ContentLength'", "in", "response", ")", ":", "response", "[", "'Size'", "]", "=", "response", ".", "pop", "(", "'ContentLength'", ")", "self", ".", "meta", ".", "data", "=", "response" ]
calls s3 .
train
false
49,386
@core_helper def flash_notice(message, allow_html=False): flash(message, category='alert-info', allow_html=allow_html)
[ "@", "core_helper", "def", "flash_notice", "(", "message", ",", "allow_html", "=", "False", ")", ":", "flash", "(", "message", ",", "category", "=", "'alert-info'", ",", "allow_html", "=", "allow_html", ")" ]
show a flash message of type notice .
train
false
49,387
def issueTicketAndKey(srvState): log.info('Issuing new session ticket and master key.') masterKey = mycrypto.strongRandom(const.MASTER_KEY_LENGTH) newTicket = SessionTicket(masterKey, srvState).issue() return (masterKey + newTicket)
[ "def", "issueTicketAndKey", "(", "srvState", ")", ":", "log", ".", "info", "(", "'Issuing new session ticket and master key.'", ")", "masterKey", "=", "mycrypto", ".", "strongRandom", "(", "const", ".", "MASTER_KEY_LENGTH", ")", "newTicket", "=", "SessionTicket", "(", "masterKey", ",", "srvState", ")", ".", "issue", "(", ")", "return", "(", "masterKey", "+", "newTicket", ")" ]
issue a new session ticket and append it to the according master key .
train
false
49,389
@pytest.mark.parametrize('js_enabled, expected', [(True, 2.0), (False, 2.0)]) def test_element_js_webkit(webview, js_enabled, expected): from PyQt5.QtWebKit import QWebSettings webview.settings().setAttribute(QWebSettings.JavascriptEnabled, js_enabled) elem = webview.page().mainFrame().documentElement() result = elem.evaluateJavaScript('1 + 1') assert (result == expected)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'js_enabled, expected'", ",", "[", "(", "True", ",", "2.0", ")", ",", "(", "False", ",", "2.0", ")", "]", ")", "def", "test_element_js_webkit", "(", "webview", ",", "js_enabled", ",", "expected", ")", ":", "from", "PyQt5", ".", "QtWebKit", "import", "QWebSettings", "webview", ".", "settings", "(", ")", ".", "setAttribute", "(", "QWebSettings", ".", "JavascriptEnabled", ",", "js_enabled", ")", "elem", "=", "webview", ".", "page", "(", ")", ".", "mainFrame", "(", ")", ".", "documentElement", "(", ")", "result", "=", "elem", ".", "evaluateJavaScript", "(", "'1 + 1'", ")", "assert", "(", "result", "==", "expected", ")" ]
with qtwebkit .
train
false
49,390
def tex_coords(top, bottom, side): top = tex_coord(*top) bottom = tex_coord(*bottom) side = tex_coord(*side) result = [] result.extend(top) result.extend(bottom) result.extend((side * 4)) return result
[ "def", "tex_coords", "(", "top", ",", "bottom", ",", "side", ")", ":", "top", "=", "tex_coord", "(", "*", "top", ")", "bottom", "=", "tex_coord", "(", "*", "bottom", ")", "side", "=", "tex_coord", "(", "*", "side", ")", "result", "=", "[", "]", "result", ".", "extend", "(", "top", ")", "result", ".", "extend", "(", "bottom", ")", "result", ".", "extend", "(", "(", "side", "*", "4", ")", ")", "return", "result" ]
return a list of the texture squares for the top .
train
false
49,391
@requires_application() def test_capabilities(): with Canvas() as c: capabilities = c.context.shared.parser.capabilities assert (capabilities['max_texture_size'] is not None) assert (capabilities['gl_version'] != 'unknown')
[ "@", "requires_application", "(", ")", "def", "test_capabilities", "(", ")", ":", "with", "Canvas", "(", ")", "as", "c", ":", "capabilities", "=", "c", ".", "context", ".", "shared", ".", "parser", ".", "capabilities", "assert", "(", "capabilities", "[", "'max_texture_size'", "]", "is", "not", "None", ")", "assert", "(", "capabilities", "[", "'gl_version'", "]", "!=", "'unknown'", ")" ]
test glir capability reporting .
train
false
49,392
def _data_to_xml(data): xml = '' for element in data: name = element[0] val = element[1] if (len(element) > 2): converter = element[2] else: converter = None if (val is not None): if (converter is not None): text = _str(converter(_str(val))) else: text = _str(val) xml += ''.join(['<', name, '>', text, '</', name, '>']) return xml
[ "def", "_data_to_xml", "(", "data", ")", ":", "xml", "=", "''", "for", "element", "in", "data", ":", "name", "=", "element", "[", "0", "]", "val", "=", "element", "[", "1", "]", "if", "(", "len", "(", "element", ")", ">", "2", ")", ":", "converter", "=", "element", "[", "2", "]", "else", ":", "converter", "=", "None", "if", "(", "val", "is", "not", "None", ")", ":", "if", "(", "converter", "is", "not", "None", ")", ":", "text", "=", "_str", "(", "converter", "(", "_str", "(", "val", ")", ")", ")", "else", ":", "text", "=", "_str", "(", "val", ")", "xml", "+=", "''", ".", "join", "(", "[", "'<'", ",", "name", ",", "'>'", ",", "text", ",", "'</'", ",", "name", ",", "'>'", "]", ")", "return", "xml" ]
creates an xml fragment from the specified data .
train
true
49,393
def test_context_taking(): def get_canvas(c): return c.shared.ref cb = DummyCanvasBackend() c = GLContext() assert (c.shared.name is None) assert_raises(RuntimeError, get_canvas, c) assert_in('None backend', repr(c.shared)) c.shared.add_ref('test-foo', cb) assert (c.shared.ref is cb) assert_in('test-foo backend', repr(c.shared)) c.shared.add_ref('test-foo', cb) assert (len(c.shared._refs) == 2) cb = DummyCanvasBackend() gc.collect() assert_raises(RuntimeError, get_canvas, c)
[ "def", "test_context_taking", "(", ")", ":", "def", "get_canvas", "(", "c", ")", ":", "return", "c", ".", "shared", ".", "ref", "cb", "=", "DummyCanvasBackend", "(", ")", "c", "=", "GLContext", "(", ")", "assert", "(", "c", ".", "shared", ".", "name", "is", "None", ")", "assert_raises", "(", "RuntimeError", ",", "get_canvas", ",", "c", ")", "assert_in", "(", "'None backend'", ",", "repr", "(", "c", ".", "shared", ")", ")", "c", ".", "shared", ".", "add_ref", "(", "'test-foo'", ",", "cb", ")", "assert", "(", "c", ".", "shared", ".", "ref", "is", "cb", ")", "assert_in", "(", "'test-foo backend'", ",", "repr", "(", "c", ".", "shared", ")", ")", "c", ".", "shared", ".", "add_ref", "(", "'test-foo'", ",", "cb", ")", "assert", "(", "len", "(", "c", ".", "shared", ".", "_refs", ")", "==", "2", ")", "cb", "=", "DummyCanvasBackend", "(", ")", "gc", ".", "collect", "(", ")", "assert_raises", "(", "RuntimeError", ",", "get_canvas", ",", "c", ")" ]
test glcontext ownership and taking .
train
false
49,394
def get_backend(): return os.environ.get('BACKEND_ID', None)
[ "def", "get_backend", "(", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "'BACKEND_ID'", ",", "None", ")" ]
get the name of the backend handling this request .
train
false
49,395
def Summarize(live, firsts, others): mean = live.prglngth.mean() var = live.prglngth.var() std = live.prglngth.std() print('Live mean', mean) print('Live variance', var) print('Live std', std) mean1 = firsts.prglngth.mean() mean2 = others.prglngth.mean() var1 = firsts.prglngth.var() var2 = others.prglngth.var() print('Mean') print('First babies', mean1) print('Others', mean2) print('Variance') print('First babies', var1) print('Others', var2) print('Difference in weeks', (mean1 - mean2)) print('Difference in hours', (((mean1 - mean2) * 7) * 24)) print('Difference relative to 39 weeks', (((mean1 - mean2) / 39) * 100)) d = thinkstats2.CohenEffectSize(firsts.prglngth, others.prglngth) print('Cohen d', d)
[ "def", "Summarize", "(", "live", ",", "firsts", ",", "others", ")", ":", "mean", "=", "live", ".", "prglngth", ".", "mean", "(", ")", "var", "=", "live", ".", "prglngth", ".", "var", "(", ")", "std", "=", "live", ".", "prglngth", ".", "std", "(", ")", "print", "(", "'Live mean'", ",", "mean", ")", "print", "(", "'Live variance'", ",", "var", ")", "print", "(", "'Live std'", ",", "std", ")", "mean1", "=", "firsts", ".", "prglngth", ".", "mean", "(", ")", "mean2", "=", "others", ".", "prglngth", ".", "mean", "(", ")", "var1", "=", "firsts", ".", "prglngth", ".", "var", "(", ")", "var2", "=", "others", ".", "prglngth", ".", "var", "(", ")", "print", "(", "'Mean'", ")", "print", "(", "'First babies'", ",", "mean1", ")", "print", "(", "'Others'", ",", "mean2", ")", "print", "(", "'Variance'", ")", "print", "(", "'First babies'", ",", "var1", ")", "print", "(", "'Others'", ",", "var2", ")", "print", "(", "'Difference in weeks'", ",", "(", "mean1", "-", "mean2", ")", ")", "print", "(", "'Difference in hours'", ",", "(", "(", "(", "mean1", "-", "mean2", ")", "*", "7", ")", "*", "24", ")", ")", "print", "(", "'Difference relative to 39 weeks'", ",", "(", "(", "(", "mean1", "-", "mean2", ")", "/", "39", ")", "*", "100", ")", ")", "d", "=", "thinkstats2", ".", "CohenEffectSize", "(", "firsts", ".", "prglngth", ",", "others", ".", "prglngth", ")", "print", "(", "'Cohen d'", ",", "d", ")" ]
print various summary statistics .
train
false
49,396
def doc(phenny, input): name = input.group(1) name = name.lower() if phenny.doc.has_key(name): phenny.reply(phenny.doc[name][0]) if phenny.doc[name][1]: phenny.say(('e.g. ' + phenny.doc[name][1]))
[ "def", "doc", "(", "phenny", ",", "input", ")", ":", "name", "=", "input", ".", "group", "(", "1", ")", "name", "=", "name", ".", "lower", "(", ")", "if", "phenny", ".", "doc", ".", "has_key", "(", "name", ")", ":", "phenny", ".", "reply", "(", "phenny", ".", "doc", "[", "name", "]", "[", "0", "]", ")", "if", "phenny", ".", "doc", "[", "name", "]", "[", "1", "]", ":", "phenny", ".", "say", "(", "(", "'e.g. '", "+", "phenny", ".", "doc", "[", "name", "]", "[", "1", "]", ")", ")" ]
shows a commands documentation .
train
false
49,397
def alert_recipient(): s3.prep = (lambda r: ((r.method == 'options') and (r.representation == 's3json'))) return s3_rest_controller()
[ "def", "alert_recipient", "(", ")", ":", "s3", ".", "prep", "=", "(", "lambda", "r", ":", "(", "(", "r", ".", "method", "==", "'options'", ")", "and", "(", "r", ".", "representation", "==", "'s3json'", ")", ")", ")", "return", "s3_rest_controller", "(", ")" ]
restful crud controller for options .
train
false
49,398
def _setup_cmap(cmap, n_axes=1, norm=False): if (cmap == 'interactive'): cmap = (('Reds' if norm else 'RdBu_r'), True) elif (not isinstance(cmap, tuple)): if (cmap is None): cmap = ('Reds' if norm else 'RdBu_r') cmap = (cmap, (False if (n_axes > 2) else True)) return cmap
[ "def", "_setup_cmap", "(", "cmap", ",", "n_axes", "=", "1", ",", "norm", "=", "False", ")", ":", "if", "(", "cmap", "==", "'interactive'", ")", ":", "cmap", "=", "(", "(", "'Reds'", "if", "norm", "else", "'RdBu_r'", ")", ",", "True", ")", "elif", "(", "not", "isinstance", "(", "cmap", ",", "tuple", ")", ")", ":", "if", "(", "cmap", "is", "None", ")", ":", "cmap", "=", "(", "'Reds'", "if", "norm", "else", "'RdBu_r'", ")", "cmap", "=", "(", "cmap", ",", "(", "False", "if", "(", "n_axes", ">", "2", ")", "else", "True", ")", ")", "return", "cmap" ]
function for setting color map interactivity .
train
false
49,399
def zip(*args, **kwargs): args = [list(iterable) for iterable in args] n = max(map(len, args)) v = kwargs.get('default', None) return _zip(*[(i + ([v] * (n - len(i)))) for i in args])
[ "def", "zip", "(", "*", "args", ",", "**", "kwargs", ")", ":", "args", "=", "[", "list", "(", "iterable", ")", "for", "iterable", "in", "args", "]", "n", "=", "max", "(", "map", "(", "len", ",", "args", ")", ")", "v", "=", "kwargs", ".", "get", "(", "'default'", ",", "None", ")", "return", "_zip", "(", "*", "[", "(", "i", "+", "(", "[", "v", "]", "*", "(", "n", "-", "len", "(", "i", ")", ")", ")", ")", "for", "i", "in", "args", "]", ")" ]
returns a list of tuples .
train
true
49,400
@verbose def _band_pass_filter(ica, sources, target, l_freq, h_freq, verbose=None): if ((l_freq is not None) and (h_freq is not None)): logger.info('... filtering ICA sources') kw = dict(phase='zero-double', filter_length='10s', fir_window='hann', l_trans_bandwidth=0.5, h_trans_bandwidth=0.5) sources = filter_data(sources, ica.info['sfreq'], l_freq, h_freq, **kw) logger.info('... filtering target') target = filter_data(target, ica.info['sfreq'], l_freq, h_freq, **kw) elif ((l_freq is not None) or (h_freq is not None)): raise ValueError('Must specify both pass bands') return (sources, target)
[ "@", "verbose", "def", "_band_pass_filter", "(", "ica", ",", "sources", ",", "target", ",", "l_freq", ",", "h_freq", ",", "verbose", "=", "None", ")", ":", "if", "(", "(", "l_freq", "is", "not", "None", ")", "and", "(", "h_freq", "is", "not", "None", ")", ")", ":", "logger", ".", "info", "(", "'... filtering ICA sources'", ")", "kw", "=", "dict", "(", "phase", "=", "'zero-double'", ",", "filter_length", "=", "'10s'", ",", "fir_window", "=", "'hann'", ",", "l_trans_bandwidth", "=", "0.5", ",", "h_trans_bandwidth", "=", "0.5", ")", "sources", "=", "filter_data", "(", "sources", ",", "ica", ".", "info", "[", "'sfreq'", "]", ",", "l_freq", ",", "h_freq", ",", "**", "kw", ")", "logger", ".", "info", "(", "'... filtering target'", ")", "target", "=", "filter_data", "(", "target", ",", "ica", ".", "info", "[", "'sfreq'", "]", ",", "l_freq", ",", "h_freq", ",", "**", "kw", ")", "elif", "(", "(", "l_freq", "is", "not", "None", ")", "or", "(", "h_freq", "is", "not", "None", ")", ")", ":", "raise", "ValueError", "(", "'Must specify both pass bands'", ")", "return", "(", "sources", ",", "target", ")" ]
optionally band-pass filter the data .
train
false
49,401
def check_bar_match(old_bar, new_bar): tests = [] tests += ((new_bar['orientation'] == old_bar['orientation']),) tests += ((new_bar['facecolor'] == old_bar['facecolor']),) if (new_bar['orientation'] == 'v'): new_width = (new_bar['x1'] - new_bar['x0']) old_width = (old_bar['x1'] - old_bar['x0']) tests += (((new_width - old_width) < 1e-06),) tests += ((new_bar['y0'] == old_bar['y0']),) elif (new_bar['orientation'] == 'h'): new_height = (new_bar['y1'] - new_bar['y0']) old_height = (old_bar['y1'] - old_bar['y0']) tests += (((new_height - old_height) < 1e-06),) tests += ((new_bar['x0'] == old_bar['x0']),) if all(tests): return True else: return False
[ "def", "check_bar_match", "(", "old_bar", ",", "new_bar", ")", ":", "tests", "=", "[", "]", "tests", "+=", "(", "(", "new_bar", "[", "'orientation'", "]", "==", "old_bar", "[", "'orientation'", "]", ")", ",", ")", "tests", "+=", "(", "(", "new_bar", "[", "'facecolor'", "]", "==", "old_bar", "[", "'facecolor'", "]", ")", ",", ")", "if", "(", "new_bar", "[", "'orientation'", "]", "==", "'v'", ")", ":", "new_width", "=", "(", "new_bar", "[", "'x1'", "]", "-", "new_bar", "[", "'x0'", "]", ")", "old_width", "=", "(", "old_bar", "[", "'x1'", "]", "-", "old_bar", "[", "'x0'", "]", ")", "tests", "+=", "(", "(", "(", "new_width", "-", "old_width", ")", "<", "1e-06", ")", ",", ")", "tests", "+=", "(", "(", "new_bar", "[", "'y0'", "]", "==", "old_bar", "[", "'y0'", "]", ")", ",", ")", "elif", "(", "new_bar", "[", "'orientation'", "]", "==", "'h'", ")", ":", "new_height", "=", "(", "new_bar", "[", "'y1'", "]", "-", "new_bar", "[", "'y0'", "]", ")", "old_height", "=", "(", "old_bar", "[", "'y1'", "]", "-", "old_bar", "[", "'y0'", "]", ")", "tests", "+=", "(", "(", "(", "new_height", "-", "old_height", ")", "<", "1e-06", ")", ",", ")", "tests", "+=", "(", "(", "new_bar", "[", "'x0'", "]", "==", "old_bar", "[", "'x0'", "]", ")", ",", ")", "if", "all", "(", "tests", ")", ":", "return", "True", "else", ":", "return", "False" ]
check if two bars belong in the same collection .
train
false
49,402
def create_vbd(session, vm_ref, vdi_ref, userdevice, vbd_type='disk', read_only=False, bootable=False, osvol=False): vbd_rec = {} vbd_rec['VM'] = vm_ref vbd_rec['VDI'] = vdi_ref vbd_rec['userdevice'] = str(userdevice) vbd_rec['bootable'] = bootable vbd_rec['mode'] = ((read_only and 'RO') or 'RW') vbd_rec['type'] = vbd_type vbd_rec['unpluggable'] = True vbd_rec['empty'] = False vbd_rec['other_config'] = {} vbd_rec['qos_algorithm_type'] = '' vbd_rec['qos_algorithm_params'] = {} vbd_rec['qos_supported_algorithms'] = [] LOG.debug(_('Creating %(vbd_type)s-type VBD for VM %(vm_ref)s, VDI %(vdi_ref)s ... '), locals()) vbd_ref = session.call_xenapi('VBD.create', vbd_rec) LOG.debug(_('Created VBD %(vbd_ref)s for VM %(vm_ref)s, VDI %(vdi_ref)s.'), locals()) if osvol: session.call_xenapi('VBD.add_to_other_config', vbd_ref, 'osvol', 'True') return vbd_ref
[ "def", "create_vbd", "(", "session", ",", "vm_ref", ",", "vdi_ref", ",", "userdevice", ",", "vbd_type", "=", "'disk'", ",", "read_only", "=", "False", ",", "bootable", "=", "False", ",", "osvol", "=", "False", ")", ":", "vbd_rec", "=", "{", "}", "vbd_rec", "[", "'VM'", "]", "=", "vm_ref", "vbd_rec", "[", "'VDI'", "]", "=", "vdi_ref", "vbd_rec", "[", "'userdevice'", "]", "=", "str", "(", "userdevice", ")", "vbd_rec", "[", "'bootable'", "]", "=", "bootable", "vbd_rec", "[", "'mode'", "]", "=", "(", "(", "read_only", "and", "'RO'", ")", "or", "'RW'", ")", "vbd_rec", "[", "'type'", "]", "=", "vbd_type", "vbd_rec", "[", "'unpluggable'", "]", "=", "True", "vbd_rec", "[", "'empty'", "]", "=", "False", "vbd_rec", "[", "'other_config'", "]", "=", "{", "}", "vbd_rec", "[", "'qos_algorithm_type'", "]", "=", "''", "vbd_rec", "[", "'qos_algorithm_params'", "]", "=", "{", "}", "vbd_rec", "[", "'qos_supported_algorithms'", "]", "=", "[", "]", "LOG", ".", "debug", "(", "_", "(", "'Creating %(vbd_type)s-type VBD for VM %(vm_ref)s, VDI %(vdi_ref)s ... '", ")", ",", "locals", "(", ")", ")", "vbd_ref", "=", "session", ".", "call_xenapi", "(", "'VBD.create'", ",", "vbd_rec", ")", "LOG", ".", "debug", "(", "_", "(", "'Created VBD %(vbd_ref)s for VM %(vm_ref)s, VDI %(vdi_ref)s.'", ")", ",", "locals", "(", ")", ")", "if", "osvol", ":", "session", ".", "call_xenapi", "(", "'VBD.add_to_other_config'", ",", "vbd_ref", ",", "'osvol'", ",", "'True'", ")", "return", "vbd_ref" ]
create a vbd record and returns its reference .
train
false
49,404
def validate_extends_file_path(service_name, extends_options, filename): error_prefix = (u"Invalid 'extends' configuration for %s:" % service_name) if ((u'file' not in extends_options) and (filename is None)): raise ConfigurationError((u"%s you need to specify a 'file', e.g. 'file: something.yml'" % error_prefix))
[ "def", "validate_extends_file_path", "(", "service_name", ",", "extends_options", ",", "filename", ")", ":", "error_prefix", "=", "(", "u\"Invalid 'extends' configuration for %s:\"", "%", "service_name", ")", "if", "(", "(", "u'file'", "not", "in", "extends_options", ")", "and", "(", "filename", "is", "None", ")", ")", ":", "raise", "ConfigurationError", "(", "(", "u\"%s you need to specify a 'file', e.g. 'file: something.yml'\"", "%", "error_prefix", ")", ")" ]
the service to be extended must either be defined in the config key file .
train
false
49,405
def get_weekly_dashboard_stats(user_id): model = user_models.UserStatsModel.get(user_id, strict=False) if (model and model.weekly_creator_stats_list): return model.weekly_creator_stats_list else: return None
[ "def", "get_weekly_dashboard_stats", "(", "user_id", ")", ":", "model", "=", "user_models", ".", "UserStatsModel", ".", "get", "(", "user_id", ",", "strict", "=", "False", ")", "if", "(", "model", "and", "model", ".", "weekly_creator_stats_list", ")", ":", "return", "model", ".", "weekly_creator_stats_list", "else", ":", "return", "None" ]
gets weekly dashboard stats for a given user_id .
train
false
49,406
@register.assignment_tag def has_usable_review_ui(user, review_request, file_attachment): review_ui = file_attachment.review_ui try: return (review_ui and review_ui.is_enabled_for(user=user, review_request=review_request, file_attachment=file_attachment)) except Exception as e: logging.error(u'Error when calling is_enabled_for FileAttachmentReviewUI %r: %s', review_ui, e, exc_info=1) return False
[ "@", "register", ".", "assignment_tag", "def", "has_usable_review_ui", "(", "user", ",", "review_request", ",", "file_attachment", ")", ":", "review_ui", "=", "file_attachment", ".", "review_ui", "try", ":", "return", "(", "review_ui", "and", "review_ui", ".", "is_enabled_for", "(", "user", "=", "user", ",", "review_request", "=", "review_request", ",", "file_attachment", "=", "file_attachment", ")", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "u'Error when calling is_enabled_for FileAttachmentReviewUI %r: %s'", ",", "review_ui", ",", "e", ",", "exc_info", "=", "1", ")", "return", "False" ]
returns whether a review ui is set and can be used .
train
false
49,407
def all_pairs_matching_predicate(values, pred): return filter((lambda pair: pred(*pair)), product(values, repeat=2))
[ "def", "all_pairs_matching_predicate", "(", "values", ",", "pred", ")", ":", "return", "filter", "(", "(", "lambda", "pair", ":", "pred", "(", "*", "pair", ")", ")", ",", "product", "(", "values", ",", "repeat", "=", "2", ")", ")" ]
return an iterator of all pairs .
train
false
49,408
def has_parameter(param, params): return params.has_key(param)
[ "def", "has_parameter", "(", "param", ",", "params", ")", ":", "return", "params", ".", "has_key", "(", "param", ")" ]
checks whether the parameter param is present in the params map .
train
false
49,409
def _path(from_object, to_object): if (from_object._root != to_object._root): raise ValueError(((('No connecting path found between ' + str(from_object)) + ' and ') + str(to_object))) other_path = [] obj = to_object while (obj._parent is not None): other_path.append(obj) obj = obj._parent other_path.append(obj) object_set = set(other_path) from_path = [] obj = from_object while (obj not in object_set): from_path.append(obj) obj = obj._parent index = len(from_path) i = other_path.index(obj) while (i >= 0): from_path.append(other_path[i]) i -= 1 return (index, from_path)
[ "def", "_path", "(", "from_object", ",", "to_object", ")", ":", "if", "(", "from_object", ".", "_root", "!=", "to_object", ".", "_root", ")", ":", "raise", "ValueError", "(", "(", "(", "(", "'No connecting path found between '", "+", "str", "(", "from_object", ")", ")", "+", "' and '", ")", "+", "str", "(", "to_object", ")", ")", ")", "other_path", "=", "[", "]", "obj", "=", "to_object", "while", "(", "obj", ".", "_parent", "is", "not", "None", ")", ":", "other_path", ".", "append", "(", "obj", ")", "obj", "=", "obj", ".", "_parent", "other_path", ".", "append", "(", "obj", ")", "object_set", "=", "set", "(", "other_path", ")", "from_path", "=", "[", "]", "obj", "=", "from_object", "while", "(", "obj", "not", "in", "object_set", ")", ":", "from_path", ".", "append", "(", "obj", ")", "obj", "=", "obj", ".", "_parent", "index", "=", "len", "(", "from_path", ")", "i", "=", "other_path", ".", "index", "(", "obj", ")", "while", "(", "i", ">=", "0", ")", ":", "from_path", ".", "append", "(", "other_path", "[", "i", "]", ")", "i", "-=", "1", "return", "(", "index", ",", "from_path", ")" ]
calculates the path of objects starting from from_object to to_object .
train
false
49,410
def mk_body(**kwargs): return json.dumps(kwargs, ensure_ascii=False)
[ "def", "mk_body", "(", "**", "kwargs", ")", ":", "return", "json", ".", "dumps", "(", "kwargs", ",", "ensure_ascii", "=", "False", ")" ]
convenience function creates and dumps dictionary to string .
train
false
49,411
def secure_pad(buf): key = urandom(5) buf = (bytes([19, 51, 123, 238, 240]) + buf) buf = (buf + urandom((16 - (len(buf) % 16)))) enc = xor(buf, key) return enc
[ "def", "secure_pad", "(", "buf", ")", ":", "key", "=", "urandom", "(", "5", ")", "buf", "=", "(", "bytes", "(", "[", "19", ",", "51", ",", "123", ",", "238", ",", "240", "]", ")", "+", "buf", ")", "buf", "=", "(", "buf", "+", "urandom", "(", "(", "16", "-", "(", "len", "(", "buf", ")", "%", "16", ")", ")", ")", ")", "enc", "=", "xor", "(", "buf", ",", "key", ")", "return", "enc" ]
ensure message is padded to block size .
train
false
49,412
def s_checksum(block_name, algorithm='crc32', length=0, endian='<', name=None): if (block_name in blocks.CURRENT.block_stack): raise sex.SullyRuntimeError('CAN N0T ADD A CHECKSUM FOR A BLOCK CURRENTLY IN THE STACK') checksum = blocks.checksum(block_name, blocks.CURRENT, algorithm, length, endian, name) blocks.CURRENT.push(checksum)
[ "def", "s_checksum", "(", "block_name", ",", "algorithm", "=", "'crc32'", ",", "length", "=", "0", ",", "endian", "=", "'<'", ",", "name", "=", "None", ")", ":", "if", "(", "block_name", "in", "blocks", ".", "CURRENT", ".", "block_stack", ")", ":", "raise", "sex", ".", "SullyRuntimeError", "(", "'CAN N0T ADD A CHECKSUM FOR A BLOCK CURRENTLY IN THE STACK'", ")", "checksum", "=", "blocks", ".", "checksum", "(", "block_name", ",", "blocks", ".", "CURRENT", ",", "algorithm", ",", "length", ",", "endian", ",", "name", ")", "blocks", ".", "CURRENT", ".", "push", "(", "checksum", ")" ]
create a checksum block bound to the block with the specified name .
train
false
49,413
def transport_rheader(r, tabs=[]): if (r.representation != 'html'): return None settings = current.deployment_settings s3db = current.s3db (tablename, record) = s3_rheader_resource(r) table = s3db.table(tablename) rheader = None rheader_fields = [] if record: T = current.T if (tablename == 'transport_border_crossing'): if (not tabs): tabs = [(T('Details'), None), (T('Control Points'), 'border_control_point')] rheader_fields = [['name'], ['location_id']] else: if (not tabs): tabs = [(T('Details'), None)] rheader_fields = [['name'], ['location_id']] if settings.has_module('req'): tabs.extend(s3db.req_tabs(r)) if settings.has_module('inv'): tabs.extend(s3db.inv_tabs(r)) rheader = S3ResourceHeader(rheader_fields, tabs)(r, table=table, record=record) return rheader
[ "def", "transport_rheader", "(", "r", ",", "tabs", "=", "[", "]", ")", ":", "if", "(", "r", ".", "representation", "!=", "'html'", ")", ":", "return", "None", "settings", "=", "current", ".", "deployment_settings", "s3db", "=", "current", ".", "s3db", "(", "tablename", ",", "record", ")", "=", "s3_rheader_resource", "(", "r", ")", "table", "=", "s3db", ".", "table", "(", "tablename", ")", "rheader", "=", "None", "rheader_fields", "=", "[", "]", "if", "record", ":", "T", "=", "current", ".", "T", "if", "(", "tablename", "==", "'transport_border_crossing'", ")", ":", "if", "(", "not", "tabs", ")", ":", "tabs", "=", "[", "(", "T", "(", "'Details'", ")", ",", "None", ")", ",", "(", "T", "(", "'Control Points'", ")", ",", "'border_control_point'", ")", "]", "rheader_fields", "=", "[", "[", "'name'", "]", ",", "[", "'location_id'", "]", "]", "else", ":", "if", "(", "not", "tabs", ")", ":", "tabs", "=", "[", "(", "T", "(", "'Details'", ")", ",", "None", ")", "]", "rheader_fields", "=", "[", "[", "'name'", "]", ",", "[", "'location_id'", "]", "]", "if", "settings", ".", "has_module", "(", "'req'", ")", ":", "tabs", ".", "extend", "(", "s3db", ".", "req_tabs", "(", "r", ")", ")", "if", "settings", ".", "has_module", "(", "'inv'", ")", ":", "tabs", ".", "extend", "(", "s3db", ".", "inv_tabs", "(", "r", ")", ")", "rheader", "=", "S3ResourceHeader", "(", "rheader_fields", ",", "tabs", ")", "(", "r", ",", "table", "=", "table", ",", "record", "=", "record", ")", "return", "rheader" ]
transport module resource headers .
train
false
49,415
def get_unique_value(type, id): return {u'File': os.path.abspath(create_tempfile()), u'Boolean': True, u'Number': abs(hash(id)), u'String': id}[type]
[ "def", "get_unique_value", "(", "type", ",", "id", ")", ":", "return", "{", "u'File'", ":", "os", ".", "path", ".", "abspath", "(", "create_tempfile", "(", ")", ")", ",", "u'Boolean'", ":", "True", ",", "u'Number'", ":", "abs", "(", "hash", "(", "id", ")", ")", ",", "u'String'", ":", "id", "}", "[", "type", "]" ]
returns a unique value of type type .
train
false
49,416
def _check_maxshield(allow_maxshield): msg = 'This file contains raw Internal Active Shielding data. It may be distorted. Elekta recommends it be run through MaxFilter to produce reliable results. Consider closing the file and running MaxFilter on the data.' if allow_maxshield: if (not (isinstance(allow_maxshield, string_types) and (allow_maxshield == 'yes'))): warn(msg) allow_maxshield = 'yes' else: msg += ' Use allow_maxshield=True if you are sure you want to load the data despite this warning.' raise ValueError(msg)
[ "def", "_check_maxshield", "(", "allow_maxshield", ")", ":", "msg", "=", "'This file contains raw Internal Active Shielding data. It may be distorted. Elekta recommends it be run through MaxFilter to produce reliable results. Consider closing the file and running MaxFilter on the data.'", "if", "allow_maxshield", ":", "if", "(", "not", "(", "isinstance", "(", "allow_maxshield", ",", "string_types", ")", "and", "(", "allow_maxshield", "==", "'yes'", ")", ")", ")", ":", "warn", "(", "msg", ")", "allow_maxshield", "=", "'yes'", "else", ":", "msg", "+=", "' Use allow_maxshield=True if you are sure you want to load the data despite this warning.'", "raise", "ValueError", "(", "msg", ")" ]
warn or error about maxshield .
train
false
49,417
def response_redirect(redirect_url, script_redirect=False): if script_redirect: return ScriptRedirect(redirect_url) return HttpResponseRedirect(redirect_url)
[ "def", "response_redirect", "(", "redirect_url", ",", "script_redirect", "=", "False", ")", ":", "if", "script_redirect", ":", "return", "ScriptRedirect", "(", "redirect_url", ")", "return", "HttpResponseRedirect", "(", "redirect_url", ")" ]
abstract away canvas redirects .
train
false
49,418
def find_lang_postfix(project, filename): name = os.path.splitext(os.path.basename(filename))[0] if LANGCODE_RE.match(name): return project.lang_mapper.get_pootle_code(name) match = LANGCODE_POSTFIX_RE.match(name) if match: return project.lang_mapper.get_pootle_code(match.groups()[0]) for code in Language.objects.values_list('code', flat=True): code = project.lang_mapper.get_upstream_code(code) if (name.endswith(('-' + code)) or name.endswith(('_' + code)) or name.endswith(('.' + code)) or name.lower().endswith(('-' + code.lower())) or name.endswith(('_' + code)) or name.endswith(('.' + code))): return code
[ "def", "find_lang_postfix", "(", "project", ",", "filename", ")", ":", "name", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "[", "0", "]", "if", "LANGCODE_RE", ".", "match", "(", "name", ")", ":", "return", "project", ".", "lang_mapper", ".", "get_pootle_code", "(", "name", ")", "match", "=", "LANGCODE_POSTFIX_RE", ".", "match", "(", "name", ")", "if", "match", ":", "return", "project", ".", "lang_mapper", ".", "get_pootle_code", "(", "match", ".", "groups", "(", ")", "[", "0", "]", ")", "for", "code", "in", "Language", ".", "objects", ".", "values_list", "(", "'code'", ",", "flat", "=", "True", ")", ":", "code", "=", "project", ".", "lang_mapper", ".", "get_upstream_code", "(", "code", ")", "if", "(", "name", ".", "endswith", "(", "(", "'-'", "+", "code", ")", ")", "or", "name", ".", "endswith", "(", "(", "'_'", "+", "code", ")", ")", "or", "name", ".", "endswith", "(", "(", "'.'", "+", "code", ")", ")", "or", "name", ".", "lower", "(", ")", ".", "endswith", "(", "(", "'-'", "+", "code", ".", "lower", "(", ")", ")", ")", "or", "name", ".", "endswith", "(", "(", "'_'", "+", "code", ")", ")", "or", "name", ".", "endswith", "(", "(", "'.'", "+", "code", ")", ")", ")", ":", "return", "code" ]
finds the language code at end of a filename .
train
false
49,419
def apply_rebroadcast_opt(rval): changed = True while (changed and rval.owner): changed = False rval2 = theano.tensor.opt.local_useless_rebroadcast.transform(rval.owner) if rval2: assert (len(rval2) == 1) rval = rval2[0] changed = True if rval.owner: rval2 = theano.tensor.opt.local_rebroadcast_lift.transform(rval.owner) if rval2: assert (len(rval2) == 1) rval = rval2[0] changed = True return rval
[ "def", "apply_rebroadcast_opt", "(", "rval", ")", ":", "changed", "=", "True", "while", "(", "changed", "and", "rval", ".", "owner", ")", ":", "changed", "=", "False", "rval2", "=", "theano", ".", "tensor", ".", "opt", ".", "local_useless_rebroadcast", ".", "transform", "(", "rval", ".", "owner", ")", "if", "rval2", ":", "assert", "(", "len", "(", "rval2", ")", "==", "1", ")", "rval", "=", "rval2", "[", "0", "]", "changed", "=", "True", "if", "rval", ".", "owner", ":", "rval2", "=", "theano", ".", "tensor", ".", "opt", ".", "local_rebroadcast_lift", ".", "transform", "(", "rval", ".", "owner", ")", "if", "rval2", ":", "assert", "(", "len", "(", "rval2", ")", "==", "1", ")", "rval", "=", "rval2", "[", "0", "]", "changed", "=", "True", "return", "rval" ]
apply as many times as required the optimization local_useless_rebroadcast and local_rebroadcast_lift .
train
false
49,420
@gen.coroutine def LoadSystemUsers(client): @gen.coroutine def _LoadUser(user_dict): identity_key = ('Email:%s' % user_dict['email']) identity = (yield gen.Task(Identity.Query, client, identity_key, None, must_exist=False)) if ((identity is None) or (identity.user_id is None)): raise gen.Return(None) user = (yield gen.Task(User.Query, client, identity.user_id, None)) raise gen.Return(user) global NARRATOR_USER NARRATOR_USER = (yield _LoadUser(_NARRATOR_USER_DICT)) if (NARRATOR_USER is not None): (yield _SetWelcomeIds(NARRATOR_USER, NARRATOR_UPLOAD_PHOTOS)) (yield _SetWelcomeIds(NARRATOR_USER, NARRATOR_UPLOAD_PHOTOS_2)) (yield _SetWelcomeIds(NARRATOR_USER, NARRATOR_UPLOAD_PHOTOS_3))
[ "@", "gen", ".", "coroutine", "def", "LoadSystemUsers", "(", "client", ")", ":", "@", "gen", ".", "coroutine", "def", "_LoadUser", "(", "user_dict", ")", ":", "identity_key", "=", "(", "'Email:%s'", "%", "user_dict", "[", "'email'", "]", ")", "identity", "=", "(", "yield", "gen", ".", "Task", "(", "Identity", ".", "Query", ",", "client", ",", "identity_key", ",", "None", ",", "must_exist", "=", "False", ")", ")", "if", "(", "(", "identity", "is", "None", ")", "or", "(", "identity", ".", "user_id", "is", "None", ")", ")", ":", "raise", "gen", ".", "Return", "(", "None", ")", "user", "=", "(", "yield", "gen", ".", "Task", "(", "User", ".", "Query", ",", "client", ",", "identity", ".", "user_id", ",", "None", ")", ")", "raise", "gen", ".", "Return", "(", "user", ")", "global", "NARRATOR_USER", "NARRATOR_USER", "=", "(", "yield", "_LoadUser", "(", "_NARRATOR_USER_DICT", ")", ")", "if", "(", "NARRATOR_USER", "is", "not", "None", ")", ":", "(", "yield", "_SetWelcomeIds", "(", "NARRATOR_USER", ",", "NARRATOR_UPLOAD_PHOTOS", ")", ")", "(", "yield", "_SetWelcomeIds", "(", "NARRATOR_USER", ",", "NARRATOR_UPLOAD_PHOTOS_2", ")", ")", "(", "yield", "_SetWelcomeIds", "(", "NARRATOR_USER", ",", "NARRATOR_UPLOAD_PHOTOS_3", ")", ")" ]
loads all system users into memory before the server starts .
train
false
49,421
def iam_profile(vm_): return config.get_cloud_config_value('iam_profile', vm_, __opts__, search_global=False)
[ "def", "iam_profile", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'iam_profile'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")" ]
return the iam profile .
train
false
49,422
def _is_leader(locale, user): from kitsune.wiki.models import Locale try: locale_team = Locale.objects.get(locale=locale) except Locale.DoesNotExist: log.warning(('Locale not created for %s' % locale)) return False return (user in locale_team.leaders.all())
[ "def", "_is_leader", "(", "locale", ",", "user", ")", ":", "from", "kitsune", ".", "wiki", ".", "models", "import", "Locale", "try", ":", "locale_team", "=", "Locale", ".", "objects", ".", "get", "(", "locale", "=", "locale", ")", "except", "Locale", ".", "DoesNotExist", ":", "log", ".", "warning", "(", "(", "'Locale not created for %s'", "%", "locale", ")", ")", "return", "False", "return", "(", "user", "in", "locale_team", ".", "leaders", ".", "all", "(", ")", ")" ]
checks if the user is a leader for the given locale .
train
false
49,423
def with_scopes(credentials, scopes): if (HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials)): return google.auth.credentials.with_scopes_if_required(credentials, scopes) else: try: if credentials.create_scoped_required(): return credentials.create_scoped(scopes) else: return credentials except AttributeError: return credentials
[ "def", "with_scopes", "(", "credentials", ",", "scopes", ")", ":", "if", "(", "HAS_GOOGLE_AUTH", "and", "isinstance", "(", "credentials", ",", "google", ".", "auth", ".", "credentials", ".", "Credentials", ")", ")", ":", "return", "google", ".", "auth", ".", "credentials", ".", "with_scopes_if_required", "(", "credentials", ",", "scopes", ")", "else", ":", "try", ":", "if", "credentials", ".", "create_scoped_required", "(", ")", ":", "return", "credentials", ".", "create_scoped", "(", "scopes", ")", "else", ":", "return", "credentials", "except", "AttributeError", ":", "return", "credentials" ]
scopes the credentials if necessary .
train
false
49,425
@open_tab def extract_items(data, socket): (project, spider, sample) = (data['project'], data['spider'], data.get('sample')) if (not all((project, spider))): return {'type': 'raw'} c = ItemChecker(socket, project, spider, sample) (items, changes, changed_values, links) = c.extract() return {'links': links, 'items': items, 'changes': changes, 'changed': changed_values, 'type': ('js' if c.using_js else 'raw')}
[ "@", "open_tab", "def", "extract_items", "(", "data", ",", "socket", ")", ":", "(", "project", ",", "spider", ",", "sample", ")", "=", "(", "data", "[", "'project'", "]", ",", "data", "[", "'spider'", "]", ",", "data", ".", "get", "(", "'sample'", ")", ")", "if", "(", "not", "all", "(", "(", "project", ",", "spider", ")", ")", ")", ":", "return", "{", "'type'", ":", "'raw'", "}", "c", "=", "ItemChecker", "(", "socket", ",", "project", ",", "spider", ",", "sample", ")", "(", "items", ",", "changes", ",", "changed_values", ",", "links", ")", "=", "c", ".", "extract", "(", ")", "return", "{", "'links'", ":", "links", ",", "'items'", ":", "items", ",", "'changes'", ":", "changes", ",", "'changed'", ":", "changed_values", ",", "'type'", ":", "(", "'js'", "if", "c", ".", "using_js", "else", "'raw'", ")", "}" ]
use latest annotations to extract items from current page .
train
false
49,426
def test_ncr_fit_sample_with_indices(): ncr = NeighbourhoodCleaningRule(return_indices=True, random_state=RND_SEED) (X_resampled, y_resampled, idx_under) = ncr.fit_sample(X, Y) X_gt = np.array([[(-1.20809175), (-1.49917302)], [(-0.60497017), (-0.66630228)], [(-0.91735824), 0.93110278], [(-0.20413357), 0.64628718], [0.35967591, 2.61186964], [(-1.55581933), 1.09609604], [1.55157493, (-1.6981518)]]) y_gt = np.array([0, 0, 1, 1, 2, 1, 2]) idx_gt = np.array([10, 11, 3, 5, 7, 13, 14]) assert_array_equal(X_resampled, X_gt) assert_array_equal(y_resampled, y_gt) assert_array_equal(idx_under, idx_gt)
[ "def", "test_ncr_fit_sample_with_indices", "(", ")", ":", "ncr", "=", "NeighbourhoodCleaningRule", "(", "return_indices", "=", "True", ",", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ",", "idx_under", ")", "=", "ncr", ".", "fit_sample", "(", "X", ",", "Y", ")", "X_gt", "=", "np", ".", "array", "(", "[", "[", "(", "-", "1.20809175", ")", ",", "(", "-", "1.49917302", ")", "]", ",", "[", "(", "-", "0.60497017", ")", ",", "(", "-", "0.66630228", ")", "]", ",", "[", "(", "-", "0.91735824", ")", ",", "0.93110278", "]", ",", "[", "(", "-", "0.20413357", ")", ",", "0.64628718", "]", ",", "[", "0.35967591", ",", "2.61186964", "]", ",", "[", "(", "-", "1.55581933", ")", ",", "1.09609604", "]", ",", "[", "1.55157493", ",", "(", "-", "1.6981518", ")", "]", "]", ")", "y_gt", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "1", ",", "1", ",", "2", ",", "1", ",", "2", "]", ")", "idx_gt", "=", "np", ".", "array", "(", "[", "10", ",", "11", ",", "3", ",", "5", ",", "7", ",", "13", ",", "14", "]", ")", "assert_array_equal", "(", "X_resampled", ",", "X_gt", ")", "assert_array_equal", "(", "y_resampled", ",", "y_gt", ")", "assert_array_equal", "(", "idx_under", ",", "idx_gt", ")" ]
test the fit sample routine with indices support .
train
false
49,428
def map_sample_ids(sample_ids, sample_id_map): try: result = [sample_id_map[sample_id] for sample_id in sample_ids] except KeyError: raise KeyError(('Unknown sample ID: %s' % sample_id)) return result
[ "def", "map_sample_ids", "(", "sample_ids", ",", "sample_id_map", ")", ":", "try", ":", "result", "=", "[", "sample_id_map", "[", "sample_id", "]", "for", "sample_id", "in", "sample_ids", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "(", "'Unknown sample ID: %s'", "%", "sample_id", ")", ")", "return", "result" ]
map sample ids to new values in sample_id_map .
train
false
49,429
def classImplementsOnly(cls, *interfaces): spec = implementedBy(cls) spec.declared = () spec.inherit = None classImplements(cls, *interfaces)
[ "def", "classImplementsOnly", "(", "cls", ",", "*", "interfaces", ")", ":", "spec", "=", "implementedBy", "(", "cls", ")", "spec", ".", "declared", "=", "(", ")", "spec", ".", "inherit", "=", "None", "classImplements", "(", "cls", ",", "*", "interfaces", ")" ]
declare the only interfaces implemented by instances of a class the arguments after the class are one or more interfaces or interface specifications .
train
false
49,432
def dmp_ground_trunc(f, p, u, K): if (not u): return dup_trunc(f, p, K) v = (u - 1) return dmp_strip([dmp_ground_trunc(c, p, v, K) for c in f], u)
[ "def", "dmp_ground_trunc", "(", "f", ",", "p", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_trunc", "(", "f", ",", "p", ",", "K", ")", "v", "=", "(", "u", "-", "1", ")", "return", "dmp_strip", "(", "[", "dmp_ground_trunc", "(", "c", ",", "p", ",", "v", ",", "K", ")", "for", "c", "in", "f", "]", ",", "u", ")" ]
reduce a k[x] polynomial modulo a constant p in k .
train
false
49,433
def _Cfunction(name, flags, errcheck, *types): if (hasattr(dll, name) and (name in _Globals)): p = ctypes.CFUNCTYPE(*types) f = p((name, dll), flags) if (errcheck is not None): f.errcheck = errcheck if __debug__: _Cfunctions[name] = f else: _Globals[name] = f return f raise NameError(('no function %r' % (name,)))
[ "def", "_Cfunction", "(", "name", ",", "flags", ",", "errcheck", ",", "*", "types", ")", ":", "if", "(", "hasattr", "(", "dll", ",", "name", ")", "and", "(", "name", "in", "_Globals", ")", ")", ":", "p", "=", "ctypes", ".", "CFUNCTYPE", "(", "*", "types", ")", "f", "=", "p", "(", "(", "name", ",", "dll", ")", ",", "flags", ")", "if", "(", "errcheck", "is", "not", "None", ")", ":", "f", ".", "errcheck", "=", "errcheck", "if", "__debug__", ":", "_Cfunctions", "[", "name", "]", "=", "f", "else", ":", "_Globals", "[", "name", "]", "=", "f", "return", "f", "raise", "NameError", "(", "(", "'no function %r'", "%", "(", "name", ",", ")", ")", ")" ]
new ctypes function binding .
train
true
49,434
def _get_all_tags(conn, load_balancer_names=None): params = {} if load_balancer_names: conn.build_list_params(params, load_balancer_names, 'LoadBalancerNames.member.%d') tags = conn.get_object('DescribeTags', params, __utils__['boto_elb_tag.get_tag_descriptions'](), verb='POST') if tags[load_balancer_names]: return tags[load_balancer_names] else: return None
[ "def", "_get_all_tags", "(", "conn", ",", "load_balancer_names", "=", "None", ")", ":", "params", "=", "{", "}", "if", "load_balancer_names", ":", "conn", ".", "build_list_params", "(", "params", ",", "load_balancer_names", ",", "'LoadBalancerNames.member.%d'", ")", "tags", "=", "conn", ".", "get_object", "(", "'DescribeTags'", ",", "params", ",", "__utils__", "[", "'boto_elb_tag.get_tag_descriptions'", "]", "(", ")", ",", "verb", "=", "'POST'", ")", "if", "tags", "[", "load_balancer_names", "]", ":", "return", "tags", "[", "load_balancer_names", "]", "else", ":", "return", "None" ]
retrieve all the metadata tags associated with your elb(s) .
train
true
49,436
@login_required def edit_thread(request, document_slug, thread_id): doc = get_document(document_slug, request) thread = get_object_or_404(Thread, pk=thread_id, document=doc) perm = request.user.has_perm('kbforums.change_thread') if (not (perm or ((thread.creator == request.user) and (not thread.is_locked)))): raise PermissionDenied if (request.method == 'GET'): form = EditThreadForm(instance=thread) return render(request, 'kbforums/edit_thread.html', {'form': form, 'document': doc, 'thread': thread}) form = EditThreadForm(request.POST) if form.is_valid(): log.warning(('User %s is editing KB thread with id=%s' % (request.user, thread.id))) thread.title = form.cleaned_data['title'] thread.save() url = reverse('wiki.discuss.posts', args=[document_slug, thread_id]) return HttpResponseRedirect(url) return render(request, 'kbforums/edit_thread.html', {'form': form, 'document': doc, 'thread': thread})
[ "@", "login_required", "def", "edit_thread", "(", "request", ",", "document_slug", ",", "thread_id", ")", ":", "doc", "=", "get_document", "(", "document_slug", ",", "request", ")", "thread", "=", "get_object_or_404", "(", "Thread", ",", "pk", "=", "thread_id", ",", "document", "=", "doc", ")", "perm", "=", "request", ".", "user", ".", "has_perm", "(", "'kbforums.change_thread'", ")", "if", "(", "not", "(", "perm", "or", "(", "(", "thread", ".", "creator", "==", "request", ".", "user", ")", "and", "(", "not", "thread", ".", "is_locked", ")", ")", ")", ")", ":", "raise", "PermissionDenied", "if", "(", "request", ".", "method", "==", "'GET'", ")", ":", "form", "=", "EditThreadForm", "(", "instance", "=", "thread", ")", "return", "render", "(", "request", ",", "'kbforums/edit_thread.html'", ",", "{", "'form'", ":", "form", ",", "'document'", ":", "doc", ",", "'thread'", ":", "thread", "}", ")", "form", "=", "EditThreadForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "log", ".", "warning", "(", "(", "'User %s is editing KB thread with id=%s'", "%", "(", "request", ".", "user", ",", "thread", ".", "id", ")", ")", ")", "thread", ".", "title", "=", "form", ".", "cleaned_data", "[", "'title'", "]", "thread", ".", "save", "(", ")", "url", "=", "reverse", "(", "'wiki.discuss.posts'", ",", "args", "=", "[", "document_slug", ",", "thread_id", "]", ")", "return", "HttpResponseRedirect", "(", "url", ")", "return", "render", "(", "request", ",", "'kbforums/edit_thread.html'", ",", "{", "'form'", ":", "form", ",", "'document'", ":", "doc", ",", "'thread'", ":", "thread", "}", ")" ]
edit a thread .
train
false
49,437
def from_inet_ptoi(bgp_id): four_byte_id = None try: packed_byte = socket.inet_pton(socket.AF_INET, bgp_id) four_byte_id = int(packed_byte.encode('hex'), 16) except ValueError: LOG.debug('Invalid bgp id given for conversion to integer value %s', bgp_id) return four_byte_id
[ "def", "from_inet_ptoi", "(", "bgp_id", ")", ":", "four_byte_id", "=", "None", "try", ":", "packed_byte", "=", "socket", ".", "inet_pton", "(", "socket", ".", "AF_INET", ",", "bgp_id", ")", "four_byte_id", "=", "int", "(", "packed_byte", ".", "encode", "(", "'hex'", ")", ",", "16", ")", "except", "ValueError", ":", "LOG", ".", "debug", "(", "'Invalid bgp id given for conversion to integer value %s'", ",", "bgp_id", ")", "return", "four_byte_id" ]
convert an ipv4 address string format to a four byte long .
train
false
49,438
def CDL3INSIDE(barDs, count): return call_talib_with_ohlc(barDs, count, talib.CDL3INSIDE)
[ "def", "CDL3INSIDE", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDL3INSIDE", ")" ]
three inside up/down .
train
false
49,439
def formatSelector(lst): expr = [] for (name, sel) in iter(lst): if expr: expr.append('.') expr.append(name) if (sel is not None): expr.append(('[%s]' % repr(sel))) return ''.join(expr)
[ "def", "formatSelector", "(", "lst", ")", ":", "expr", "=", "[", "]", "for", "(", "name", ",", "sel", ")", "in", "iter", "(", "lst", ")", ":", "if", "expr", ":", "expr", ".", "append", "(", "'.'", ")", "expr", ".", "append", "(", "name", ")", "if", "(", "sel", "is", "not", "None", ")", ":", "expr", ".", "append", "(", "(", "'[%s]'", "%", "repr", "(", "sel", ")", ")", ")", "return", "''", ".", "join", "(", "expr", ")" ]
takes a list of tuples .
train
false
49,441
def handle_read_callback(notifier): print 'handle_read callback' notifier.loop.stop()
[ "def", "handle_read_callback", "(", "notifier", ")", ":", "print", "'handle_read callback'", "notifier", ".", "loop", ".", "stop", "(", ")" ]
just stop receiving io read events after the first iteration .
train
false
49,442
def unicodeencode(value, encoding=None): retVal = value if isinstance(value, unicode): try: retVal = value.encode((encoding or UNICODE_ENCODING)) except UnicodeEncodeError: retVal = value.encode(UNICODE_ENCODING, 'replace') return retVal
[ "def", "unicodeencode", "(", "value", ",", "encoding", "=", "None", ")", ":", "retVal", "=", "value", "if", "isinstance", "(", "value", ",", "unicode", ")", ":", "try", ":", "retVal", "=", "value", ".", "encode", "(", "(", "encoding", "or", "UNICODE_ENCODING", ")", ")", "except", "UnicodeEncodeError", ":", "retVal", "=", "value", ".", "encode", "(", "UNICODE_ENCODING", ",", "'replace'", ")", "return", "retVal" ]
returns 8-bit string representation of the supplied unicode value .
train
false
49,443
def split_for_transport(orig_pkt, transport_proto): header = orig_pkt.__class__(str(orig_pkt)) next_hdr = header.payload nh = None if (header.version == 4): nh = header.proto header.proto = transport_proto header.remove_payload() del header.chksum del header.len return (header, nh, next_hdr) else: found_rt_hdr = False prev = header while isinstance(next_hdr, (IPv6ExtHdrHopByHop, IPv6ExtHdrRouting, IPv6ExtHdrDestOpt)): if isinstance(next_hdr, IPv6ExtHdrHopByHop): pass if isinstance(next_hdr, IPv6ExtHdrRouting): found_rt_hdr = True elif (isinstance(next_hdr, IPv6ExtHdrDestOpt) and found_rt_hdr): break prev = next_hdr next_hdr = next_hdr.payload nh = prev.nh prev.nh = transport_proto prev.remove_payload() del header.plen return (header, nh, next_hdr)
[ "def", "split_for_transport", "(", "orig_pkt", ",", "transport_proto", ")", ":", "header", "=", "orig_pkt", ".", "__class__", "(", "str", "(", "orig_pkt", ")", ")", "next_hdr", "=", "header", ".", "payload", "nh", "=", "None", "if", "(", "header", ".", "version", "==", "4", ")", ":", "nh", "=", "header", ".", "proto", "header", ".", "proto", "=", "transport_proto", "header", ".", "remove_payload", "(", ")", "del", "header", ".", "chksum", "del", "header", ".", "len", "return", "(", "header", ",", "nh", ",", "next_hdr", ")", "else", ":", "found_rt_hdr", "=", "False", "prev", "=", "header", "while", "isinstance", "(", "next_hdr", ",", "(", "IPv6ExtHdrHopByHop", ",", "IPv6ExtHdrRouting", ",", "IPv6ExtHdrDestOpt", ")", ")", ":", "if", "isinstance", "(", "next_hdr", ",", "IPv6ExtHdrHopByHop", ")", ":", "pass", "if", "isinstance", "(", "next_hdr", ",", "IPv6ExtHdrRouting", ")", ":", "found_rt_hdr", "=", "True", "elif", "(", "isinstance", "(", "next_hdr", ",", "IPv6ExtHdrDestOpt", ")", "and", "found_rt_hdr", ")", ":", "break", "prev", "=", "next_hdr", "next_hdr", "=", "next_hdr", ".", "payload", "nh", "=", "prev", ".", "nh", "prev", ".", "nh", "=", "transport_proto", "prev", ".", "remove_payload", "(", ")", "del", "header", ".", "plen", "return", "(", "header", ",", "nh", ",", "next_hdr", ")" ]
split an ip packet in the correct location to insert an esp or ah header .
train
true
49,444
def calculate_duration(num_samples, sample_rate): return Gst.util_uint64_scale(num_samples, Gst.SECOND, sample_rate)
[ "def", "calculate_duration", "(", "num_samples", ",", "sample_rate", ")", ":", "return", "Gst", ".", "util_uint64_scale", "(", "num_samples", ",", "Gst", ".", "SECOND", ",", "sample_rate", ")" ]
determine duration of samples using gstreamer helper for precise math .
train
false
49,445
def user_page(request, user): user = get_object_or_404(User, username=user) profile = Profile.objects.get_or_create(user=user)[0] all_changes = Change.objects.last_changes(request.user).filter(user=user) last_changes = all_changes[:10] user_projects_ids = set(all_changes.values_list(u'translation__subproject__project', flat=True)) user_projects = Project.objects.filter(id__in=user_projects_ids) return render(request, u'accounts/user.html', {u'page_profile': profile, u'page_user': user, u'last_changes': last_changes, u'last_changes_url': urlencode({u'user': user.username.encode(u'utf-8')}), u'user_projects': user_projects})
[ "def", "user_page", "(", "request", ",", "user", ")", ":", "user", "=", "get_object_or_404", "(", "User", ",", "username", "=", "user", ")", "profile", "=", "Profile", ".", "objects", ".", "get_or_create", "(", "user", "=", "user", ")", "[", "0", "]", "all_changes", "=", "Change", ".", "objects", ".", "last_changes", "(", "request", ".", "user", ")", ".", "filter", "(", "user", "=", "user", ")", "last_changes", "=", "all_changes", "[", ":", "10", "]", "user_projects_ids", "=", "set", "(", "all_changes", ".", "values_list", "(", "u'translation__subproject__project'", ",", "flat", "=", "True", ")", ")", "user_projects", "=", "Project", ".", "objects", ".", "filter", "(", "id__in", "=", "user_projects_ids", ")", "return", "render", "(", "request", ",", "u'accounts/user.html'", ",", "{", "u'page_profile'", ":", "profile", ",", "u'page_user'", ":", "user", ",", "u'last_changes'", ":", "last_changes", ",", "u'last_changes_url'", ":", "urlencode", "(", "{", "u'user'", ":", "user", ".", "username", ".", "encode", "(", "u'utf-8'", ")", "}", ")", ",", "u'user_projects'", ":", "user_projects", "}", ")" ]
user details page .
train
false
49,446
def groups_for_user(environ, username): UserModel = auth.get_user_model() db.reset_queries() try: try: user = UserModel._default_manager.get_by_natural_key(username) except UserModel.DoesNotExist: return [] if (not user.is_active): return [] return [force_bytes(group.name) for group in user.groups.all()] finally: db.close_old_connections()
[ "def", "groups_for_user", "(", "environ", ",", "username", ")", ":", "UserModel", "=", "auth", ".", "get_user_model", "(", ")", "db", ".", "reset_queries", "(", ")", "try", ":", "try", ":", "user", "=", "UserModel", ".", "_default_manager", ".", "get_by_natural_key", "(", "username", ")", "except", "UserModel", ".", "DoesNotExist", ":", "return", "[", "]", "if", "(", "not", "user", ".", "is_active", ")", ":", "return", "[", "]", "return", "[", "force_bytes", "(", "group", ".", "name", ")", "for", "group", "in", "user", ".", "groups", ".", "all", "(", ")", "]", "finally", ":", "db", ".", "close_old_connections", "(", ")" ]
authorizes a user based on groups .
train
false
49,449
def _build_tree(type_args): stack = list() for arg in type_args: arg_type = arg[0] if (arg_type in _float_ops): numops = _float_ops[arg_type][0] node = [arg, (numops > 0), 0] for i in range(numops): operand = stack.pop() if (type(operand) is list): node[2] += operand[2] if (operand[1] == 0): node[1] = False elif ((operand[0] is ng.GPUTensor) and ((operand[1] > 0) or (not operand[4]))): node[1] = False node.insert(3, operand) stack.append(node) elif (arg_type in _reduction_ops): operand = stack.pop() reds = 1 if (type(operand) is list): reds += operand[2] stack.append([arg, True, reds, operand]) else: stack.append(arg) return stack[0]
[ "def", "_build_tree", "(", "type_args", ")", ":", "stack", "=", "list", "(", ")", "for", "arg", "in", "type_args", ":", "arg_type", "=", "arg", "[", "0", "]", "if", "(", "arg_type", "in", "_float_ops", ")", ":", "numops", "=", "_float_ops", "[", "arg_type", "]", "[", "0", "]", "node", "=", "[", "arg", ",", "(", "numops", ">", "0", ")", ",", "0", "]", "for", "i", "in", "range", "(", "numops", ")", ":", "operand", "=", "stack", ".", "pop", "(", ")", "if", "(", "type", "(", "operand", ")", "is", "list", ")", ":", "node", "[", "2", "]", "+=", "operand", "[", "2", "]", "if", "(", "operand", "[", "1", "]", "==", "0", ")", ":", "node", "[", "1", "]", "=", "False", "elif", "(", "(", "operand", "[", "0", "]", "is", "ng", ".", "GPUTensor", ")", "and", "(", "(", "operand", "[", "1", "]", ">", "0", ")", "or", "(", "not", "operand", "[", "4", "]", ")", ")", ")", ":", "node", "[", "1", "]", "=", "False", "node", ".", "insert", "(", "3", ",", "operand", ")", "stack", ".", "append", "(", "node", ")", "elif", "(", "arg_type", "in", "_reduction_ops", ")", ":", "operand", "=", "stack", ".", "pop", "(", ")", "reds", "=", "1", "if", "(", "type", "(", "operand", ")", "is", "list", ")", ":", "reds", "+=", "operand", "[", "2", "]", "stack", ".", "append", "(", "[", "arg", ",", "True", ",", "reds", ",", "operand", "]", ")", "else", ":", "stack", ".", "append", "(", "arg", ")", "return", "stack", "[", "0", "]" ]
rebuild a mutable tree from the stack flag each op node with whether it is scalar or not also include a count of reductions under this node: node: [ arg .
train
false
49,452
def ExpandEnvVars(string, expansions): for (k, v) in reversed(expansions): string = string.replace((('${' + k) + '}'), v) string = string.replace((('$(' + k) + ')'), v) string = string.replace(('$' + k), v) return string
[ "def", "ExpandEnvVars", "(", "string", ",", "expansions", ")", ":", "for", "(", "k", ",", "v", ")", "in", "reversed", "(", "expansions", ")", ":", "string", "=", "string", ".", "replace", "(", "(", "(", "'${'", "+", "k", ")", "+", "'}'", ")", ",", "v", ")", "string", "=", "string", ".", "replace", "(", "(", "(", "'$('", "+", "k", ")", "+", "')'", ")", ",", "v", ")", "string", "=", "string", ".", "replace", "(", "(", "'$'", "+", "k", ")", ",", "v", ")", "return", "string" ]
expands ${variables} .
train
false
49,453
def split_query(query_string, minimum_part_length=3): query_string = force_text(query_string) return set((part for part in (part.strip() for part in query_string.split()) if (len(part) >= minimum_part_length)))
[ "def", "split_query", "(", "query_string", ",", "minimum_part_length", "=", "3", ")", ":", "query_string", "=", "force_text", "(", "query_string", ")", "return", "set", "(", "(", "part", "for", "part", "in", "(", "part", ".", "strip", "(", ")", "for", "part", "in", "query_string", ".", "split", "(", ")", ")", "if", "(", "len", "(", "part", ")", ">=", "minimum_part_length", ")", ")", ")" ]
split a string into a set of non-empty words .
train
false
49,454
def overwriteDictionary(fromDictionary, keys, toDictionary): for key in keys: if (key in fromDictionary): toDictionary[key] = fromDictionary[key]
[ "def", "overwriteDictionary", "(", "fromDictionary", ",", "keys", ",", "toDictionary", ")", ":", "for", "key", "in", "keys", ":", "if", "(", "key", "in", "fromDictionary", ")", ":", "toDictionary", "[", "key", "]", "=", "fromDictionary", "[", "key", "]" ]
overwrite the dictionary .
train
false
49,455
def get_xem_absolute_numbering_for_show(indexer_id, indexer): if (indexer_id is None): return {} indexer_id = int(indexer_id) indexer = int(indexer) xem_refresh(indexer_id, indexer) result = {} main_db_con = db.DBConnection() rows = main_db_con.select('SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number', [indexer, indexer_id]) for row in rows: absolute_number = int(row['absolute_number']) scene_absolute_number = int(row['scene_absolute_number']) result[absolute_number] = scene_absolute_number return result
[ "def", "get_xem_absolute_numbering_for_show", "(", "indexer_id", ",", "indexer", ")", ":", "if", "(", "indexer_id", "is", "None", ")", ":", "return", "{", "}", "indexer_id", "=", "int", "(", "indexer_id", ")", "indexer", "=", "int", "(", "indexer", ")", "xem_refresh", "(", "indexer_id", ",", "indexer", ")", "result", "=", "{", "}", "main_db_con", "=", "db", ".", "DBConnection", "(", ")", "rows", "=", "main_db_con", ".", "select", "(", "'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number'", ",", "[", "indexer", ",", "indexer_id", "]", ")", "for", "row", "in", "rows", ":", "absolute_number", "=", "int", "(", "row", "[", "'absolute_number'", "]", ")", "scene_absolute_number", "=", "int", "(", "row", "[", "'scene_absolute_number'", "]", ")", "result", "[", "absolute_number", "]", "=", "scene_absolute_number", "return", "result" ]
returns a dict of : mappings for an entire show .
train
false
49,456
def get_fqhostname(): l = [] l.append(socket.getfqdn()) try: addrinfo = socket.getaddrinfo(socket.gethostname(), 0, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME) for info in addrinfo: if (len(info) >= 4): l.append(info[3]) except socket.gaierror: pass return ((l and l[0]) or None)
[ "def", "get_fqhostname", "(", ")", ":", "l", "=", "[", "]", "l", ".", "append", "(", "socket", ".", "getfqdn", "(", ")", ")", "try", ":", "addrinfo", "=", "socket", ".", "getaddrinfo", "(", "socket", ".", "gethostname", "(", ")", ",", "0", ",", "socket", ".", "AF_UNSPEC", ",", "socket", ".", "SOCK_STREAM", ",", "socket", ".", "SOL_TCP", ",", "socket", ".", "AI_CANONNAME", ")", "for", "info", "in", "addrinfo", ":", "if", "(", "len", "(", "info", ")", ">=", "4", ")", ":", "l", ".", "append", "(", "info", "[", "3", "]", ")", "except", "socket", ".", "gaierror", ":", "pass", "return", "(", "(", "l", "and", "l", "[", "0", "]", ")", "or", "None", ")" ]
returns the fully qualified hostname .
train
false
49,459
@RegisterWithArgChecks(name='neighbor.attribute_map.set', req_args=[neighbors.IP_ADDRESS, neighbors.ATTRIBUTE_MAP], opt_args=[ROUTE_DISTINGUISHER, VRF_RF]) def set_neighbor_attribute_map(neigh_ip_address, at_maps, route_dist=None, route_family=VRF_RF_IPV4): core = CORE_MANAGER.get_core_service() peer = core.peer_manager.get_by_addr(neigh_ip_address) at_maps_key = const.ATTR_MAPS_LABEL_DEFAULT at_maps_dict = {} if (route_dist is not None): vrf_conf = CORE_MANAGER.vrfs_conf.get_vrf_conf(route_dist, route_family) if vrf_conf: at_maps_key = ':'.join([route_dist, route_family]) else: raise RuntimeConfigError(desc=('No VrfConf with rd %s' % route_dist)) at_maps_dict[const.ATTR_MAPS_LABEL_KEY] = at_maps_key at_maps_dict[const.ATTR_MAPS_VALUE] = at_maps peer.attribute_maps = at_maps_dict return True
[ "@", "RegisterWithArgChecks", "(", "name", "=", "'neighbor.attribute_map.set'", ",", "req_args", "=", "[", "neighbors", ".", "IP_ADDRESS", ",", "neighbors", ".", "ATTRIBUTE_MAP", "]", ",", "opt_args", "=", "[", "ROUTE_DISTINGUISHER", ",", "VRF_RF", "]", ")", "def", "set_neighbor_attribute_map", "(", "neigh_ip_address", ",", "at_maps", ",", "route_dist", "=", "None", ",", "route_family", "=", "VRF_RF_IPV4", ")", ":", "core", "=", "CORE_MANAGER", ".", "get_core_service", "(", ")", "peer", "=", "core", ".", "peer_manager", ".", "get_by_addr", "(", "neigh_ip_address", ")", "at_maps_key", "=", "const", ".", "ATTR_MAPS_LABEL_DEFAULT", "at_maps_dict", "=", "{", "}", "if", "(", "route_dist", "is", "not", "None", ")", ":", "vrf_conf", "=", "CORE_MANAGER", ".", "vrfs_conf", ".", "get_vrf_conf", "(", "route_dist", ",", "route_family", ")", "if", "vrf_conf", ":", "at_maps_key", "=", "':'", ".", "join", "(", "[", "route_dist", ",", "route_family", "]", ")", "else", ":", "raise", "RuntimeConfigError", "(", "desc", "=", "(", "'No VrfConf with rd %s'", "%", "route_dist", ")", ")", "at_maps_dict", "[", "const", ".", "ATTR_MAPS_LABEL_KEY", "]", "=", "at_maps_key", "at_maps_dict", "[", "const", ".", "ATTR_MAPS_VALUE", "]", "=", "at_maps", "peer", ".", "attribute_maps", "=", "at_maps_dict", "return", "True" ]
set attribute_maps to the neighbor .
train
false
49,461
def get_logo_url(is_secure=True): image_url = configuration_helpers.get_value('logo_image_url') if image_url: return _absolute_url_staticfile(is_secure=is_secure, name=image_url) university = configuration_helpers.get_value('university') if university: return staticfiles_storage.url('images/{uni}-on-edx-logo.png'.format(uni=university)) else: return staticfiles_storage.url('images/logo.png')
[ "def", "get_logo_url", "(", "is_secure", "=", "True", ")", ":", "image_url", "=", "configuration_helpers", ".", "get_value", "(", "'logo_image_url'", ")", "if", "image_url", ":", "return", "_absolute_url_staticfile", "(", "is_secure", "=", "is_secure", ",", "name", "=", "image_url", ")", "university", "=", "configuration_helpers", ".", "get_value", "(", "'university'", ")", "if", "university", ":", "return", "staticfiles_storage", ".", "url", "(", "'images/{uni}-on-edx-logo.png'", ".", "format", "(", "uni", "=", "university", ")", ")", "else", ":", "return", "staticfiles_storage", ".", "url", "(", "'images/logo.png'", ")" ]
return the url for the branded logo image to be used arguments: is_secure : if true .
train
false
49,462
def figimage(image, scale=1, dpi=None, **kwargs): dpi = (dpi if (dpi is not None) else mpl.rcParams['figure.dpi']) kwargs.setdefault('interpolation', 'nearest') kwargs.setdefault('cmap', 'gray') (h, w, d) = np.atleast_3d(image).shape figsize = ((np.array((w, h), dtype=float) / dpi) * scale) (fig, ax) = new_plot(figsize=figsize, dpi=dpi) fig.subplots_adjust(left=0, bottom=0, right=1, top=1) ax.set_axis_off() ax.imshow(image, **kwargs) ax.figure.canvas.draw() return (fig, ax)
[ "def", "figimage", "(", "image", ",", "scale", "=", "1", ",", "dpi", "=", "None", ",", "**", "kwargs", ")", ":", "dpi", "=", "(", "dpi", "if", "(", "dpi", "is", "not", "None", ")", "else", "mpl", ".", "rcParams", "[", "'figure.dpi'", "]", ")", "kwargs", ".", "setdefault", "(", "'interpolation'", ",", "'nearest'", ")", "kwargs", ".", "setdefault", "(", "'cmap'", ",", "'gray'", ")", "(", "h", ",", "w", ",", "d", ")", "=", "np", ".", "atleast_3d", "(", "image", ")", ".", "shape", "figsize", "=", "(", "(", "np", ".", "array", "(", "(", "w", ",", "h", ")", ",", "dtype", "=", "float", ")", "/", "dpi", ")", "*", "scale", ")", "(", "fig", ",", "ax", ")", "=", "new_plot", "(", "figsize", "=", "figsize", ",", "dpi", "=", "dpi", ")", "fig", ".", "subplots_adjust", "(", "left", "=", "0", ",", "bottom", "=", "0", ",", "right", "=", "1", ",", "top", "=", "1", ")", "ax", ".", "set_axis_off", "(", ")", "ax", ".", "imshow", "(", "image", ",", "**", "kwargs", ")", "ax", ".", "figure", ".", "canvas", ".", "draw", "(", ")", "return", "(", "fig", ",", "ax", ")" ]
return figure and axes with figure tightly surrounding image .
train
false
49,465
def get_mapper_params(): return context.get().mapreduce_spec.mapper.params
[ "def", "get_mapper_params", "(", ")", ":", "return", "context", ".", "get", "(", ")", ".", "mapreduce_spec", ".", "mapper", ".", "params" ]
return current mapreduce mapper params .
train
false
49,468
def test_fiducials_io(): tempdir = _TempDir() (pts, coord_frame) = read_fiducials(fiducials_fname) assert_equal(pts[0]['coord_frame'], FIFF.FIFFV_COORD_MRI) assert_equal(pts[0]['ident'], FIFF.FIFFV_POINT_CARDINAL) temp_fname = op.join(tempdir, 'test.fif') write_fiducials(temp_fname, pts, coord_frame) (pts_1, coord_frame_1) = read_fiducials(temp_fname) assert_equal(coord_frame, coord_frame_1) for (pt, pt_1) in zip(pts, pts_1): assert_equal(pt['kind'], pt_1['kind']) assert_equal(pt['ident'], pt_1['ident']) assert_equal(pt['coord_frame'], pt_1['coord_frame']) assert_array_equal(pt['r'], pt_1['r']) pts[0]['coord_frame'] += 1 assert_raises(ValueError, write_fiducials, temp_fname, pts, coord_frame)
[ "def", "test_fiducials_io", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "(", "pts", ",", "coord_frame", ")", "=", "read_fiducials", "(", "fiducials_fname", ")", "assert_equal", "(", "pts", "[", "0", "]", "[", "'coord_frame'", "]", ",", "FIFF", ".", "FIFFV_COORD_MRI", ")", "assert_equal", "(", "pts", "[", "0", "]", "[", "'ident'", "]", ",", "FIFF", ".", "FIFFV_POINT_CARDINAL", ")", "temp_fname", "=", "op", ".", "join", "(", "tempdir", ",", "'test.fif'", ")", "write_fiducials", "(", "temp_fname", ",", "pts", ",", "coord_frame", ")", "(", "pts_1", ",", "coord_frame_1", ")", "=", "read_fiducials", "(", "temp_fname", ")", "assert_equal", "(", "coord_frame", ",", "coord_frame_1", ")", "for", "(", "pt", ",", "pt_1", ")", "in", "zip", "(", "pts", ",", "pts_1", ")", ":", "assert_equal", "(", "pt", "[", "'kind'", "]", ",", "pt_1", "[", "'kind'", "]", ")", "assert_equal", "(", "pt", "[", "'ident'", "]", ",", "pt_1", "[", "'ident'", "]", ")", "assert_equal", "(", "pt", "[", "'coord_frame'", "]", ",", "pt_1", "[", "'coord_frame'", "]", ")", "assert_array_equal", "(", "pt", "[", "'r'", "]", ",", "pt_1", "[", "'r'", "]", ")", "pts", "[", "0", "]", "[", "'coord_frame'", "]", "+=", "1", "assert_raises", "(", "ValueError", ",", "write_fiducials", ",", "temp_fname", ",", "pts", ",", "coord_frame", ")" ]
test fiducials i/o .
train
false
49,470
def AAAA(host, nameserver=None): dig = ['dig', '+short', str(host), 'AAAA'] if (nameserver is not None): dig.append('@{0}'.format(nameserver)) cmd = __salt__['cmd.run_all'](dig, python_shell=False) if (cmd['retcode'] != 0): log.warning("dig returned exit code '{0}'. Returning empty list as fallback.".format(cmd['retcode'])) return [] return [x for x in cmd['stdout'].split('\n') if check_ip(x)]
[ "def", "AAAA", "(", "host", ",", "nameserver", "=", "None", ")", ":", "dig", "=", "[", "'dig'", ",", "'+short'", ",", "str", "(", "host", ")", ",", "'AAAA'", "]", "if", "(", "nameserver", "is", "not", "None", ")", ":", "dig", ".", "append", "(", "'@{0}'", ".", "format", "(", "nameserver", ")", ")", "cmd", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "dig", ",", "python_shell", "=", "False", ")", "if", "(", "cmd", "[", "'retcode'", "]", "!=", "0", ")", ":", "log", ".", "warning", "(", "\"dig returned exit code '{0}'. Returning empty list as fallback.\"", ".", "format", "(", "cmd", "[", "'retcode'", "]", ")", ")", "return", "[", "]", "return", "[", "x", "for", "x", "in", "cmd", "[", "'stdout'", "]", ".", "split", "(", "'\\n'", ")", "if", "check_ip", "(", "x", ")", "]" ]
return the aaaa record for host .
train
true
49,471
@parse_data @set_database def get_topic_nodes(parent=None, ids=None, **kwargs): if parent: Parent = Item.alias() if (parent == 'root'): selector = Parent.parent.is_null() else: selector = (Parent.id == parent) values = Item.select(Item.title, Item.description, Item.available, Item.kind, Item.children, Item.id, Item.path, Item.slug).join(Parent, on=(Item.parent == Parent.pk)).where((selector & Item.available)) return values elif ids: values = Item.select(Item.title, Item.description, Item.available, Item.kind, Item.children, Item.id, Item.path, Item.slug).where(Item.id.in_(ids)) return values
[ "@", "parse_data", "@", "set_database", "def", "get_topic_nodes", "(", "parent", "=", "None", ",", "ids", "=", "None", ",", "**", "kwargs", ")", ":", "if", "parent", ":", "Parent", "=", "Item", ".", "alias", "(", ")", "if", "(", "parent", "==", "'root'", ")", ":", "selector", "=", "Parent", ".", "parent", ".", "is_null", "(", ")", "else", ":", "selector", "=", "(", "Parent", ".", "id", "==", "parent", ")", "values", "=", "Item", ".", "select", "(", "Item", ".", "title", ",", "Item", ".", "description", ",", "Item", ".", "available", ",", "Item", ".", "kind", ",", "Item", ".", "children", ",", "Item", ".", "id", ",", "Item", ".", "path", ",", "Item", ".", "slug", ")", ".", "join", "(", "Parent", ",", "on", "=", "(", "Item", ".", "parent", "==", "Parent", ".", "pk", ")", ")", ".", "where", "(", "(", "selector", "&", "Item", ".", "available", ")", ")", "return", "values", "elif", "ids", ":", "values", "=", "Item", ".", "select", "(", "Item", ".", "title", ",", "Item", ".", "description", ",", "Item", ".", "available", ",", "Item", ".", "kind", ",", "Item", ".", "children", ",", "Item", ".", "id", ",", "Item", ".", "path", ",", "Item", ".", "slug", ")", ".", "where", "(", "Item", ".", "id", ".", "in_", "(", "ids", ")", ")", "return", "values" ]
convenience function for returning a set of topic nodes with limited fields for rendering the topic tree can either pass in the parent id to return all the immediate children of a node .
train
false
49,473
def _listXform(line): (index, size) = line.split(None, 1) return ((int(index) - 1), int(size))
[ "def", "_listXform", "(", "line", ")", ":", "(", "index", ",", "size", ")", "=", "line", ".", "split", "(", "None", ",", "1", ")", "return", "(", "(", "int", "(", "index", ")", "-", "1", ")", ",", "int", "(", "size", ")", ")" ]
parse a line of the response to a list command .
train
false
49,474
def make_path_log(r, t=None, i=None): rpath = ('/recipes/' + r) tpath = ((t and ('/tasks/' + t)) or '') ipath = ((i and ('/results/' + i)) or '') return (((rpath + tpath) + ipath) + '/logs')
[ "def", "make_path_log", "(", "r", ",", "t", "=", "None", ",", "i", "=", "None", ")", ":", "rpath", "=", "(", "'/recipes/'", "+", "r", ")", "tpath", "=", "(", "(", "t", "and", "(", "'/tasks/'", "+", "t", ")", ")", "or", "''", ")", "ipath", "=", "(", "(", "i", "and", "(", "'/results/'", "+", "i", ")", ")", "or", "''", ")", "return", "(", "(", "(", "rpath", "+", "tpath", ")", "+", "ipath", ")", "+", "'/logs'", ")" ]
converts id into a beaker path to log file given a recipe id .
train
false
49,475
def rand_mac_address(): mac = [250, 22, 62, random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)] return ':'.join([('%02x' % x) for x in mac])
[ "def", "rand_mac_address", "(", ")", ":", "mac", "=", "[", "250", ",", "22", ",", "62", ",", "random", ".", "randint", "(", "0", ",", "255", ")", ",", "random", ".", "randint", "(", "0", ",", "255", ")", ",", "random", ".", "randint", "(", "0", ",", "255", ")", "]", "return", "':'", ".", "join", "(", "[", "(", "'%02x'", "%", "x", ")", "for", "x", "in", "mac", "]", ")" ]
generate an ethernet mac address :return: an random ethernet mac address :rtype: string .
train
false
49,477
def power_on_instance(session, instance, vm_ref=None): if (vm_ref is None): vm_ref = get_vm_ref(session, instance) LOG.debug('Powering on the VM', instance=instance) try: poweron_task = session._call_method(session.vim, 'PowerOnVM_Task', vm_ref) session._wait_for_task(poweron_task) LOG.debug('Powered on the VM', instance=instance) except vexc.InvalidPowerStateException: LOG.debug('VM already powered on', instance=instance)
[ "def", "power_on_instance", "(", "session", ",", "instance", ",", "vm_ref", "=", "None", ")", ":", "if", "(", "vm_ref", "is", "None", ")", ":", "vm_ref", "=", "get_vm_ref", "(", "session", ",", "instance", ")", "LOG", ".", "debug", "(", "'Powering on the VM'", ",", "instance", "=", "instance", ")", "try", ":", "poweron_task", "=", "session", ".", "_call_method", "(", "session", ".", "vim", ",", "'PowerOnVM_Task'", ",", "vm_ref", ")", "session", ".", "_wait_for_task", "(", "poweron_task", ")", "LOG", ".", "debug", "(", "'Powered on the VM'", ",", "instance", "=", "instance", ")", "except", "vexc", ".", "InvalidPowerStateException", ":", "LOG", ".", "debug", "(", "'VM already powered on'", ",", "instance", "=", "instance", ")" ]
power on the specified instance .
train
false
49,480
def get_definition_location(project, source_code, offset, resource=None, maxfixes=1): fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes) pyname = fixer.pyname_at(offset) if (pyname is not None): (module, lineno) = pyname.get_definition_location() if (module is not None): return (module.get_module().get_resource(), lineno) return (None, None)
[ "def", "get_definition_location", "(", "project", ",", "source_code", ",", "offset", ",", "resource", "=", "None", ",", "maxfixes", "=", "1", ")", ":", "fixer", "=", "fixsyntax", ".", "FixSyntax", "(", "project", ",", "source_code", ",", "resource", ",", "maxfixes", ")", "pyname", "=", "fixer", ".", "pyname_at", "(", "offset", ")", "if", "(", "pyname", "is", "not", "None", ")", ":", "(", "module", ",", "lineno", ")", "=", "pyname", ".", "get_definition_location", "(", ")", "if", "(", "module", "is", "not", "None", ")", ":", "return", "(", "module", ".", "get_module", "(", ")", ".", "get_resource", "(", ")", ",", "lineno", ")", "return", "(", "None", ",", "None", ")" ]
return the definition location of the python name at offset return a tuple .
train
true
49,481
def paired_cosine_distances(X, Y): (X, Y) = check_paired_arrays(X, Y) return (0.5 * row_norms((normalize(X) - normalize(Y)), squared=True))
[ "def", "paired_cosine_distances", "(", "X", ",", "Y", ")", ":", "(", "X", ",", "Y", ")", "=", "check_paired_arrays", "(", "X", ",", "Y", ")", "return", "(", "0.5", "*", "row_norms", "(", "(", "normalize", "(", "X", ")", "-", "normalize", "(", "Y", ")", ")", ",", "squared", "=", "True", ")", ")" ]
computes the paired cosine distances between x and y read more in the :ref:user guide <metrics> .
train
false
49,482
def fresh_login_required(func): @wraps(func) def decorated_view(*args, **kwargs): if (request.method in EXEMPT_METHODS): return func(*args, **kwargs) elif current_app.login_manager._login_disabled: return func(*args, **kwargs) elif (not current_user.is_authenticated): return current_app.login_manager.unauthorized() elif (not login_fresh()): return current_app.login_manager.needs_refresh() return func(*args, **kwargs) return decorated_view
[ "def", "fresh_login_required", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "decorated_view", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "request", ".", "method", "in", "EXEMPT_METHODS", ")", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "elif", "current_app", ".", "login_manager", ".", "_login_disabled", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "elif", "(", "not", "current_user", ".", "is_authenticated", ")", ":", "return", "current_app", ".", "login_manager", ".", "unauthorized", "(", ")", "elif", "(", "not", "login_fresh", "(", ")", ")", ":", "return", "current_app", ".", "login_manager", ".", "needs_refresh", "(", ")", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "decorated_view" ]
if you decorate a view with this .
train
true
49,483
def reverse_library_url(handler_name, library_key, kwargs=None): return reverse_url(handler_name, 'library_key_string', library_key, kwargs)
[ "def", "reverse_library_url", "(", "handler_name", ",", "library_key", ",", "kwargs", "=", "None", ")", ":", "return", "reverse_url", "(", "handler_name", ",", "'library_key_string'", ",", "library_key", ",", "kwargs", ")" ]
creates the url for handlers that use library_keys as url parameters .
train
false
49,484
def print_ic_table(ics, selected_orders): cols = sorted(ics) data = mat([[('%#10.4g' % v) for v in ics[c]] for c in cols], dtype=object).T for (i, col) in enumerate(cols): idx = (int(selected_orders[col]), i) data[idx] = (data[idx] + '*') fmt = dict(_default_table_fmt, data_fmts=(('%s',) * len(cols))) buf = StringIO() table = SimpleTable(data, cols, lrange(len(data)), title='VAR Order Selection', txt_fmt=fmt) buf.write((str(table) + '\n')) buf.write(('* Minimum' + '\n')) print(buf.getvalue())
[ "def", "print_ic_table", "(", "ics", ",", "selected_orders", ")", ":", "cols", "=", "sorted", "(", "ics", ")", "data", "=", "mat", "(", "[", "[", "(", "'%#10.4g'", "%", "v", ")", "for", "v", "in", "ics", "[", "c", "]", "]", "for", "c", "in", "cols", "]", ",", "dtype", "=", "object", ")", ".", "T", "for", "(", "i", ",", "col", ")", "in", "enumerate", "(", "cols", ")", ":", "idx", "=", "(", "int", "(", "selected_orders", "[", "col", "]", ")", ",", "i", ")", "data", "[", "idx", "]", "=", "(", "data", "[", "idx", "]", "+", "'*'", ")", "fmt", "=", "dict", "(", "_default_table_fmt", ",", "data_fmts", "=", "(", "(", "'%s'", ",", ")", "*", "len", "(", "cols", ")", ")", ")", "buf", "=", "StringIO", "(", ")", "table", "=", "SimpleTable", "(", "data", ",", "cols", ",", "lrange", "(", "len", "(", "data", ")", ")", ",", "title", "=", "'VAR Order Selection'", ",", "txt_fmt", "=", "fmt", ")", "buf", ".", "write", "(", "(", "str", "(", "table", ")", "+", "'\\n'", ")", ")", "buf", ".", "write", "(", "(", "'* Minimum'", "+", "'\\n'", ")", ")", "print", "(", "buf", ".", "getvalue", "(", ")", ")" ]
for var order selection .
train
false
49,486
def sdb(opts, functions=None, whitelist=None): return LazyLoader(_module_dirs(opts, 'sdb'), opts, tag='sdb', pack={'__sdb__': functions}, whitelist=whitelist)
[ "def", "sdb", "(", "opts", ",", "functions", "=", "None", ",", "whitelist", "=", "None", ")", ":", "return", "LazyLoader", "(", "_module_dirs", "(", "opts", ",", "'sdb'", ")", ",", "opts", ",", "tag", "=", "'sdb'", ",", "pack", "=", "{", "'__sdb__'", ":", "functions", "}", ",", "whitelist", "=", "whitelist", ")" ]
make a very small database call .
train
false
49,487
def test_LineInfo(): linfo = LineInfo(' %cd /home') nt.assert_equal(str(linfo), 'LineInfo [ |%|cd|/home]')
[ "def", "test_LineInfo", "(", ")", ":", "linfo", "=", "LineInfo", "(", "' %cd /home'", ")", "nt", ".", "assert_equal", "(", "str", "(", "linfo", ")", ",", "'LineInfo [ |%|cd|/home]'", ")" ]
simple test for lineinfo construction and str() .
train
false
49,489
def is_process_64(pid): is64 = False if (not ('64' in platform.machine())): return False hProcess = windll.kernel32.OpenProcess(PROCESS_QUERY_INFORMATION, False, pid) if (hProcess == INVALID_HANDLE_VALUE): raise WinError("can't OpenProcess for PROCESS_QUERY_INFORMATION. Insufficient privileges ?") is64 = is_process_64_from_handle(hProcess) windll.kernel32.CloseHandle(hProcess) return is64
[ "def", "is_process_64", "(", "pid", ")", ":", "is64", "=", "False", "if", "(", "not", "(", "'64'", "in", "platform", ".", "machine", "(", ")", ")", ")", ":", "return", "False", "hProcess", "=", "windll", ".", "kernel32", ".", "OpenProcess", "(", "PROCESS_QUERY_INFORMATION", ",", "False", ",", "pid", ")", "if", "(", "hProcess", "==", "INVALID_HANDLE_VALUE", ")", ":", "raise", "WinError", "(", "\"can't OpenProcess for PROCESS_QUERY_INFORMATION. Insufficient privileges ?\"", ")", "is64", "=", "is_process_64_from_handle", "(", "hProcess", ")", "windll", ".", "kernel32", ".", "CloseHandle", "(", "hProcess", ")", "return", "is64" ]
take a pid .
train
false
49,491
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get the repository constructor .
train
false
49,492
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialize strategy module .
train
false
49,493
def add_to_epsilon_closure(state_set, state): if (not state_set.get(state, 0)): state_set[state] = 1 state_set_2 = state.transitions.get_epsilon() if state_set_2: for state2 in state_set_2: add_to_epsilon_closure(state_set, state2)
[ "def", "add_to_epsilon_closure", "(", "state_set", ",", "state", ")", ":", "if", "(", "not", "state_set", ".", "get", "(", "state", ",", "0", ")", ")", ":", "state_set", "[", "state", "]", "=", "1", "state_set_2", "=", "state", ".", "transitions", ".", "get_epsilon", "(", ")", "if", "state_set_2", ":", "for", "state2", "in", "state_set_2", ":", "add_to_epsilon_closure", "(", "state_set", ",", "state2", ")" ]
recursively add to |state_set| states reachable from the given state by epsilon moves .
train
false
49,494
def multiset(seq): rv = defaultdict(int) for s in seq: rv[s] += 1 return dict(rv)
[ "def", "multiset", "(", "seq", ")", ":", "rv", "=", "defaultdict", "(", "int", ")", "for", "s", "in", "seq", ":", "rv", "[", "s", "]", "+=", "1", "return", "dict", "(", "rv", ")" ]
return the hashable sequence in multiset form with values being the multiplicity of the item in the sequence .
train
false
49,495
def filename_from_url(url): if (not url.isValid()): return None pathname = posixpath.basename(url.path()) if pathname: return pathname elif url.host(): return (url.host() + '.html') else: return None
[ "def", "filename_from_url", "(", "url", ")", ":", "if", "(", "not", "url", ".", "isValid", "(", ")", ")", ":", "return", "None", "pathname", "=", "posixpath", ".", "basename", "(", "url", ".", "path", "(", ")", ")", "if", "pathname", ":", "return", "pathname", "elif", "url", ".", "host", "(", ")", ":", "return", "(", "url", ".", "host", "(", ")", "+", "'.html'", ")", "else", ":", "return", "None" ]
get a suitable filename from a url .
train
false
49,496
def _save_user_settings(user_settings): user_settings.validate() user_models.UserSettingsModel(id=user_settings.user_id, email=user_settings.email, username=user_settings.username, normalized_username=user_settings.normalized_username, last_agreed_to_terms=user_settings.last_agreed_to_terms, last_started_state_editor_tutorial=user_settings.last_started_state_editor_tutorial, last_logged_in=user_settings.last_logged_in, last_edited_an_exploration=user_settings.last_edited_an_exploration, last_created_an_exploration=user_settings.last_created_an_exploration, profile_picture_data_url=user_settings.profile_picture_data_url, user_bio=user_settings.user_bio, subject_interests=user_settings.subject_interests, first_contribution_msec=user_settings.first_contribution_msec, preferred_language_codes=user_settings.preferred_language_codes, preferred_site_language_code=user_settings.preferred_site_language_code).put()
[ "def", "_save_user_settings", "(", "user_settings", ")", ":", "user_settings", ".", "validate", "(", ")", "user_models", ".", "UserSettingsModel", "(", "id", "=", "user_settings", ".", "user_id", ",", "email", "=", "user_settings", ".", "email", ",", "username", "=", "user_settings", ".", "username", ",", "normalized_username", "=", "user_settings", ".", "normalized_username", ",", "last_agreed_to_terms", "=", "user_settings", ".", "last_agreed_to_terms", ",", "last_started_state_editor_tutorial", "=", "user_settings", ".", "last_started_state_editor_tutorial", ",", "last_logged_in", "=", "user_settings", ".", "last_logged_in", ",", "last_edited_an_exploration", "=", "user_settings", ".", "last_edited_an_exploration", ",", "last_created_an_exploration", "=", "user_settings", ".", "last_created_an_exploration", ",", "profile_picture_data_url", "=", "user_settings", ".", "profile_picture_data_url", ",", "user_bio", "=", "user_settings", ".", "user_bio", ",", "subject_interests", "=", "user_settings", ".", "subject_interests", ",", "first_contribution_msec", "=", "user_settings", ".", "first_contribution_msec", ",", "preferred_language_codes", "=", "user_settings", ".", "preferred_language_codes", ",", "preferred_site_language_code", "=", "user_settings", ".", "preferred_site_language_code", ")", ".", "put", "(", ")" ]
commits a user settings object to the datastore .
train
false
49,497
def getMaximumSpan(loop): extent = (getMaximumByPathComplex(loop) - getMinimumByPathComplex(loop)) return max(extent.real, extent.imag)
[ "def", "getMaximumSpan", "(", "loop", ")", ":", "extent", "=", "(", "getMaximumByPathComplex", "(", "loop", ")", "-", "getMinimumByPathComplex", "(", "loop", ")", ")", "return", "max", "(", "extent", ".", "real", ",", "extent", ".", "imag", ")" ]
get the maximum span of the loop .
train
false
49,498
def add_syncing_models(models, dependency_check=False): get_foreign_key_classes = (lambda m: set([field.rel.to for field in m._meta.fields if isinstance(field, ForeignKey)])) for model in models: if (model in _syncing_models): logging.debug(('We are already syncing model %s; likely from different ways of importing the same models file.' % unicode(model))) continue foreign_key_classes = get_foreign_key_classes(model) class_indices = [_syncing_models.index(cls) for cls in foreign_key_classes if (cls in _syncing_models)] insert_after_idx = (1 + (max(class_indices) if class_indices else (-1))) if (dependency_check and [True for synmod in _syncing_models[0:(insert_after_idx - 1)] if (model in get_foreign_key_classes(synmod))]): raise Exception('Dependency loop detected in syncing models; cannot proceed.') _syncing_models.insert((insert_after_idx + 1), model)
[ "def", "add_syncing_models", "(", "models", ",", "dependency_check", "=", "False", ")", ":", "get_foreign_key_classes", "=", "(", "lambda", "m", ":", "set", "(", "[", "field", ".", "rel", ".", "to", "for", "field", "in", "m", ".", "_meta", ".", "fields", "if", "isinstance", "(", "field", ",", "ForeignKey", ")", "]", ")", ")", "for", "model", "in", "models", ":", "if", "(", "model", "in", "_syncing_models", ")", ":", "logging", ".", "debug", "(", "(", "'We are already syncing model %s; likely from different ways of importing the same models file.'", "%", "unicode", "(", "model", ")", ")", ")", "continue", "foreign_key_classes", "=", "get_foreign_key_classes", "(", "model", ")", "class_indices", "=", "[", "_syncing_models", ".", "index", "(", "cls", ")", "for", "cls", "in", "foreign_key_classes", "if", "(", "cls", "in", "_syncing_models", ")", "]", "insert_after_idx", "=", "(", "1", "+", "(", "max", "(", "class_indices", ")", "if", "class_indices", "else", "(", "-", "1", ")", ")", ")", "if", "(", "dependency_check", "and", "[", "True", "for", "synmod", "in", "_syncing_models", "[", "0", ":", "(", "insert_after_idx", "-", "1", ")", "]", "if", "(", "model", "in", "get_foreign_key_classes", "(", "synmod", ")", ")", "]", ")", ":", "raise", "Exception", "(", "'Dependency loop detected in syncing models; cannot proceed.'", ")", "_syncing_models", ".", "insert", "(", "(", "insert_after_idx", "+", "1", ")", ",", "model", ")" ]
when sync is run .
train
false
49,499
@task @timed def clean_test_files(): sh('git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads') sh("find . -name '.git' -prune -o -name '*.pyc' -exec rm {} \\;") sh('rm -rf test_root/log/auto_screenshots/*') sh('rm -rf /tmp/mako_[cl]ms')
[ "@", "task", "@", "timed", "def", "clean_test_files", "(", ")", ":", "sh", "(", "'git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads'", ")", "sh", "(", "\"find . -name '.git' -prune -o -name '*.pyc' -exec rm {} \\\\;\"", ")", "sh", "(", "'rm -rf test_root/log/auto_screenshots/*'", ")", "sh", "(", "'rm -rf /tmp/mako_[cl]ms'", ")" ]
clean fixture files used by tests and .
train
false
49,500
def delete_cascade(context, name, session): namespace_rec = _get_by_name(context, name, session) with session.begin(): try: metadef_api.tag.delete_namespace_content(context, namespace_rec.id, session) metadef_api.object.delete_namespace_content(context, namespace_rec.id, session) metadef_api.property.delete_namespace_content(context, namespace_rec.id, session) metadef_api.resource_type_association.delete_namespace_content(context, namespace_rec.id, session) session.delete(namespace_rec) session.flush() except db_exc.DBError as e: if isinstance(e.inner_exception, sa_exc.IntegrityError): LOG.debug('Metadata definition namespace=%s not deleted. Other records still refer to it.', name) raise exc.MetadefIntegrityError(record_type='namespace', record_name=name) else: raise return namespace_rec.to_dict()
[ "def", "delete_cascade", "(", "context", ",", "name", ",", "session", ")", ":", "namespace_rec", "=", "_get_by_name", "(", "context", ",", "name", ",", "session", ")", "with", "session", ".", "begin", "(", ")", ":", "try", ":", "metadef_api", ".", "tag", ".", "delete_namespace_content", "(", "context", ",", "namespace_rec", ".", "id", ",", "session", ")", "metadef_api", ".", "object", ".", "delete_namespace_content", "(", "context", ",", "namespace_rec", ".", "id", ",", "session", ")", "metadef_api", ".", "property", ".", "delete_namespace_content", "(", "context", ",", "namespace_rec", ".", "id", ",", "session", ")", "metadef_api", ".", "resource_type_association", ".", "delete_namespace_content", "(", "context", ",", "namespace_rec", ".", "id", ",", "session", ")", "session", ".", "delete", "(", "namespace_rec", ")", "session", ".", "flush", "(", ")", "except", "db_exc", ".", "DBError", "as", "e", ":", "if", "isinstance", "(", "e", ".", "inner_exception", ",", "sa_exc", ".", "IntegrityError", ")", ":", "LOG", ".", "debug", "(", "'Metadata definition namespace=%s not deleted. Other records still refer to it.'", ",", "name", ")", "raise", "exc", ".", "MetadefIntegrityError", "(", "record_type", "=", "'namespace'", ",", "record_name", "=", "name", ")", "else", ":", "raise", "return", "namespace_rec", ".", "to_dict", "(", ")" ]
raise if not found .
train
false
49,501
def break_around_binary_operator(logical_line, tokens): def is_binary_operator(token_type, text): return (((token_type == tokenize.OP) or (text in ['and', 'or'])) and (text not in '()[]{},:.;@=%')) line_break = False unary_context = True for (token_type, text, start, end, line) in tokens: if (token_type == tokenize.COMMENT): continue if ((('\n' in text) or ('\r' in text)) and (token_type != tokenize.STRING)): line_break = True else: if (is_binary_operator(token_type, text) and line_break and (not unary_context)): (yield (start, 'W503 line break before binary operator')) unary_context = (text in '([{,;') line_break = False
[ "def", "break_around_binary_operator", "(", "logical_line", ",", "tokens", ")", ":", "def", "is_binary_operator", "(", "token_type", ",", "text", ")", ":", "return", "(", "(", "(", "token_type", "==", "tokenize", ".", "OP", ")", "or", "(", "text", "in", "[", "'and'", ",", "'or'", "]", ")", ")", "and", "(", "text", "not", "in", "'()[]{},:.;@=%'", ")", ")", "line_break", "=", "False", "unary_context", "=", "True", "for", "(", "token_type", ",", "text", ",", "start", ",", "end", ",", "line", ")", "in", "tokens", ":", "if", "(", "token_type", "==", "tokenize", ".", "COMMENT", ")", ":", "continue", "if", "(", "(", "(", "'\\n'", "in", "text", ")", "or", "(", "'\\r'", "in", "text", ")", ")", "and", "(", "token_type", "!=", "tokenize", ".", "STRING", ")", ")", ":", "line_break", "=", "True", "else", ":", "if", "(", "is_binary_operator", "(", "token_type", ",", "text", ")", "and", "line_break", "and", "(", "not", "unary_context", ")", ")", ":", "(", "yield", "(", "start", ",", "'W503 line break before binary operator'", ")", ")", "unary_context", "=", "(", "text", "in", "'([{,;'", ")", "line_break", "=", "False" ]
avoid breaks before binary operators .
train
true
49,502
@opt.register_stabilize('fast_compile_gpu') @opt.register_specialize('fast_compile_gpu') @gof.optimizer def crossentropy_to_crossentropy_with_softmax_with_bias(fgraph): def search_make_one_sub(): for node in fgraph.toposort(): if (node.op == crossentropy_categorical_1hot): (nll,) = node.outputs (sm, one_of_n) = node.inputs if (sm.owner and (sm.owner.op == softmax_with_bias)): (x, b) = sm.owner.inputs (new_nll, new_sm, new_am) = crossentropy_softmax_argmax_1hot_with_bias(x, b, one_of_n) fgraph.replace_all_validate([(nll, new_nll), (sm, new_sm)], reason='crossentropy_to_crossentropy_with_softmax_with_bias') return True return False while search_make_one_sub(): pass return
[ "@", "opt", ".", "register_stabilize", "(", "'fast_compile_gpu'", ")", "@", "opt", ".", "register_specialize", "(", "'fast_compile_gpu'", ")", "@", "gof", ".", "optimizer", "def", "crossentropy_to_crossentropy_with_softmax_with_bias", "(", "fgraph", ")", ":", "def", "search_make_one_sub", "(", ")", ":", "for", "node", "in", "fgraph", ".", "toposort", "(", ")", ":", "if", "(", "node", ".", "op", "==", "crossentropy_categorical_1hot", ")", ":", "(", "nll", ",", ")", "=", "node", ".", "outputs", "(", "sm", ",", "one_of_n", ")", "=", "node", ".", "inputs", "if", "(", "sm", ".", "owner", "and", "(", "sm", ".", "owner", ".", "op", "==", "softmax_with_bias", ")", ")", ":", "(", "x", ",", "b", ")", "=", "sm", ".", "owner", ".", "inputs", "(", "new_nll", ",", "new_sm", ",", "new_am", ")", "=", "crossentropy_softmax_argmax_1hot_with_bias", "(", "x", ",", "b", ",", "one_of_n", ")", "fgraph", ".", "replace_all_validate", "(", "[", "(", "nll", ",", "new_nll", ")", ",", "(", "sm", ",", "new_sm", ")", "]", ",", "reason", "=", "'crossentropy_to_crossentropy_with_softmax_with_bias'", ")", "return", "True", "return", "False", "while", "search_make_one_sub", "(", ")", ":", "pass", "return" ]
this is a stabilization optimization .
train
false
49,505
def data_info_factory(names, funcs): def func(dat): outs = [] for (name, func) in zip(names, funcs): try: if isinstance(func, six.string_types): out = getattr(dat, func)() else: out = func(dat) except Exception: outs.append('--') else: outs.append(str(out)) return OrderedDict(zip(names, outs)) return func
[ "def", "data_info_factory", "(", "names", ",", "funcs", ")", ":", "def", "func", "(", "dat", ")", ":", "outs", "=", "[", "]", "for", "(", "name", ",", "func", ")", "in", "zip", "(", "names", ",", "funcs", ")", ":", "try", ":", "if", "isinstance", "(", "func", ",", "six", ".", "string_types", ")", ":", "out", "=", "getattr", "(", "dat", ",", "func", ")", "(", ")", "else", ":", "out", "=", "func", "(", "dat", ")", "except", "Exception", ":", "outs", ".", "append", "(", "'--'", ")", "else", ":", "outs", ".", "append", "(", "str", "(", "out", ")", ")", "return", "OrderedDict", "(", "zip", "(", "names", ",", "outs", ")", ")", "return", "func" ]
factory to create a function that can be used as an option for outputting data object summary information .
train
false
49,506
def test_progress_affecting_statusbar_height(fake_statusbar, progress_widget): expected_height = fake_statusbar.fontMetrics().height() assert (fake_statusbar.height() == expected_height) fake_statusbar.hbox.addWidget(progress_widget) progress_widget.show() assert (fake_statusbar.height() == expected_height)
[ "def", "test_progress_affecting_statusbar_height", "(", "fake_statusbar", ",", "progress_widget", ")", ":", "expected_height", "=", "fake_statusbar", ".", "fontMetrics", "(", ")", ".", "height", "(", ")", "assert", "(", "fake_statusbar", ".", "height", "(", ")", "==", "expected_height", ")", "fake_statusbar", ".", "hbox", ".", "addWidget", "(", "progress_widget", ")", "progress_widget", ".", "show", "(", ")", "assert", "(", "fake_statusbar", ".", "height", "(", ")", "==", "expected_height", ")" ]
make sure the statusbar stays the same height when progress is shown .
train
false
49,507
def is_luks(device): try: utils.execute('cryptsetup', 'isLuks', '--verbose', device, run_as_root=True, check_exit_code=True) return True except processutils.ProcessExecutionError as e: LOG.warning(_LW('isLuks exited abnormally (status %(exit_code)s): %(stderr)s'), {'exit_code': e.exit_code, 'stderr': e.stderr}) return False
[ "def", "is_luks", "(", "device", ")", ":", "try", ":", "utils", ".", "execute", "(", "'cryptsetup'", ",", "'isLuks'", ",", "'--verbose'", ",", "device", ",", "run_as_root", "=", "True", ",", "check_exit_code", "=", "True", ")", "return", "True", "except", "processutils", ".", "ProcessExecutionError", "as", "e", ":", "LOG", ".", "warning", "(", "_LW", "(", "'isLuks exited abnormally (status %(exit_code)s): %(stderr)s'", ")", ",", "{", "'exit_code'", ":", "e", ".", "exit_code", ",", "'stderr'", ":", "e", ".", "stderr", "}", ")", "return", "False" ]
checks if the specified device uses luks for encryption .
train
false
49,508
def _walk_tree(store, tree, root=''): for entry in tree.iteritems(): entry_abspath = posixpath.join(root, entry.path) if stat.S_ISDIR(entry.mode): for _ in _walk_tree(store, store[entry.sha], entry_abspath): (yield _) else: (yield (entry_abspath, entry))
[ "def", "_walk_tree", "(", "store", ",", "tree", ",", "root", "=", "''", ")", ":", "for", "entry", "in", "tree", ".", "iteritems", "(", ")", ":", "entry_abspath", "=", "posixpath", ".", "join", "(", "root", ",", "entry", ".", "path", ")", "if", "stat", ".", "S_ISDIR", "(", "entry", ".", "mode", ")", ":", "for", "_", "in", "_walk_tree", "(", "store", ",", "store", "[", "entry", ".", "sha", "]", ",", "entry_abspath", ")", ":", "(", "yield", "_", ")", "else", ":", "(", "yield", "(", "entry_abspath", ",", "entry", ")", ")" ]
recursively walk a dulwich tree .
train
false
49,509
def has_intersection(set_, iterable): return bool(set_.intersection([i for i in iterable if i.__hash__]))
[ "def", "has_intersection", "(", "set_", ",", "iterable", ")", ":", "return", "bool", "(", "set_", ".", "intersection", "(", "[", "i", "for", "i", "in", "iterable", "if", "i", ".", "__hash__", "]", ")", ")" ]
return true if any items of set_ are present in iterable .
train
false