id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
44,579
def resnet_v1_200(inputs, num_classes=None, is_training=True, global_pool=True, output_stride=None, reuse=None, scope='resnet_v1_200'):
    """Build the ResNet-200 v1 model by stacking its four bottleneck block groups.

    Delegates to resnet_v1 with the standard ResNet-200 block configuration.
    """
    # Each Block is (scope, unit_fn, list of (depth, depth_bottleneck, stride));
    # the final unit of the first three groups downsamples with stride 2.
    blocks = [
        resnet_utils.Block('block1', bottleneck, [(256, 64, 1)] * 2 + [(256, 64, 2)]),
        resnet_utils.Block('block2', bottleneck, [(512, 128, 1)] * 23 + [(512, 128, 2)]),
        resnet_utils.Block('block3', bottleneck, [(1024, 256, 1)] * 35 + [(1024, 256, 2)]),
        resnet_utils.Block('block4', bottleneck, [(2048, 512, 1)] * 3),
    ]
    return resnet_v1(inputs, blocks, num_classes, is_training,
                     global_pool=global_pool, output_stride=output_stride,
                     include_root_block=True, reuse=reuse, scope=scope)
[ "def", "resnet_v1_200", "(", "inputs", ",", "num_classes", "=", "None", ",", "is_training", "=", "True", ",", "global_pool", "=", "True", ",", "output_stride", "=", "None", ",", "reuse", "=", "None", ",", "scope", "=", "'resnet_v1_200'", ")", ":", "blocks", "=", "[", "resnet_utils", ".", "Block", "(", "'block1'", ",", "bottleneck", ",", "(", "(", "[", "(", "256", ",", "64", ",", "1", ")", "]", "*", "2", ")", "+", "[", "(", "256", ",", "64", ",", "2", ")", "]", ")", ")", ",", "resnet_utils", ".", "Block", "(", "'block2'", ",", "bottleneck", ",", "(", "(", "[", "(", "512", ",", "128", ",", "1", ")", "]", "*", "23", ")", "+", "[", "(", "512", ",", "128", ",", "2", ")", "]", ")", ")", ",", "resnet_utils", ".", "Block", "(", "'block3'", ",", "bottleneck", ",", "(", "(", "[", "(", "1024", ",", "256", ",", "1", ")", "]", "*", "35", ")", "+", "[", "(", "1024", ",", "256", ",", "2", ")", "]", ")", ")", ",", "resnet_utils", ".", "Block", "(", "'block4'", ",", "bottleneck", ",", "(", "[", "(", "2048", ",", "512", ",", "1", ")", "]", "*", "3", ")", ")", "]", "return", "resnet_v1", "(", "inputs", ",", "blocks", ",", "num_classes", ",", "is_training", ",", "global_pool", "=", "global_pool", ",", "output_stride", "=", "output_stride", ",", "include_root_block", "=", "True", ",", "reuse", "=", "reuse", ",", "scope", "=", "scope", ")" ]
resnet-200 model of [2] .
train
false
44,580
def value(val):
    """Wrap *val* in an explicit ``{'value': ...}`` specification dict."""
    return {'value': val}
[ "def", "value", "(", "val", ")", ":", "return", "dict", "(", "value", "=", "val", ")" ]
convenience function to explicitly return a "value" specification for a bokeh :class:~bokeh .
train
false
44,581
def calculate_graphsize(args, graphtype, multiplier=1.0):
    """Transform the table parameters in *args* into a table size for *graphtype*.

    Raises ValueError when *graphtype* is not known to khmer.
    """
    if graphtype not in khmer._buckets_per_byte:
        raise ValueError(u'unknown graph type: ' + graphtype)
    if not args.max_memory_usage:
        return args.max_tablesize
    # Spread the memory budget across the tables, scaled by the multiplier.
    buckets = khmer._buckets_per_byte[graphtype] * args.max_memory_usage
    return buckets / args.n_tables / float(multiplier)
[ "def", "calculate_graphsize", "(", "args", ",", "graphtype", ",", "multiplier", "=", "1.0", ")", ":", "if", "(", "graphtype", "not", "in", "khmer", ".", "_buckets_per_byte", ")", ":", "raise", "ValueError", "(", "(", "u'unknown graph type: '", "+", "graphtype", ")", ")", "if", "args", ".", "max_memory_usage", ":", "tablesize", "=", "(", "(", "(", "khmer", ".", "_buckets_per_byte", "[", "graphtype", "]", "*", "args", ".", "max_memory_usage", ")", "/", "args", ".", "n_tables", ")", "/", "float", "(", "multiplier", ")", ")", "else", ":", "tablesize", "=", "args", ".", "max_tablesize", "return", "tablesize" ]
transform the table parameters into a size .
train
false
44,582
def qnwlege(n, a, b):
    """Compute multivariate Gauss-Legendre quadrature nodes and weights.

    Thin wrapper that lifts the 1-d rule _qnwlege1 to multiple dimensions.
    """
    return _make_multidim_func(_qnwlege1, n, a, b)
[ "def", "qnwlege", "(", "n", ",", "a", ",", "b", ")", ":", "return", "_make_multidim_func", "(", "_qnwlege1", ",", "n", ",", "a", ",", "b", ")" ]
computes multivariate gauss-legendre quadrature nodes and weights .
train
false
44,583
def test_capa_system(render_template=None):
    """Construct a mock LoncapaSystem instance for capa tests.

    render_template defaults to tst_render_template when not supplied.
    """
    system_config = dict(
        spec=LoncapaSystem,
        ajax_url='/dummy-ajax-url',
        anonymous_student_id='student',
        cache=None,
        can_execute_unsafe_code=lambda: False,
        get_python_lib_zip=lambda: None,
        DEBUG=True,
        filestore=fs.osfs.OSFS(os.path.join(TEST_DIR, 'test_files')),
        i18n=gettext.NullTranslations(),
        node_path=os.environ.get('NODE_PATH', '/usr/local/lib/node_modules'),
        render_template=render_template or tst_render_template,
        seed=0,
        STATIC_URL='/dummy-static/',
        STATUS_CLASS=Status,
        xqueue={
            'interface': xqueue_interface,
            'construct_callback': calledback_url,
            'default_queuename': 'testqueue',
            'waittime': 10,
        },
    )
    return Mock(**system_config)
[ "def", "test_capa_system", "(", "render_template", "=", "None", ")", ":", "the_system", "=", "Mock", "(", "spec", "=", "LoncapaSystem", ",", "ajax_url", "=", "'/dummy-ajax-url'", ",", "anonymous_student_id", "=", "'student'", ",", "cache", "=", "None", ",", "can_execute_unsafe_code", "=", "(", "lambda", ":", "False", ")", ",", "get_python_lib_zip", "=", "(", "lambda", ":", "None", ")", ",", "DEBUG", "=", "True", ",", "filestore", "=", "fs", ".", "osfs", ".", "OSFS", "(", "os", ".", "path", ".", "join", "(", "TEST_DIR", ",", "'test_files'", ")", ")", ",", "i18n", "=", "gettext", ".", "NullTranslations", "(", ")", ",", "node_path", "=", "os", ".", "environ", ".", "get", "(", "'NODE_PATH'", ",", "'/usr/local/lib/node_modules'", ")", ",", "render_template", "=", "(", "render_template", "or", "tst_render_template", ")", ",", "seed", "=", "0", ",", "STATIC_URL", "=", "'/dummy-static/'", ",", "STATUS_CLASS", "=", "Status", ",", "xqueue", "=", "{", "'interface'", ":", "xqueue_interface", ",", "'construct_callback'", ":", "calledback_url", ",", "'default_queuename'", ":", "'testqueue'", ",", "'waittime'", ":", "10", "}", ")", "return", "the_system" ]
construct a mock loncapasystem instance .
train
false
44,591
def ptb_iterator(raw_data, batch_size, num_steps):
    """Yield (x, y) minibatches of shape (batch_size, num_steps) over *raw_data*.

    y is x shifted right by one position (next-token targets).
    Raises ValueError when the data is too small for even one epoch step.
    """
    raw_data = np.array(raw_data, dtype=np.int32)
    batch_len = len(raw_data) // batch_size
    # Reshape the flat token stream into batch_size parallel sequences.
    data = np.zeros((batch_size, batch_len), dtype=np.int32)
    for row in range(batch_size):
        data[row] = raw_data[row * batch_len:(row + 1) * batch_len]
    epoch_size = (batch_len - 1) // num_steps
    if epoch_size == 0:
        raise ValueError('epoch_size == 0, decrease batch_size or num_steps')
    for step in range(epoch_size):
        lo = step * num_steps
        hi = lo + num_steps
        yield data[:, lo:hi], data[:, lo + 1:hi + 1]
[ "def", "ptb_iterator", "(", "raw_data", ",", "batch_size", ",", "num_steps", ")", ":", "raw_data", "=", "np", ".", "array", "(", "raw_data", ",", "dtype", "=", "np", ".", "int32", ")", "data_len", "=", "len", "(", "raw_data", ")", "batch_len", "=", "(", "data_len", "//", "batch_size", ")", "data", "=", "np", ".", "zeros", "(", "[", "batch_size", ",", "batch_len", "]", ",", "dtype", "=", "np", ".", "int32", ")", "for", "i", "in", "range", "(", "batch_size", ")", ":", "data", "[", "i", "]", "=", "raw_data", "[", "(", "batch_len", "*", "i", ")", ":", "(", "batch_len", "*", "(", "i", "+", "1", ")", ")", "]", "epoch_size", "=", "(", "(", "batch_len", "-", "1", ")", "//", "num_steps", ")", "if", "(", "epoch_size", "==", "0", ")", ":", "raise", "ValueError", "(", "'epoch_size == 0, decrease batch_size or num_steps'", ")", "for", "i", "in", "range", "(", "epoch_size", ")", ":", "x", "=", "data", "[", ":", ",", "(", "i", "*", "num_steps", ")", ":", "(", "(", "i", "+", "1", ")", "*", "num_steps", ")", "]", "y", "=", "data", "[", ":", ",", "(", "(", "i", "*", "num_steps", ")", "+", "1", ")", ":", "(", "(", "(", "i", "+", "1", ")", "*", "num_steps", ")", "+", "1", ")", "]", "(", "yield", "(", "x", ",", "y", ")", ")" ]
generate a generator that iterates on a list of words .
train
true
44,592
@disabled
@retry_on_failure
def test_SSLType_ssl_neg():
    """Negative-argument checks for real_ssl.sslwrap (see test_SSLType_ssl)."""
    sock = socket.socket(socket.AF_INET)
    sock.connect((SSL_URL, SSL_PORT))
    # Too few / wrongly typed arguments must raise TypeError.
    AssertError(TypeError, real_ssl.sslwrap)
    AssertError(TypeError, real_ssl.sslwrap, False)
    AssertError(TypeError, real_ssl.sslwrap, None, False)
    # Bad key/cert paths surface as SSLError.
    AssertError(real_ssl.SSLError, real_ssl.sslwrap, sock._sock, False, 'bad keyfile')
    AssertError(real_ssl.SSLError, real_ssl.sslwrap, sock._sock, False, 'bad keyfile', 'bad certfile')
    sock.close()
[ "@", "disabled", "@", "retry_on_failure", "def", "test_SSLType_ssl_neg", "(", ")", ":", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ")", "s", ".", "connect", "(", "(", "SSL_URL", ",", "SSL_PORT", ")", ")", "AssertError", "(", "TypeError", ",", "real_ssl", ".", "sslwrap", ")", "AssertError", "(", "TypeError", ",", "real_ssl", ".", "sslwrap", ",", "False", ")", "AssertError", "(", "TypeError", ",", "real_ssl", ".", "sslwrap", ",", "None", ",", "False", ")", "AssertError", "(", "real_ssl", ".", "SSLError", ",", "real_ssl", ".", "sslwrap", ",", "s", ".", "_sock", ",", "False", ",", "'bad keyfile'", ")", "AssertError", "(", "real_ssl", ".", "SSLError", ",", "real_ssl", ".", "sslwrap", ",", "s", ".", "_sock", ",", "False", ",", "'bad keyfile'", ",", "'bad certfile'", ")", "s", ".", "close", "(", ")" ]
see comments on test_ssltype_ssl .
train
false
44,593
def delete_key(key_name, region=None, key=None, keyid=None, profile=None):
    """Delete the named EC2 key pair.

    Returns the boto result on success, or False on a server error.
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        deleted = conn.delete_key_pair(key_name)
        log.debug('the key to return is : {0}'.format(deleted))
        return deleted
    except boto.exception.BotoServerError as exc:
        # Best-effort: log and report failure instead of propagating.
        log.debug(exc)
        return False
[ "def", "delete_key", "(", "key_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "key", "=", "conn", ".", "delete_key_pair", "(", "key_name", ")", "log", ".", "debug", "(", "'the key to return is : {0}'", ".", "format", "(", "key", ")", ")", "return", "key", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "return", "False" ]
deletes a key .
train
true
44,594
def sprModelAnal(x, fs, w, N, H, t, minSineDur, maxnSines, freqDevOffset, freqDevSlope):
    """Sinusoidal-plus-residual analysis of input sound *x*.

    Returns (tfreq, tmag, tphase, xr): sinusoid tracks plus the residual signal.
    """
    # Track the sinusoidal components first.
    tfreq, tmag, tphase = SM.sineModelAnal(
        x, fs, w, N, H, t, maxnSines, minSineDur, freqDevOffset, freqDevSlope)
    # Subtract the synthesized sinusoids (FFT size 512) to obtain the residual.
    Ns = 512
    xr = UF.sineSubtraction(x, Ns, H, tfreq, tmag, tphase, fs)
    return (tfreq, tmag, tphase, xr)
[ "def", "sprModelAnal", "(", "x", ",", "fs", ",", "w", ",", "N", ",", "H", ",", "t", ",", "minSineDur", ",", "maxnSines", ",", "freqDevOffset", ",", "freqDevSlope", ")", ":", "(", "tfreq", ",", "tmag", ",", "tphase", ")", "=", "SM", ".", "sineModelAnal", "(", "x", ",", "fs", ",", "w", ",", "N", ",", "H", ",", "t", ",", "maxnSines", ",", "minSineDur", ",", "freqDevOffset", ",", "freqDevSlope", ")", "Ns", "=", "512", "xr", "=", "UF", ".", "sineSubtraction", "(", "x", ",", "Ns", ",", "H", ",", "tfreq", ",", "tmag", ",", "tphase", ",", "fs", ")", "return", "(", "tfreq", ",", "tmag", ",", "tphase", ",", "xr", ")" ]
analysis of a sound using the sinusoidal plus residual model x: input sound .
train
false
44,596
def compile_application(folder, skip_failed_views=False):
    """Compile the models, controllers and views of the app in *folder*.

    Returns the list of views that failed to compile (from compile_views).
    """
    # Start from a clean slate before recompiling everything.
    remove_compiled_application(folder)
    os.mkdir(pjoin(folder, 'compiled'))
    compile_models(folder)
    compile_controllers(folder)
    return compile_views(folder, skip_failed_views)
[ "def", "compile_application", "(", "folder", ",", "skip_failed_views", "=", "False", ")", ":", "remove_compiled_application", "(", "folder", ")", "os", ".", "mkdir", "(", "pjoin", "(", "folder", ",", "'compiled'", ")", ")", "compile_models", "(", "folder", ")", "compile_controllers", "(", "folder", ")", "failed_views", "=", "compile_views", "(", "folder", ",", "skip_failed_views", ")", "return", "failed_views" ]
compiles all models .
train
false
44,597
def do(name, query=''):
    """Dispatch *query* to the module-level ``do_<name>`` handler.

    Unknown commands and handler errors are reported via _out instead of raising.
    """
    try:
        cmd = 'do_{}'.format(name)
        module_ns = globals()
        if cmd in module_ns:
            module_ns[cmd](query)
        else:
            _out('Invalid command "{}"'.format(name))
    except Exception as e:
        _out('Error: {}'.format(e))
[ "def", "do", "(", "name", ",", "query", "=", "''", ")", ":", "try", ":", "cmd", "=", "'do_{}'", ".", "format", "(", "name", ")", "if", "(", "cmd", "in", "globals", "(", ")", ")", ":", "globals", "(", ")", "[", "cmd", "]", "(", "query", ")", "else", ":", "_out", "(", "'Invalid command \"{}\"'", ".", "format", "(", "name", ")", ")", "except", "Exception", "as", "e", ":", "_out", "(", "'Error: {}'", ".", "format", "(", "e", ")", ")" ]
execute a ruby command with rbenvs shims from the user or the system cli example: .
train
false
44,598
@pytest.mark.parametrize('url_text, expected', [
    ('http://test.gr/%CE%B1%CE%B2%CE%B3%CE%B4.txt', 'http://test.gr/\xce\xb1\xce\xb2\xce\xb3\xce\xb4.txt'),
    ('http://test.ru/%D0%B0%D0%B1%D0%B2%D0%B3.txt', 'http://test.ru/\xd0\xb0\xd0\xb1\xd0\xb2\xd0\xb3.txt'),
    ('http://test.com/s%20p%20a%20c%20e.txt', 'http://test.com/s p a c e.txt'),
    ('http://test.com/%22quotes%22.html', 'http://test.com/%22quotes%22.html'),
    ('http://username:secret%20password@test.com', 'http://username@test.com'),
    ('http://example.com%5b/', 'http://example.com%5b/'),
])
def test_set_hover_url_encoded(url_widget, url_text, expected):
    """Check the displayed text when hovering over a percent-encoded link."""
    url_widget.set_hover_url(url_text)
    # NOTE(review): per the cases above, display text appears to decode safe
    # percent-escapes but keep quotes/brackets encoded and strip credentials —
    # confirm against the widget's URL-formatting rules.
    assert (url_widget.text() == expected)
    assert (url_widget._urltype == url.UrlType.hover)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'url_text, expected'", ",", "[", "(", "'http://test.gr/%CE%B1%CE%B2%CE%B3%CE%B4.txt'", ",", "'http://test.gr/\\xce\\xb1\\xce\\xb2\\xce\\xb3\\xce\\xb4.txt'", ")", ",", "(", "'http://test.ru/%D0%B0%D0%B1%D0%B2%D0%B3.txt'", ",", "'http://test.ru/\\xd0\\xb0\\xd0\\xb1\\xd0\\xb2\\xd0\\xb3.txt'", ")", ",", "(", "'http://test.com/s%20p%20a%20c%20e.txt'", ",", "'http://test.com/s p a c e.txt'", ")", ",", "(", "'http://test.com/%22quotes%22.html'", ",", "'http://test.com/%22quotes%22.html'", ")", ",", "(", "'http://username:secret%20password@test.com'", ",", "'http://username@test.com'", ")", ",", "(", "'http://example.com%5b/'", ",", "'http://example.com%5b/'", ")", "]", ")", "def", "test_set_hover_url_encoded", "(", "url_widget", ",", "url_text", ",", "expected", ")", ":", "url_widget", ".", "set_hover_url", "(", "url_text", ")", "assert", "(", "url_widget", ".", "text", "(", ")", "==", "expected", ")", "assert", "(", "url_widget", ".", "_urltype", "==", "url", ".", "UrlType", ".", "hover", ")" ]
test text when hovering over a percent encoded link .
train
false
44,599
def _prepare(praw_object, arguments_dict, target): if praw_object.__dict__.get('_listing_use_sort'): PRAWBase._safely_add_arguments(arguments_dict, 'params', sort=target) return praw_object._path return urljoin(praw_object._path, target)
[ "def", "_prepare", "(", "praw_object", ",", "arguments_dict", ",", "target", ")", ":", "if", "praw_object", ".", "__dict__", ".", "get", "(", "'_listing_use_sort'", ")", ":", "PRAWBase", ".", "_safely_add_arguments", "(", "arguments_dict", ",", "'params'", ",", "sort", "=", "target", ")", "return", "praw_object", ".", "_path", "return", "urljoin", "(", "praw_object", ".", "_path", ",", "target", ")" ]
fix for redditor methods that use a query param rather than subpath .
train
false
44,604
def get_preprocessing(name, is_training=False):
    """Return a preprocessing_fn(image, output_height, output_width, **kwargs)
    for the model called *name*.

    Raises ValueError for unrecognized names.
    """
    # Map each model name onto its preprocessing module.
    preprocessing_fn_map = {}
    for model in ('inception', 'inception_v1', 'inception_v2', 'inception_v3',
                  'inception_v4', 'inception_resnet_v2'):
        preprocessing_fn_map[model] = inception_preprocessing
    for model in ('resnet_v1_50', 'resnet_v1_101', 'resnet_v1_152',
                  'vgg', 'vgg_a', 'vgg_16', 'vgg_19'):
        preprocessing_fn_map[model] = vgg_preprocessing
    preprocessing_fn_map['cifarnet'] = cifarnet_preprocessing
    preprocessing_fn_map['lenet'] = lenet_preprocessing
    if name not in preprocessing_fn_map:
        raise ValueError('Preprocessing name [%s] was not recognized' % name)

    def preprocessing_fn(image, output_height, output_width, **kwargs):
        # Delegate to the chosen module, pinning the training flag.
        return preprocessing_fn_map[name].preprocess_image(
            image, output_height, output_width, is_training=is_training, **kwargs)
    return preprocessing_fn
[ "def", "get_preprocessing", "(", "name", ",", "is_training", "=", "False", ")", ":", "preprocessing_fn_map", "=", "{", "'cifarnet'", ":", "cifarnet_preprocessing", ",", "'inception'", ":", "inception_preprocessing", ",", "'inception_v1'", ":", "inception_preprocessing", ",", "'inception_v2'", ":", "inception_preprocessing", ",", "'inception_v3'", ":", "inception_preprocessing", ",", "'inception_v4'", ":", "inception_preprocessing", ",", "'inception_resnet_v2'", ":", "inception_preprocessing", ",", "'lenet'", ":", "lenet_preprocessing", ",", "'resnet_v1_50'", ":", "vgg_preprocessing", ",", "'resnet_v1_101'", ":", "vgg_preprocessing", ",", "'resnet_v1_152'", ":", "vgg_preprocessing", ",", "'vgg'", ":", "vgg_preprocessing", ",", "'vgg_a'", ":", "vgg_preprocessing", ",", "'vgg_16'", ":", "vgg_preprocessing", ",", "'vgg_19'", ":", "vgg_preprocessing", "}", "if", "(", "name", "not", "in", "preprocessing_fn_map", ")", ":", "raise", "ValueError", "(", "(", "'Preprocessing name [%s] was not recognized'", "%", "name", ")", ")", "def", "preprocessing_fn", "(", "image", ",", "output_height", ",", "output_width", ",", "**", "kwargs", ")", ":", "return", "preprocessing_fn_map", "[", "name", "]", ".", "preprocess_image", "(", "image", ",", "output_height", ",", "output_width", ",", "is_training", "=", "is_training", ",", "**", "kwargs", ")", "return", "preprocessing_fn" ]
returns preprocessing_fn .
train
false
44,605
def UCB(mean, var, zscore):
    """Upper confidence bound for a Gaussian-distributed prediction.

    Returns mean plus *zscore* standard deviations (sqrt of *var*).
    """
    return mean + zscore * np.sqrt(var)
[ "def", "UCB", "(", "mean", ",", "var", ",", "zscore", ")", ":", "return", "(", "mean", "+", "(", "np", ".", "sqrt", "(", "var", ")", "*", "zscore", ")", ")" ]
upper confidence bound for a model which predicts a gaussian-distributed outcome .
train
false
44,608
def for_brands(brands):
    """Return a pretty-printed recommendation sentence for *brands* alone."""
    brands = safe_brands(brands)
    recs = recommend_for_brands(brands)
    liked = ', '.join(brands)
    recommended = ', '.join(recs)
    return 'For a user who likes {liked}, we recommend {recs}.'.format(
        liked=liked, recs=recommended)
[ "def", "for_brands", "(", "brands", ")", ":", "brands", "=", "safe_brands", "(", "brands", ")", "recs", "=", "recommend_for_brands", "(", "brands", ")", "return", "'For a user who likes {liked}, we recommend {recs}.'", ".", "format", "(", "liked", "=", "', '", ".", "join", "(", "brands", ")", ",", "recs", "=", "', '", ".", "join", "(", "recs", ")", ")" ]
return a pretty-print string of recommendations for brands alone .
train
false
44,610
def veoh_download_by_id(item_id, output_dir='.', merge=False, info_only=False, **kwargs):
    """Download a Veoh video by id via the Android-mobile watch page."""
    # quality=1 on the mobile endpoint; the resulting page embeds a direct stream URL.
    webpage_url = 'http://www.veoh.com/m/watch.php?v={item_id}&quality=1'.format(item_id=item_id)
    a = get_content(webpage_url, decoded=True)
    # Scrape the media URL and the page title out of the HTML.
    url = match1(a, '<source src="(.*?)\\"\\W')
    title = match1(a, '<meta property="og:title" content="([^"]*)"')
    (type_, ext, size) = url_info(url)
    print_info(site_info, title, type_, size)
    if (not info_only):
        download_urls([url], title, ext, total_size=None, output_dir=output_dir, merge=merge)
[ "def", "veoh_download_by_id", "(", "item_id", ",", "output_dir", "=", "'.'", ",", "merge", "=", "False", ",", "info_only", "=", "False", ",", "**", "kwargs", ")", ":", "webpage_url", "=", "'http://www.veoh.com/m/watch.php?v={item_id}&quality=1'", ".", "format", "(", "item_id", "=", "item_id", ")", "a", "=", "get_content", "(", "webpage_url", ",", "decoded", "=", "True", ")", "url", "=", "match1", "(", "a", ",", "'<source src=\"(.*?)\\\\\"\\\\W'", ")", "title", "=", "match1", "(", "a", ",", "'<meta property=\"og:title\" content=\"([^\"]*)\"'", ")", "(", "type_", ",", "ext", ",", "size", ")", "=", "url_info", "(", "url", ")", "print_info", "(", "site_info", ",", "title", ",", "type_", ",", "size", ")", "if", "(", "not", "info_only", ")", ":", "download_urls", "(", "[", "url", "]", ",", "title", ",", "ext", ",", "total_size", "=", "None", ",", "output_dir", "=", "output_dir", ",", "merge", "=", "merge", ")" ]
source: android mobile .
train
true
44,612
def _get_server_root(config): subdirs = [name for name in os.listdir(config) if os.path.isdir(os.path.join(config, name))] if (len(subdirs) != 1): errors.Error('Malformed configuration directory {0}'.format(config)) return os.path.join(config, subdirs[0].rstrip())
[ "def", "_get_server_root", "(", "config", ")", ":", "subdirs", "=", "[", "name", "for", "name", "in", "os", ".", "listdir", "(", "config", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "config", ",", "name", ")", ")", "]", "if", "(", "len", "(", "subdirs", ")", "!=", "1", ")", ":", "errors", ".", "Error", "(", "'Malformed configuration directory {0}'", ".", "format", "(", "config", ")", ")", "return", "os", ".", "path", ".", "join", "(", "config", ",", "subdirs", "[", "0", "]", ".", "rstrip", "(", ")", ")" ]
returns the server root directory in config .
train
false
44,613
def ttost_ind(x1, x2, low, upp, usevar='pooled', weights=(None, None), transform=None):
    """Two one-sided t tests (TOST) of equivalence for two independent samples.

    Null hypothesis: m1 - m2 < low or m1 - m2 > upp;
    alternative: low < m1 - m2 < upp.
    Returns (pvalue, lower_test_result, upper_test_result).
    """
    if transform:
        if transform is np.log:
            # log can be applied to each sample independently.
            x1, x2 = transform(x1), transform(x2)
        else:
            # Other transforms are applied to the combined sample, then split
            # back so both halves see the same transformation.
            pooled = transform(np.concatenate((x1, x2), 0))
            x1, x2 = pooled[:len(x1)], pooled[len(x1):]
        low, upp = transform(low), transform(upp)
    cm = CompareMeans(DescrStatsW(x1, weights=weights[0], ddof=0),
                      DescrStatsW(x2, weights=weights[1], ddof=0))
    pval, res = cm.ttost_ind(low, upp, usevar=usevar)
    return (pval, res[0], res[1])
[ "def", "ttost_ind", "(", "x1", ",", "x2", ",", "low", ",", "upp", ",", "usevar", "=", "'pooled'", ",", "weights", "=", "(", "None", ",", "None", ")", ",", "transform", "=", "None", ")", ":", "if", "transform", ":", "if", "(", "transform", "is", "np", ".", "log", ")", ":", "x1", "=", "transform", "(", "x1", ")", "x2", "=", "transform", "(", "x2", ")", "else", ":", "xx", "=", "transform", "(", "np", ".", "concatenate", "(", "(", "x1", ",", "x2", ")", ",", "0", ")", ")", "x1", "=", "xx", "[", ":", "len", "(", "x1", ")", "]", "x2", "=", "xx", "[", "len", "(", "x1", ")", ":", "]", "low", "=", "transform", "(", "low", ")", "upp", "=", "transform", "(", "upp", ")", "cm", "=", "CompareMeans", "(", "DescrStatsW", "(", "x1", ",", "weights", "=", "weights", "[", "0", "]", ",", "ddof", "=", "0", ")", ",", "DescrStatsW", "(", "x2", ",", "weights", "=", "weights", "[", "1", "]", ",", "ddof", "=", "0", ")", ")", "(", "pval", ",", "res", ")", "=", "cm", ".", "ttost_ind", "(", "low", ",", "upp", ",", "usevar", "=", "usevar", ")", "return", "(", "pval", ",", "res", "[", "0", "]", ",", "res", "[", "1", "]", ")" ]
test of equivalence for two independent samples tost: two one-sided t tests null hypothesis: m1 - m2 < low or m1 - m2 > upp alternative hypothesis: low < m1 - m2 < upp where m1 .
train
false
44,614
def hmset(key, **fieldsvals):
    """Set multiple hash fields to multiple values on the Redis server.

    Connection options (host, port, db, password) are popped out of the
    keyword arguments; whatever remains are the field/value pairs.
    """
    conn_opts = [fieldsvals.pop(opt, None)
                 for opt in ('host', 'port', 'db', 'password')]
    server = _connect(*conn_opts)
    return server.hmset(key, **fieldsvals)
[ "def", "hmset", "(", "key", ",", "**", "fieldsvals", ")", ":", "host", "=", "fieldsvals", ".", "pop", "(", "'host'", ",", "None", ")", "port", "=", "fieldsvals", ".", "pop", "(", "'port'", ",", "None", ")", "database", "=", "fieldsvals", ".", "pop", "(", "'db'", ",", "None", ")", "password", "=", "fieldsvals", ".", "pop", "(", "'password'", ",", "None", ")", "server", "=", "_connect", "(", "host", ",", "port", ",", "database", ",", "password", ")", "return", "server", ".", "hmset", "(", "key", ",", "**", "fieldsvals", ")" ]
sets multiple hash fields to multiple values .
train
false
44,615
def _set_lang(lang):
    """Set the i18n language, honouring a custom ckan.i18n_directory if set."""
    i18n_dir = config.get('ckan.i18n_directory')
    if not i18n_dir:
        i18n.set_lang(lang, class_=Translations)
        return
    # Point pylons at the custom translation directory via a minimal fake config.
    fake_config = {
        'pylons.paths': {'root': i18n_dir},
        'pylons.package': config['pylons.package'],
    }
    i18n.set_lang(lang, config=fake_config, class_=Translations)
[ "def", "_set_lang", "(", "lang", ")", ":", "if", "config", ".", "get", "(", "'ckan.i18n_directory'", ")", ":", "fake_config", "=", "{", "'pylons.paths'", ":", "{", "'root'", ":", "config", "[", "'ckan.i18n_directory'", "]", "}", ",", "'pylons.package'", ":", "config", "[", "'pylons.package'", "]", "}", "i18n", ".", "set_lang", "(", "lang", ",", "config", "=", "fake_config", ",", "class_", "=", "Translations", ")", "else", ":", "i18n", ".", "set_lang", "(", "lang", ",", "class_", "=", "Translations", ")" ]
allows a custom i18n directory to be specified .
train
false
44,617
def get_thread_analytics(exploration_id):
    """Fetch the FeedbackAnalytics for the given exploration id."""
    aggregator = feedback_jobs_continuous.FeedbackAnalyticsAggregator
    return aggregator.get_thread_analytics(exploration_id)
[ "def", "get_thread_analytics", "(", "exploration_id", ")", ":", "return", "feedback_jobs_continuous", ".", "FeedbackAnalyticsAggregator", ".", "get_thread_analytics", "(", "exploration_id", ")" ]
fetches the feedbackanalytics for the given exploration id .
train
false
44,619
def register_surrogateescape():
    """Register the surrogateescape error handler on Python 2.

    No-op on Python 3 (built in) or when the handler is already registered.
    """
    if PY3:
        return
    try:
        # Probe for an existing registration.
        codecs.lookup_error(FS_ERRORS)
    except LookupError:
        codecs.register_error(FS_ERRORS, surrogateescape_handler)
[ "def", "register_surrogateescape", "(", ")", ":", "if", "PY3", ":", "return", "try", ":", "codecs", ".", "lookup_error", "(", "FS_ERRORS", ")", "except", "LookupError", ":", "codecs", ".", "register_error", "(", "FS_ERRORS", ",", "surrogateescape_handler", ")" ]
registers the surrogateescape error handler on python 2 .
train
true
44,620
@image_comparison(baseline_images=[u'EventCollection_plot__switch_orientation__2x'])
def test__EventCollection__switch_orientation_2x():
    """Calling switch_orientation twice must restore the default orientation."""
    (splt, coll, props) = generate_EventCollection_plot()
    coll.switch_orientation()
    coll.switch_orientation()
    new_positions = coll.get_positions()
    # After the double flip, orientation and positions must match the originals.
    assert_equal(props[u'orientation'], coll.get_orientation())
    assert_equal(True, coll.is_horizontal())
    np.testing.assert_array_equal(props[u'positions'], new_positions)
    check_segments(coll, new_positions, props[u'linelength'], props[u'lineoffset'], props[u'orientation'])
    splt.set_title(u'EventCollection: switch_orientation 2x')
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'EventCollection_plot__switch_orientation__2x'", "]", ")", "def", "test__EventCollection__switch_orientation_2x", "(", ")", ":", "(", "splt", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "coll", ".", "switch_orientation", "(", ")", "coll", ".", "switch_orientation", "(", ")", "new_positions", "=", "coll", ".", "get_positions", "(", ")", "assert_equal", "(", "props", "[", "u'orientation'", "]", ",", "coll", ".", "get_orientation", "(", ")", ")", "assert_equal", "(", "True", ",", "coll", ".", "is_horizontal", "(", ")", ")", "np", ".", "testing", ".", "assert_array_equal", "(", "props", "[", "u'positions'", "]", ",", "new_positions", ")", "check_segments", "(", "coll", ",", "new_positions", ",", "props", "[", "u'linelength'", "]", ",", "props", "[", "u'lineoffset'", "]", ",", "props", "[", "u'orientation'", "]", ")", "splt", ".", "set_title", "(", "u'EventCollection: switch_orientation 2x'", ")" ]
check to make sure calling switch_orientation twice sets the orientation back to the default .
train
false
44,625
def getComplexPolygon(center, radius, sides, startAngle=0.0):
    """Get the complex polygon: the vertices of a regular polygon.

    center: center point; radius: circumradius; sides: number of vertices
    (a negative count reverses the winding via the negative step angle);
    startAngle: angle of the first vertex.
    Assumes getWiddershinsUnitPolar returns a unit vector for the given
    angle that supports * and + with center — TODO confirm.
    """
    complexPolygon = []
    sideAngle = 2.0 * math.pi / float(sides)
    # Fix: range() replaces the Python-2-only xrange(); abs() keeps iteration
    # positive while sideAngle's sign handles a negative side count.
    for side in range(abs(sides)):
        unitPolar = getWiddershinsUnitPolar(startAngle)
        complexPolygon.append(unitPolar * radius + center)
        startAngle += sideAngle
    return complexPolygon
[ "def", "getComplexPolygon", "(", "center", ",", "radius", ",", "sides", ",", "startAngle", "=", "0.0", ")", ":", "complexPolygon", "=", "[", "]", "sideAngle", "=", "(", "(", "2.0", "*", "math", ".", "pi", ")", "/", "float", "(", "sides", ")", ")", "for", "side", "in", "xrange", "(", "abs", "(", "sides", ")", ")", ":", "unitPolar", "=", "getWiddershinsUnitPolar", "(", "startAngle", ")", "complexPolygon", ".", "append", "(", "(", "(", "unitPolar", "*", "radius", ")", "+", "center", ")", ")", "startAngle", "+=", "sideAngle", "return", "complexPolygon" ]
get the complex polygon .
train
false
44,628
def find_executable(executable, path=None): if (path is None): path = os.environ['PATH'] paths = string.split(path, os.pathsep) (base, ext) = os.path.splitext(executable) if (((sys.platform == 'win32') or (os.name == 'os2')) and (ext != '.exe')): executable = (executable + '.exe') if (not os.path.isfile(executable)): for p in paths: f = os.path.join(p, executable) if os.path.isfile(f): return f return None else: return executable
[ "def", "find_executable", "(", "executable", ",", "path", "=", "None", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "os", ".", "environ", "[", "'PATH'", "]", "paths", "=", "string", ".", "split", "(", "path", ",", "os", ".", "pathsep", ")", "(", "base", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "executable", ")", "if", "(", "(", "(", "sys", ".", "platform", "==", "'win32'", ")", "or", "(", "os", ".", "name", "==", "'os2'", ")", ")", "and", "(", "ext", "!=", "'.exe'", ")", ")", ":", "executable", "=", "(", "executable", "+", "'.exe'", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "executable", ")", ")", ":", "for", "p", "in", "paths", ":", "f", "=", "os", ".", "path", ".", "join", "(", "p", ",", "executable", ")", "if", "os", ".", "path", ".", "isfile", "(", "f", ")", ":", "return", "f", "return", "None", "else", ":", "return", "executable" ]
try to find executable in the directories listed in path .
train
true
44,629
def escapejs(value):
    """Hex-encode characters for safe use inside JavaScript strings."""
    for unsafe_seq, escaped in _js_escapes:
        # Re-wrap after every replacement so the result stays marked safe.
        value = mark_safe(force_unicode(value).replace(unsafe_seq, escaped))
    return value
[ "def", "escapejs", "(", "value", ")", ":", "for", "(", "bad", ",", "good", ")", "in", "_js_escapes", ":", "value", "=", "mark_safe", "(", "force_unicode", "(", "value", ")", ".", "replace", "(", "bad", ",", "good", ")", ")", "return", "value" ]
hex encodes characters for use in javascript strings .
train
false
44,630
@Profiler.profile
def test_baked_query_cols_only(n):
    """Profile a baked query that selects only the entity's columns."""
    bakery = baked.bakery()
    s = Session(bind=engine)
    for id_ in random.sample(ids, n):
        # Bake the column-only base query, then extend it with a bound filter;
        # the lambdas act as cache keys for the baked-query cache.
        q = bakery((lambda s: s.query(Customer.id, Customer.name, Customer.description)))
        q += (lambda q: q.filter((Customer.id == bindparam('id'))))
        q(s).params(id=id_).one()
[ "@", "Profiler", ".", "profile", "def", "test_baked_query_cols_only", "(", "n", ")", ":", "bakery", "=", "baked", ".", "bakery", "(", ")", "s", "=", "Session", "(", "bind", "=", "engine", ")", "for", "id_", "in", "random", ".", "sample", "(", "ids", ",", "n", ")", ":", "q", "=", "bakery", "(", "(", "lambda", "s", ":", "s", ".", "query", "(", "Customer", ".", "id", ",", "Customer", ".", "name", ",", "Customer", ".", "description", ")", ")", ")", "q", "+=", "(", "lambda", "q", ":", "q", ".", "filter", "(", "(", "Customer", ".", "id", "==", "bindparam", "(", "'id'", ")", ")", ")", ")", "q", "(", "s", ")", ".", "params", "(", "id", "=", "id_", ")", ".", "one", "(", ")" ]
test a baked query of only the entity columns .
train
false
44,631
def codec_lookup(encoding, default='utf-8'):
    """Safely look up a codec, falling back to *default*.

    Falls back for empty/None names, unknown codecs, non-text codecs
    (e.g. base64), and the 'undefined' codec.
    """
    def _fallback():
        return codecs.lookup(default)

    if not encoding:
        return _fallback()
    try:
        info = codecs.lookup(encoding)
    except (LookupError, TypeError):
        return _fallback()
    # _is_text_encoding may be missing on older interpreters; in that case
    # accept the codec (matches the original AttributeError pass-through).
    if not getattr(info, '_is_text_encoding', True):
        return _fallback()
    if info.name == 'undefined':
        return _fallback()
    return info
[ "def", "codec_lookup", "(", "encoding", ",", "default", "=", "'utf-8'", ")", ":", "if", "(", "not", "encoding", ")", ":", "return", "codecs", ".", "lookup", "(", "default", ")", "try", ":", "info", "=", "codecs", ".", "lookup", "(", "encoding", ")", "except", "(", "LookupError", ",", "TypeError", ")", ":", "return", "codecs", ".", "lookup", "(", "default", ")", "try", ":", "if", "(", "not", "info", ".", "_is_text_encoding", ")", ":", "return", "codecs", ".", "lookup", "(", "default", ")", "except", "AttributeError", ":", "pass", "if", "(", "info", ".", "name", "==", "'undefined'", ")", ":", "return", "codecs", ".", "lookup", "(", "default", ")", "return", "info" ]
safely lookup a codec and ignore non-text codecs .
train
false
44,632
def preprocess_for_cse(expr, optimizations):
    """Apply the pre-processing half of each (pre, post) optimization pair."""
    for pre, _post in optimizations:
        if pre is not None:
            expr = pre(expr)
    return expr
[ "def", "preprocess_for_cse", "(", "expr", ",", "optimizations", ")", ":", "for", "(", "pre", ",", "post", ")", "in", "optimizations", ":", "if", "(", "pre", "is", "not", "None", ")", ":", "expr", "=", "pre", "(", "expr", ")", "return", "expr" ]
preprocess an expression to optimize for common subexpression elimination .
train
false
44,635
def _rename_json_field(data_dict):
    """Rename the ``json`` type to the datastore's ``nested`` equivalent."""
    old_name, new_name = 'json', 'nested'
    return _rename_field(data_dict, old_name, new_name)
[ "def", "_rename_json_field", "(", "data_dict", ")", ":", "return", "_rename_field", "(", "data_dict", ",", "'json'", ",", "'nested'", ")" ]
rename json type to a corresponding type for the datastore since pre 9 .
train
false
44,636
def get_filter_by_name(filtername, **options):
    """Return an instantiated filter, raising ClassNotFound if unknown."""
    cls = find_filter_class(filtername)
    if not cls:
        raise ClassNotFound('filter %r not found' % filtername)
    return cls(**options)
[ "def", "get_filter_by_name", "(", "filtername", ",", "**", "options", ")", ":", "cls", "=", "find_filter_class", "(", "filtername", ")", "if", "cls", ":", "return", "cls", "(", "**", "options", ")", "else", ":", "raise", "ClassNotFound", "(", "(", "'filter %r not found'", "%", "filtername", ")", ")" ]
return an instantiated filter .
train
true
44,637
def get_sorted_index(l, reverse=True):
    """Return the indices of *l* ordered by their values (descending by default)."""
    indices = range(len(l))
    return sorted(indices, key=l.__getitem__, reverse=reverse)
[ "def", "get_sorted_index", "(", "l", ",", "reverse", "=", "True", ")", ":", "return", "sorted", "(", "range", "(", "len", "(", "l", ")", ")", ",", "key", "=", "(", "lambda", "k", ":", "l", "[", "k", "]", ")", ",", "reverse", "=", "reverse", ")" ]
get the sorted index of the original list .
train
false
44,639
def ordered_dict_representer(dumper, data):
    """YAML representer that serializes an OrderedDict like a plain mapping."""
    return dumper.represent_dict(data)
[ "def", "ordered_dict_representer", "(", "dumper", ",", "data", ")", ":", "return", "dumper", ".", "represent_dict", "(", "data", ")" ]
ordereddict representer .
train
false
44,642
def get_entry_map(dist, group=None):
    """Return the entry point map for *group* of the given distribution."""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
[ "def", "get_entry_map", "(", "dist", ",", "group", "=", "None", ")", ":", "return", "get_distribution", "(", "dist", ")", ".", "get_entry_map", "(", "group", ")" ]
return the entry point map for group .
train
false
44,644
def set_repoze_user(user_id):
    """Remember *user_id* via the repoze.who 'friendlyform' plugin, if loaded."""
    environ = request.environ
    if 'repoze.who.plugins' not in environ:
        return
    rememberer = environ['repoze.who.plugins']['friendlyform']
    identity = {'repoze.who.userid': user_id}
    response.headerlist += rememberer.remember(environ, identity)
[ "def", "set_repoze_user", "(", "user_id", ")", ":", "if", "(", "'repoze.who.plugins'", "in", "request", ".", "environ", ")", ":", "rememberer", "=", "request", ".", "environ", "[", "'repoze.who.plugins'", "]", "[", "'friendlyform'", "]", "identity", "=", "{", "'repoze.who.userid'", ":", "user_id", "}", "response", ".", "headerlist", "+=", "rememberer", ".", "remember", "(", "request", ".", "environ", ",", "identity", ")" ]
set the repoze .
train
false
44,645
def _count_righthand_zero_bits(number, bits): if (number == 0): return bits for i in range(bits): if ((number >> i) & 1): return i return bits
[ "def", "_count_righthand_zero_bits", "(", "number", ",", "bits", ")", ":", "if", "(", "number", "==", "0", ")", ":", "return", "bits", "for", "i", "in", "range", "(", "bits", ")", ":", "if", "(", "(", "number", ">>", "i", ")", "&", "1", ")", ":", "return", "i", "return", "bits" ]
count the number of zero bits on the right hand side .
train
true
44,647
def _group_agg(values, bounds, f): if (values.ndim == 1): N = len(values) result = np.empty(len(bounds), dtype=float) elif (values.ndim == 2): (N, K) = values.shape result = np.empty((len(bounds), K), dtype=float) testagg = f(values[:min(1, len(values))]) if (isinstance(testagg, np.ndarray) and (testagg.ndim == 2)): raise AssertionError('Function must reduce') for (i, left_bound) in enumerate(bounds): if (i == (len(bounds) - 1)): right_bound = N else: right_bound = bounds[(i + 1)] result[i] = f(values[left_bound:right_bound]) return result
[ "def", "_group_agg", "(", "values", ",", "bounds", ",", "f", ")", ":", "if", "(", "values", ".", "ndim", "==", "1", ")", ":", "N", "=", "len", "(", "values", ")", "result", "=", "np", ".", "empty", "(", "len", "(", "bounds", ")", ",", "dtype", "=", "float", ")", "elif", "(", "values", ".", "ndim", "==", "2", ")", ":", "(", "N", ",", "K", ")", "=", "values", ".", "shape", "result", "=", "np", ".", "empty", "(", "(", "len", "(", "bounds", ")", ",", "K", ")", ",", "dtype", "=", "float", ")", "testagg", "=", "f", "(", "values", "[", ":", "min", "(", "1", ",", "len", "(", "values", ")", ")", "]", ")", "if", "(", "isinstance", "(", "testagg", ",", "np", ".", "ndarray", ")", "and", "(", "testagg", ".", "ndim", "==", "2", ")", ")", ":", "raise", "AssertionError", "(", "'Function must reduce'", ")", "for", "(", "i", ",", "left_bound", ")", "in", "enumerate", "(", "bounds", ")", ":", "if", "(", "i", "==", "(", "len", "(", "bounds", ")", "-", "1", ")", ")", ":", "right_bound", "=", "N", "else", ":", "right_bound", "=", "bounds", "[", "(", "i", "+", "1", ")", "]", "result", "[", "i", "]", "=", "f", "(", "values", "[", "left_bound", ":", "right_bound", "]", ")", "return", "result" ]
r-style aggregator parameters values : n-length or n x k ndarray bounds : b-length ndarray f : ndarray aggregation function returns ndarray with same length as bounds array .
train
false
44,649
def create_subpackage(path, parent_package_name, subpackage_name='plugins'):
    """Dynamically create and register a package for plugin loading."""
    package_name = _module_name(parent_package_name, subpackage_name)
    package = types.ModuleType(package_name)
    if isinstance(path, six.string_types):
        package.__path__ = [path]
    else:
        package.__path__ = list(path)
    sys.modules[package_name] = package
    return package
[ "def", "create_subpackage", "(", "path", ",", "parent_package_name", ",", "subpackage_name", "=", "'plugins'", ")", ":", "package_name", "=", "_module_name", "(", "parent_package_name", ",", "subpackage_name", ")", "package", "=", "types", ".", "ModuleType", "(", "package_name", ")", "package", ".", "__path__", "=", "(", "[", "path", "]", "if", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "else", "list", "(", "path", ")", ")", "sys", ".", "modules", "[", "package_name", "]", "=", "package", "return", "package" ]
dynamically create a package into which to load plugins .
train
false
44,650
def get_service_ports(service):
    """List the ports of a firewalld *service* (permanent configuration)."""
    output = __firewall_cmd(
        '--permanent --service={0} --get-ports'.format(service))
    return output.split()
[ "def", "get_service_ports", "(", "service", ")", ":", "cmd", "=", "'--permanent --service={0} --get-ports'", ".", "format", "(", "service", ")", "return", "__firewall_cmd", "(", "cmd", ")", ".", "split", "(", ")" ]
list ports of a service .
train
false
44,652
def _api_config_speedlimit(output, kwargs):
    """API handler: set the download speed limit from ``kwargs['value']``."""
    # An absent or empty value means "no limit" (0).
    value = kwargs.get('value') or '0'
    Downloader.do.limit_speed(value)
    return report(output)
[ "def", "_api_config_speedlimit", "(", "output", ",", "kwargs", ")", ":", "value", "=", "kwargs", ".", "get", "(", "'value'", ")", "if", "(", "not", "value", ")", ":", "value", "=", "'0'", "Downloader", ".", "do", ".", "limit_speed", "(", "value", ")", "return", "report", "(", "output", ")" ]
api: accepts output .
train
false
44,653
def validator(application):
    """Wrap *application* in a WSGI-compliance checker (lint middleware).

    Placed between a WSGI server and application, it asserts that both
    sides follow the WSGI calling conventions and wraps the input,
    errors, write callable, and result iterator with checking proxies.
    """
    def lint_app(*args, **kw):
        assert_(len(args) == 2, 'Two arguments required')
        assert_(not kw, 'No keyword arguments allowed')
        environ, start_response = args
        check_environ(environ)
        # Non-empty once start_response has actually been invoked.
        start_response_started = []

        def start_response_wrapper(*args, **kw):
            assert_(len(args) == 2 or len(args) == 3,
                    'Invalid number of arguments: %s' % (args,))
            assert_(not kw, 'No keyword arguments allowed')
            status = args[0]
            headers = args[1]
            exc_info = args[2] if len(args) == 3 else None
            check_status(status)
            check_headers(headers)
            check_content_type(status, headers)
            check_exc_info(exc_info)
            start_response_started.append(None)
            return WriteWrapper(start_response(*args))

        environ['wsgi.input'] = InputWrapper(environ['wsgi.input'])
        environ['wsgi.errors'] = ErrorWrapper(environ['wsgi.errors'])
        iterator = application(environ, start_response_wrapper)
        assert_(iterator is not None and iterator != False,
                'The application must return an iterator, if only an empty list')
        check_iterator(iterator)
        return IteratorWrapper(iterator, start_response_started)

    return lint_app
[ "def", "validator", "(", "application", ")", ":", "def", "lint_app", "(", "*", "args", ",", "**", "kw", ")", ":", "assert_", "(", "(", "len", "(", "args", ")", "==", "2", ")", ",", "'Two arguments required'", ")", "assert_", "(", "(", "not", "kw", ")", ",", "'No keyword arguments allowed'", ")", "(", "environ", ",", "start_response", ")", "=", "args", "check_environ", "(", "environ", ")", "start_response_started", "=", "[", "]", "def", "start_response_wrapper", "(", "*", "args", ",", "**", "kw", ")", ":", "assert_", "(", "(", "(", "len", "(", "args", ")", "==", "2", ")", "or", "(", "len", "(", "args", ")", "==", "3", ")", ")", ",", "(", "'Invalid number of arguments: %s'", "%", "(", "args", ",", ")", ")", ")", "assert_", "(", "(", "not", "kw", ")", ",", "'No keyword arguments allowed'", ")", "status", "=", "args", "[", "0", "]", "headers", "=", "args", "[", "1", "]", "if", "(", "len", "(", "args", ")", "==", "3", ")", ":", "exc_info", "=", "args", "[", "2", "]", "else", ":", "exc_info", "=", "None", "check_status", "(", "status", ")", "check_headers", "(", "headers", ")", "check_content_type", "(", "status", ",", "headers", ")", "check_exc_info", "(", "exc_info", ")", "start_response_started", ".", "append", "(", "None", ")", "return", "WriteWrapper", "(", "start_response", "(", "*", "args", ")", ")", "environ", "[", "'wsgi.input'", "]", "=", "InputWrapper", "(", "environ", "[", "'wsgi.input'", "]", ")", "environ", "[", "'wsgi.errors'", "]", "=", "ErrorWrapper", "(", "environ", "[", "'wsgi.errors'", "]", ")", "iterator", "=", "application", "(", "environ", ",", "start_response_wrapper", ")", "assert_", "(", "(", "(", "iterator", "is", "not", "None", ")", "and", "(", "iterator", "!=", "False", ")", ")", ",", "'The application must return an iterator, if only an empty list'", ")", "check_iterator", "(", "iterator", ")", "return", "IteratorWrapper", "(", "iterator", ",", "start_response_started", ")", "return", "lint_app" ]
when applied between a wsgi server and a wsgi application .
train
false
44,654
def permission_required_or_403(perm, *args, **kwargs):
    """``permission_required`` variant returning 403 instead of a login redirect."""
    kwargs['redirect_to_login'] = False
    return permission_required(perm, *args, **kwargs)
[ "def", "permission_required_or_403", "(", "perm", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'redirect_to_login'", "]", "=", "False", "return", "permission_required", "(", "perm", ",", "*", "args", ",", "**", "kwargs", ")" ]
simple wrapper for permission_required decorator .
train
true
44,658
def map_field(field, func, dict_sequence):
    """Yield each dict with ``func`` applied to its *field* value.

    Records whose value makes ``func`` raise ValueError are dropped.
    """
    for record in dict_sequence:
        try:
            record[field] = func(record.get(field, None))
            yield record
        except ValueError:
            pass
[ "def", "map_field", "(", "field", ",", "func", ",", "dict_sequence", ")", ":", "for", "item", "in", "dict_sequence", ":", "try", ":", "item", "[", "field", "]", "=", "func", "(", "item", ".", "get", "(", "field", ",", "None", ")", ")", "(", "yield", "item", ")", "except", "ValueError", ":", "pass" ]
apply given function to value of given key in every dictionary in sequence and set the result as new value for that key .
train
true
44,660
def test_no_explicit_absolute_imports():
    """Modules lacking ``from __future__ import absolute_import`` are detected."""
    parser = ParserWithRecovery(load_grammar(), u('1'), 'test.py')
    assert not parser.module.has_explicit_absolute_import
[ "def", "test_no_explicit_absolute_imports", "(", ")", ":", "parser", "=", "ParserWithRecovery", "(", "load_grammar", "(", ")", ",", "u", "(", "'1'", ")", ",", "'test.py'", ")", "assert", "(", "not", "parser", ".", "module", ".", "has_explicit_absolute_import", ")" ]
detect modules without from __future__ import absolute_import .
train
false
44,661
def to_timestamp(datetime_value):
    """Convert an aware datetime into an integer Unix timestamp (seconds)."""
    epoch = datetime(1970, 1, 1, tzinfo=utc)
    return int((datetime_value - epoch).total_seconds())
[ "def", "to_timestamp", "(", "datetime_value", ")", ":", "return", "int", "(", "(", "datetime_value", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "utc", ")", ")", ".", "total_seconds", "(", ")", ")" ]
convert a datetime into a timestamp .
train
false
44,662
def pathjoin(*args):
    """Join the non-empty components into a /-delimited path."""
    return '/'.join(filter(None, args))
[ "def", "pathjoin", "(", "*", "args", ")", ":", "return", "'/'", ".", "join", "(", "[", "p", "for", "p", "in", "args", "if", "p", "]", ")" ]
join a /-delimited path .
train
false
44,664
def inverse_fourier_transform(F, k, x, **hints):
    """Compute the unitary inverse Fourier transform of *F* from *k* to *x*."""
    transform = InverseFourierTransform(F, k, x)
    return transform.doit(**hints)
[ "def", "inverse_fourier_transform", "(", "F", ",", "k", ",", "x", ",", "**", "hints", ")", ":", "return", "InverseFourierTransform", "(", "F", ",", "k", ",", "x", ")", ".", "doit", "(", "**", "hints", ")" ]
compute the unitary .
train
false
44,665
def exec_code_all(lang, code, cwd=None):
    """Execute *code* with the interpreter *lang* and return run_all's result.

    The code is written to a temporary file (``.ps1`` for PowerShell),
    run without a shell, and the file is removed afterwards.
    """
    powershell = lang.lower().startswith('powershell')
    if powershell:
        codefile = salt.utils.files.mkstemp(suffix='.ps1')
    else:
        codefile = salt.utils.files.mkstemp()
    with salt.utils.fopen(codefile, 'w+t', binary=False) as fp_:
        fp_.write(code)
    # PowerShell requires the script path to be passed via -File.
    cmd = [lang, '-File', codefile] if powershell else [lang, codefile]
    ret = run_all(cmd, cwd=cwd, python_shell=False)
    os.remove(codefile)
    return ret
[ "def", "exec_code_all", "(", "lang", ",", "code", ",", "cwd", "=", "None", ")", ":", "powershell", "=", "lang", ".", "lower", "(", ")", ".", "startswith", "(", "'powershell'", ")", "if", "powershell", ":", "codefile", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", "suffix", "=", "'.ps1'", ")", "else", ":", "codefile", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "codefile", ",", "'w+t'", ",", "binary", "=", "False", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "code", ")", "if", "powershell", ":", "cmd", "=", "[", "lang", ",", "'-File'", ",", "codefile", "]", "else", ":", "cmd", "=", "[", "lang", ",", "codefile", "]", "ret", "=", "run_all", "(", "cmd", ",", "cwd", "=", "cwd", ",", "python_shell", "=", "False", ")", "os", ".", "remove", "(", "codefile", ")", "return", "ret" ]
pass in two strings .
train
false
44,666
@njit
def mergesort_inplace(arr):
    """Sort a 1-D array in place with recursive merge sort (in-place merge)."""
    assert arr.ndim == 1
    if arr.size > 2:
        mid = arr.size // 2
        mergesort_inplace(arr[:mid])
        mergesort_inplace(arr[mid:])
        # Merge the two sorted halves in place: when the right element is
        # smaller, rotate it into position and shift the gap boundary.
        left = 0
        right = mid
        while left < mid and right < arr.size:
            if arr[left] <= arr[right]:
                left += 1
            else:
                moved = arr[right]
                right += 1
                for j in range(mid, left, -1):
                    arr[j] = arr[j - 1]
                arr[left] = moved
                left += 1
                mid += 1
    elif arr.size == 2:
        a, b = arr
        arr[0], arr[1] = (a, b) if a <= b else (b, a)
    return arr
[ "@", "njit", "def", "mergesort_inplace", "(", "arr", ")", ":", "assert", "(", "arr", ".", "ndim", "==", "1", ")", "if", "(", "arr", ".", "size", ">", "2", ")", ":", "mid", "=", "(", "arr", ".", "size", "//", "2", ")", "first", "=", "arr", "[", ":", "mid", "]", "second", "=", "arr", "[", "mid", ":", "]", "mergesort_inplace", "(", "first", ")", "mergesort_inplace", "(", "second", ")", "left", "=", "0", "right", "=", "mid", "while", "(", "(", "left", "<", "mid", ")", "and", "(", "right", "<", "arr", ".", "size", ")", ")", ":", "if", "(", "arr", "[", "left", "]", "<=", "arr", "[", "right", "]", ")", ":", "left", "+=", "1", "else", ":", "temp", "=", "arr", "[", "right", "]", "right", "+=", "1", "for", "i", "in", "range", "(", "mid", ",", "left", ",", "(", "-", "1", ")", ")", ":", "arr", "[", "i", "]", "=", "arr", "[", "(", "i", "-", "1", ")", "]", "arr", "[", "left", "]", "=", "temp", "left", "+=", "1", "mid", "+=", "1", "elif", "(", "arr", ".", "size", "==", "2", ")", ":", "(", "a", ",", "b", ")", "=", "arr", "(", "arr", "[", "0", "]", ",", "arr", "[", "1", "]", ")", "=", "(", "(", "a", ",", "b", ")", "if", "(", "a", "<=", "b", ")", "else", "(", "b", ",", "a", ")", ")", "return", "arr" ]
inplace mergesort .
train
false
44,668
def shellCall(shellCmd, stdin='', stderr=False):
    """Run a single system command and return its stripped stdout.

    *shellCmd* may be a string (split with shlex) or a list/tuple of
    arguments; anything else yields an (None, message) error tuple.
    When *stderr* is true, returns (stdout, stderr) instead.
    """
    # Exact type checks kept deliberately: subclasses of str fall through
    # to the error branch, matching the historical behavior.
    if type(shellCmd) == str:
        cmdList = shlex.split(shellCmd)
    elif type(shellCmd) in (list, tuple):
        cmdList = shellCmd
    else:
        return (None, 'shellCmd requires a list or string')
    proc = subprocess.Popen(cmdList, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdoutData, stderrData = proc.communicate(stdin)
    del proc
    if stderr:
        return (stdoutData.strip(), stderrData.strip())
    return stdoutData.strip()
[ "def", "shellCall", "(", "shellCmd", ",", "stdin", "=", "''", ",", "stderr", "=", "False", ")", ":", "if", "(", "type", "(", "shellCmd", ")", "==", "str", ")", ":", "shellCmdList", "=", "shlex", ".", "split", "(", "shellCmd", ")", "elif", "(", "type", "(", "shellCmd", ")", "in", "(", "list", ",", "tuple", ")", ")", ":", "shellCmdList", "=", "shellCmd", "else", ":", "return", "(", "None", ",", "'shellCmd requires a list or string'", ")", "proc", "=", "subprocess", ".", "Popen", "(", "shellCmdList", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "(", "stdoutData", ",", "stderrData", ")", "=", "proc", ".", "communicate", "(", "stdin", ")", "del", "proc", "if", "stderr", ":", "return", "(", "stdoutData", ".", "strip", "(", ")", ",", "stderrData", ".", "strip", "(", ")", ")", "else", ":", "return", "stdoutData", ".", "strip", "(", ")" ]
call a single system command with arguments .
train
false
44,669
def run_os_command(command_map):
    """Run the command(s) of the first available executable in *command_map*.

    command_map maps executable name -> command string (or list of them).
    Returns True only if every executed command exited with status 0.
    """
    success = True
    for executable, commands in command_map.items():
        if not find_executable(executable):
            continue
        if isinstance(commands, basestring):
            commands = [commands]
        for command in commands:
            returncode = subprocess.check_call(command, shell=True)
            success = success and (returncode == 0)
        # Only the first executable that is found gets run.
        break
    return success
[ "def", "run_os_command", "(", "command_map", ")", ":", "success", "=", "True", "for", "(", "executable", ",", "commands", ")", "in", "command_map", ".", "items", "(", ")", ":", "if", "find_executable", "(", "executable", ")", ":", "if", "isinstance", "(", "commands", ",", "basestring", ")", ":", "commands", "=", "[", "commands", "]", "for", "command", "in", "commands", ":", "returncode", "=", "subprocess", ".", "check_call", "(", "command", ",", "shell", "=", "True", ")", "success", "=", "(", "success", "and", "(", "returncode", "==", "0", ")", ")", "break", "return", "success" ]
command_map is a dictionary of {executable: command} .
train
false
44,670
def encipher_substitution(msg, old, new=None):
    """Return *msg* with each character in *old* replaced by its *new* counterpart."""
    ciphertext = translate(msg, old, new)
    return ciphertext
[ "def", "encipher_substitution", "(", "msg", ",", "old", ",", "new", "=", "None", ")", ":", "return", "translate", "(", "msg", ",", "old", ",", "new", ")" ]
returns the ciphertext obtained by replacing each character that appears in old with the corresponding character in new .
train
false
44,671
def addEndCap(begin, end, points, radius):
    """Append a semicircular end-cap around *begin*, facing away from *end*.

    Vertices are appended to *points* as complex numbers.
    """
    axis = begin - end
    axisLength = abs(axis)
    if axisLength <= 0.0:
        # Degenerate segment: the cap collapses to the single point.
        points.append(begin)
        return
    axis *= radius / axisLength
    perpendicular = complex(-axis.imag, axis.real)
    numberOfSides = 20
    numberOfPositiveSides = numberOfSides / 2
    totalAngle = 0.0
    angle = euclidean.globalTau / float(numberOfSides)
    # Push vertices slightly outward so the polygon chords stay outside
    # the true radius.
    dotProductMultiplier = 2.0 - 1.0 / math.cos(0.5 * angle)
    for sideIndex in xrange(numberOfPositiveSides + 1):
        circumferentialPoint = (math.sin(totalAngle) * axis
                                + math.cos(totalAngle) * perpendicular)
        points.append(begin + circumferentialPoint * dotProductMultiplier)
        totalAngle += angle
[ "def", "addEndCap", "(", "begin", ",", "end", ",", "points", ",", "radius", ")", ":", "beginMinusEnd", "=", "(", "begin", "-", "end", ")", "beginMinusEndLength", "=", "abs", "(", "beginMinusEnd", ")", "if", "(", "beginMinusEndLength", "<=", "0.0", ")", ":", "points", ".", "append", "(", "begin", ")", "return", "beginMinusEnd", "*=", "(", "radius", "/", "beginMinusEndLength", ")", "perpendicular", "=", "complex", "(", "(", "-", "beginMinusEnd", ".", "imag", ")", ",", "beginMinusEnd", ".", "real", ")", "numberOfSides", "=", "20", "numberOfPositiveSides", "=", "(", "numberOfSides", "/", "2", ")", "totalAngle", "=", "0.0", "angle", "=", "(", "euclidean", ".", "globalTau", "/", "float", "(", "numberOfSides", ")", ")", "dotProductMultiplier", "=", "(", "2.0", "-", "(", "1.0", "/", "math", ".", "cos", "(", "(", "0.5", "*", "angle", ")", ")", ")", ")", "for", "sideIndex", "in", "xrange", "(", "(", "numberOfPositiveSides", "+", "1", ")", ")", ":", "circumferentialPoint", "=", "(", "(", "math", ".", "sin", "(", "totalAngle", ")", "*", "beginMinusEnd", ")", "+", "(", "math", ".", "cos", "(", "totalAngle", ")", "*", "perpendicular", ")", ")", "points", ".", "append", "(", "(", "begin", "+", "(", "circumferentialPoint", "*", "dotProductMultiplier", ")", ")", ")", "totalAngle", "+=", "angle" ]
get a pair of side points .
train
false
44,672
def subtract_date_from_date(date1, date2, result_format='number',
                            exclude_millis=False, date1_format=None,
                            date2_format=None):
    """Subtract *date2* from *date1* and return the time between them."""
    difference = Date(date1, date1_format) - Date(date2, date2_format)
    return difference.convert(result_format, millis=is_falsy(exclude_millis))
[ "def", "subtract_date_from_date", "(", "date1", ",", "date2", ",", "result_format", "=", "'number'", ",", "exclude_millis", "=", "False", ",", "date1_format", "=", "None", ",", "date2_format", "=", "None", ")", ":", "time", "=", "(", "Date", "(", "date1", ",", "date1_format", ")", "-", "Date", "(", "date2", ",", "date2_format", ")", ")", "return", "time", ".", "convert", "(", "result_format", ",", "millis", "=", "is_falsy", "(", "exclude_millis", ")", ")" ]
subtracts date from another date and returns time between .
train
false
44,675
def ResolveSubjectDestination(subject, regexes):
    """Return (filename, directory) where *subject* will be stored.

    Falls back to ('aff4', '') when the subject has no components or no
    regex captures a non-empty path.
    """
    components = Components(subject)
    if not components:
        return ('aff4', '')
    path = utils.JoinPath(*[ConvertStringToFilename(c) for c in components])
    for route in regexes:
        match = route.match(path)
        if not match:
            continue
        value = match.group('path')
        if value:
            return (os.path.basename(value), os.path.dirname(value))
    return ('aff4', '')
[ "def", "ResolveSubjectDestination", "(", "subject", ",", "regexes", ")", ":", "components", "=", "Components", "(", "subject", ")", "if", "(", "not", "components", ")", ":", "return", "(", "'aff4'", ",", "''", ")", "path", "=", "utils", ".", "JoinPath", "(", "*", "[", "ConvertStringToFilename", "(", "x", ")", "for", "x", "in", "components", "]", ")", "for", "route", "in", "regexes", ":", "m", "=", "route", ".", "match", "(", "path", ")", "if", "m", ":", "value", "=", "m", ".", "group", "(", "'path'", ")", "if", "value", ":", "base", "=", "os", ".", "path", ".", "basename", "(", "value", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "value", ")", "return", "(", "base", ",", "dirname", ")", "return", "(", "'aff4'", ",", "''", ")" ]
returns the directory/filename where the subject will be stored .
train
true
44,676
def test_get_by_idx(test_idx):
    """Return the Test with primary key *test_idx*, or None if it does not exist."""
    try:
        return Test.objects.get(pk=test_idx)
    except Test.DoesNotExist:
        return None
[ "def", "test_get_by_idx", "(", "test_idx", ")", ":", "try", ":", "test", "=", "Test", ".", "objects", ".", "get", "(", "pk", "=", "test_idx", ")", "return", "test", "except", "Test", ".", "DoesNotExist", ":", "return", "None" ]
returns a test based on its index or none .
train
false
44,677
def test_merge_events():
    """merge_events combines event ids both with and without replacement."""
    orig = [[1, 0, 1], [3, 0, 2], [10, 0, 3], [20, 0, 4]]
    with_replacement = [[1, 0, 12], [3, 0, 12], [10, 0, 34], [20, 0, 34]]
    without_replacement = [
        [1, 0, 1], [1, 0, 12], [1, 0, 1234],
        [3, 0, 2], [3, 0, 12], [3, 0, 1234],
        [10, 0, 3], [10, 0, 34], [10, 0, 1234],
        [20, 0, 4], [20, 0, 34], [20, 0, 1234],
    ]
    for replace_events, expected in [(True, with_replacement),
                                     (False, without_replacement)]:
        events = merge_events(orig, [1, 2], 12, replace_events)
        events = merge_events(events, [3, 4], 34, replace_events)
        events = merge_events(events, [1, 2, 3, 4], 1234, replace_events)
        assert_array_equal(events, expected)
[ "def", "test_merge_events", "(", ")", ":", "events_orig", "=", "[", "[", "1", ",", "0", ",", "1", "]", ",", "[", "3", ",", "0", ",", "2", "]", ",", "[", "10", ",", "0", ",", "3", "]", ",", "[", "20", ",", "0", ",", "4", "]", "]", "events_replacement", "=", "[", "[", "1", ",", "0", ",", "12", "]", ",", "[", "3", ",", "0", ",", "12", "]", ",", "[", "10", ",", "0", ",", "34", "]", ",", "[", "20", ",", "0", ",", "34", "]", "]", "events_no_replacement", "=", "[", "[", "1", ",", "0", ",", "1", "]", ",", "[", "1", ",", "0", ",", "12", "]", ",", "[", "1", ",", "0", ",", "1234", "]", ",", "[", "3", ",", "0", ",", "2", "]", ",", "[", "3", ",", "0", ",", "12", "]", ",", "[", "3", ",", "0", ",", "1234", "]", ",", "[", "10", ",", "0", ",", "3", "]", ",", "[", "10", ",", "0", ",", "34", "]", ",", "[", "10", ",", "0", ",", "1234", "]", ",", "[", "20", ",", "0", ",", "4", "]", ",", "[", "20", ",", "0", ",", "34", "]", ",", "[", "20", ",", "0", ",", "1234", "]", "]", "for", "(", "replace_events", ",", "events_good", ")", "in", "[", "(", "True", ",", "events_replacement", ")", ",", "(", "False", ",", "events_no_replacement", ")", "]", ":", "events", "=", "merge_events", "(", "events_orig", ",", "[", "1", ",", "2", "]", ",", "12", ",", "replace_events", ")", "events", "=", "merge_events", "(", "events", ",", "[", "3", ",", "4", "]", ",", "34", ",", "replace_events", ")", "events", "=", "merge_events", "(", "events", ",", "[", "1", ",", "2", ",", "3", ",", "4", "]", ",", "1234", ",", "replace_events", ")", "assert_array_equal", "(", "events", ",", "events_good", ")" ]
test event merging .
train
false
44,678
def p_struct_declaration_list_1(t):
    # Grammar production: struct_declaration_list : struct_declaration
    # NOTE(review): if this is a PLY yacc rule, the production text normally
    # lives in the function docstring -- confirm whether it was stripped here.
    pass
[ "def", "p_struct_declaration_list_1", "(", "t", ")", ":", "pass" ]
struct_declaration_list : struct_declaration .
train
false
44,679
def test_user_defined_modules():
    """Exercise the importer using user-defined (mock) module types."""
    class MockModule(object):
        def __init__(self, name):
            self.__name__ = name

        def __repr__(self):
            return 'MockModule("' + self.__name__ + '")'

    TopModule = MockModule('TopModule')
    sys.modules['TopModule'] = TopModule

    SubModule = MockModule('SubModule')
    theObj = object()
    SubModule.Object = theObj
    TopModule.SubModule = SubModule
    sys.modules['TopModule.SubModule'] = SubModule

    # Keep references alive, then drop the local names before importing.
    savedTop, savedSub = TopModule, SubModule
    del TopModule, SubModule

    import TopModule.SubModule
    AreEqual(TopModule, savedTop)
    Assert('SubModule' not in dir())

    from TopModule.SubModule import Object
    AreEqual(Object, theObj)

    # Replacing the sys.modules entry must be honored by from-imports.
    SubModule2 = MockModule('SubModule2')
    SubModule2.Object2 = theObj
    sys.modules['TopModule.SubModule'] = SubModule2
    from TopModule.SubModule import Object2
    AreEqual(Object2, theObj)

    del sys.modules['TopModule']
    del sys.modules['TopModule.SubModule']
[ "def", "test_user_defined_modules", "(", ")", ":", "class", "MockModule", "(", "object", ",", ")", ":", "def", "__init__", "(", "self", ",", "name", ")", ":", "self", ".", "__name__", "=", "name", "def", "__repr__", "(", "self", ")", ":", "return", "(", "(", "'MockModule(\"'", "+", "self", ".", "__name__", ")", "+", "'\")'", ")", "TopModule", "=", "MockModule", "(", "'TopModule'", ")", "sys", ".", "modules", "[", "'TopModule'", "]", "=", "TopModule", "SubModule", "=", "MockModule", "(", "'SubModule'", ")", "theObj", "=", "object", "(", ")", "SubModule", ".", "Object", "=", "theObj", "TopModule", ".", "SubModule", "=", "SubModule", "sys", ".", "modules", "[", "'TopModule.SubModule'", "]", "=", "SubModule", "(", "x", ",", "y", ")", "=", "(", "TopModule", ",", "SubModule", ")", "del", "TopModule", ",", "SubModule", "import", "TopModule", ".", "SubModule", "AreEqual", "(", "TopModule", ",", "x", ")", "Assert", "(", "(", "'SubModule'", "not", "in", "dir", "(", ")", ")", ")", "from", "TopModule", ".", "SubModule", "import", "Object", "AreEqual", "(", "Object", ",", "theObj", ")", "SubModule2", "=", "MockModule", "(", "'SubModule2'", ")", "SubModule2", ".", "Object2", "=", "theObj", "sys", ".", "modules", "[", "'TopModule.SubModule'", "]", "=", "SubModule2", "from", "TopModule", ".", "SubModule", "import", "Object2", "AreEqual", "(", "Object2", ",", "theObj", ")", "del", "sys", ".", "modules", "[", "'TopModule'", "]", "del", "sys", ".", "modules", "[", "'TopModule.SubModule'", "]" ]
test the importer using user-defined module types .
train
false
44,681
def _find_domains_or_certname(config, installer):
    """Retrieve (domains, certname) from config, cert store, or user input."""
    certname = config.certname
    domains = None
    if config.domains:
        domains = config.domains
    elif certname:
        domains = cert_manager.domains_for_certname(config, certname)
    if not domains:
        domains = display_ops.choose_names(installer)
    if not domains and not certname:
        raise errors.Error(
            'Please specify --domains, or --installer that will help in '
            'domain names autodiscovery, or --cert-name for an existing '
            'certificate name.')
    return (domains, certname)
[ "def", "_find_domains_or_certname", "(", "config", ",", "installer", ")", ":", "domains", "=", "None", "certname", "=", "config", ".", "certname", "if", "config", ".", "domains", ":", "domains", "=", "config", ".", "domains", "elif", "certname", ":", "domains", "=", "cert_manager", ".", "domains_for_certname", "(", "config", ",", "certname", ")", "if", "(", "not", "domains", ")", ":", "domains", "=", "display_ops", ".", "choose_names", "(", "installer", ")", "if", "(", "(", "not", "domains", ")", "and", "(", "not", "certname", ")", ")", ":", "raise", "errors", ".", "Error", "(", "'Please specify --domains, or --installer that will help in domain names autodiscovery, or --cert-name for an existing certificate name.'", ")", "return", "(", "domains", ",", "certname", ")" ]
retrieve domains and certname from config or user input .
train
false
44,682
def skip_if_32bit(func):
    """Test decorator that skips *func* on 32-bit platforms."""
    @wraps(func)
    def run_test(*args, **kwargs):
        # Pointer size in bits distinguishes 32- from 64-bit builds.
        pointer_bits = struct.calcsize('P') * 8
        if pointer_bits == 32:
            raise SkipTest('Test skipped on 32bit platforms.')
        return func(*args, **kwargs)
    return run_test
[ "def", "skip_if_32bit", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "run_test", "(", "*", "args", ",", "**", "kwargs", ")", ":", "bits", "=", "(", "8", "*", "struct", ".", "calcsize", "(", "'P'", ")", ")", "if", "(", "bits", "==", "32", ")", ":", "raise", "SkipTest", "(", "'Test skipped on 32bit platforms.'", ")", "else", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "run_test" ]
test decorator that skips tests on 32bit platforms .
train
false
44,683
def test_simple_template():
    """Render a minimal template and verify variable substitution."""
    rendered = jinja.render('test.html', var='World')
    assert rendered == 'Hello World'
[ "def", "test_simple_template", "(", ")", ":", "data", "=", "jinja", ".", "render", "(", "'test.html'", ",", "var", "=", "'World'", ")", "assert", "(", "data", "==", "'Hello World'", ")" ]
test with a simple template .
train
false
44,684
def test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied(script): script.pip_install_local('simple==1.0', expect_error=True) result = script.pip_install_local('--upgrade', '--upgrade-strategy=only-if-needed', 'require_simple', expect_error=True) assert (((script.site_packages / 'require_simple-1.0-py%s.egg-info') % pyversion) not in result.files_deleted), 'should have installed require_simple==1.0' assert (((script.site_packages / 'simple-3.0-py%s.egg-info') % pyversion) in result.files_created), 'should have installed simple==3.0' assert (((script.site_packages / 'simple-1.0-py%s.egg-info') % pyversion) in result.files_deleted), 'should have uninstalled simple==1.0'
[ "def", "test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied", "(", "script", ")", ":", "script", ".", "pip_install_local", "(", "'simple==1.0'", ",", "expect_error", "=", "True", ")", "result", "=", "script", ".", "pip_install_local", "(", "'--upgrade'", ",", "'--upgrade-strategy=only-if-needed'", ",", "'require_simple'", ",", "expect_error", "=", "True", ")", "assert", "(", "(", "(", "script", ".", "site_packages", "/", "'require_simple-1.0-py%s.egg-info'", ")", "%", "pyversion", ")", "not", "in", "result", ".", "files_deleted", ")", ",", "'should have installed require_simple==1.0'", "assert", "(", "(", "(", "script", ".", "site_packages", "/", "'simple-3.0-py%s.egg-info'", ")", "%", "pyversion", ")", "in", "result", ".", "files_created", ")", ",", "'should have installed simple==3.0'", "assert", "(", "(", "(", "script", ".", "site_packages", "/", "'simple-1.0-py%s.egg-info'", ")", "%", "pyversion", ")", "in", "result", ".", "files_deleted", ")", ",", "'should have uninstalled simple==1.0'" ]
it does upgrade a dependency if it no longer satisfies the requirements .
train
false
44,686
def parse_udiff(diff, patterns=None, parent='.'): rv = {} path = nrows = None for line in diff.splitlines(): if nrows: if (line[:1] != '-'): nrows -= 1 continue if (line[:3] == '@@ '): hunk_match = HUNK_REGEX.match(line) (row, nrows) = [int((g or '1')) for g in hunk_match.groups()] rv[path].update(range(row, (row + nrows))) elif (line[:3] == '+++'): path = line[4:].split(' DCTB ', 1)[0] if (path[:2] == 'b/'): path = path[2:] rv[path] = set() return dict([(os.path.join(parent, p), rows) for (p, rows) in rv.items() if (rows and filename_match(p, patterns))])
[ "def", "parse_udiff", "(", "diff", ",", "patterns", "=", "None", ",", "parent", "=", "'.'", ")", ":", "rv", "=", "{", "}", "path", "=", "nrows", "=", "None", "for", "line", "in", "diff", ".", "splitlines", "(", ")", ":", "if", "nrows", ":", "if", "(", "line", "[", ":", "1", "]", "!=", "'-'", ")", ":", "nrows", "-=", "1", "continue", "if", "(", "line", "[", ":", "3", "]", "==", "'@@ '", ")", ":", "hunk_match", "=", "HUNK_REGEX", ".", "match", "(", "line", ")", "(", "row", ",", "nrows", ")", "=", "[", "int", "(", "(", "g", "or", "'1'", ")", ")", "for", "g", "in", "hunk_match", ".", "groups", "(", ")", "]", "rv", "[", "path", "]", ".", "update", "(", "range", "(", "row", ",", "(", "row", "+", "nrows", ")", ")", ")", "elif", "(", "line", "[", ":", "3", "]", "==", "'+++'", ")", ":", "path", "=", "line", "[", "4", ":", "]", ".", "split", "(", "' DCTB '", ",", "1", ")", "[", "0", "]", "if", "(", "path", "[", ":", "2", "]", "==", "'b/'", ")", ":", "path", "=", "path", "[", "2", ":", "]", "rv", "[", "path", "]", "=", "set", "(", ")", "return", "dict", "(", "[", "(", "os", ".", "path", ".", "join", "(", "parent", ",", "p", ")", ",", "rows", ")", "for", "(", "p", ",", "rows", ")", "in", "rv", ".", "items", "(", ")", "if", "(", "rows", "and", "filename_match", "(", "p", ",", "patterns", ")", ")", "]", ")" ]
return a dictionary of matching lines .
train
true
44,687
def upvars(n=2): return dictadd(sys._getframe(n).f_globals, sys._getframe(n).f_locals)
[ "def", "upvars", "(", "n", "=", "2", ")", ":", "return", "dictadd", "(", "sys", ".", "_getframe", "(", "n", ")", ".", "f_globals", ",", "sys", ".", "_getframe", "(", "n", ")", ".", "f_locals", ")" ]
guido van rossum doesn't want you to use this function .
train
false
44,688
def interact(banner=None, readfunc=None, local=None): console = InteractiveConsole(local) if (readfunc is not None): console.raw_input = readfunc else: try: import readline except ImportError: pass console.interact(banner)
[ "def", "interact", "(", "banner", "=", "None", ",", "readfunc", "=", "None", ",", "local", "=", "None", ")", ":", "console", "=", "InteractiveConsole", "(", "local", ")", "if", "(", "readfunc", "is", "not", "None", ")", ":", "console", ".", "raw_input", "=", "readfunc", "else", ":", "try", ":", "import", "readline", "except", "ImportError", ":", "pass", "console", ".", "interact", "(", "banner", ")" ]
remote interactive interpreter .
train
true
44,690
def get_empty_facilities_msg(browser): try: elem = browser.find_element_by_id('no-facilities-message') return elem except NoSuchElementException: return None
[ "def", "get_empty_facilities_msg", "(", "browser", ")", ":", "try", ":", "elem", "=", "browser", ".", "find_element_by_id", "(", "'no-facilities-message'", ")", "return", "elem", "except", "NoSuchElementException", ":", "return", "None" ]
returns a selenium webelement if it exists .
train
false
44,691
def test_outside_sky(): header = get_pkg_data_contents(u'data/outside_sky.hdr', encoding=u'binary') w = wcs.WCS(header) assert np.all(np.isnan(w.wcs_pix2world([[100.0, 500.0]], 0))) assert np.all(np.isnan(w.wcs_pix2world([[200.0, 200.0]], 0))) assert (not np.any(np.isnan(w.wcs_pix2world([[1000.0, 1000.0]], 0))))
[ "def", "test_outside_sky", "(", ")", ":", "header", "=", "get_pkg_data_contents", "(", "u'data/outside_sky.hdr'", ",", "encoding", "=", "u'binary'", ")", "w", "=", "wcs", ".", "WCS", "(", "header", ")", "assert", "np", ".", "all", "(", "np", ".", "isnan", "(", "w", ".", "wcs_pix2world", "(", "[", "[", "100.0", ",", "500.0", "]", "]", ",", "0", ")", ")", ")", "assert", "np", ".", "all", "(", "np", ".", "isnan", "(", "w", ".", "wcs_pix2world", "(", "[", "[", "200.0", ",", "200.0", "]", "]", ",", "0", ")", ")", ")", "assert", "(", "not", "np", ".", "any", "(", "np", ".", "isnan", "(", "w", ".", "wcs_pix2world", "(", "[", "[", "1000.0", ",", "1000.0", "]", "]", ",", "0", ")", ")", ")", ")" ]
from github issue #107 .
train
false
44,693
def get_text_style(text): style = {} style['alpha'] = text.get_alpha() if (style['alpha'] is None): style['alpha'] = 1 style['fontsize'] = text.get_size() style['color'] = color_to_hex(text.get_color()) style['halign'] = text.get_horizontalalignment() style['valign'] = text.get_verticalalignment() style['malign'] = text._multialignment style['rotation'] = text.get_rotation() style['zorder'] = text.get_zorder() return style
[ "def", "get_text_style", "(", "text", ")", ":", "style", "=", "{", "}", "style", "[", "'alpha'", "]", "=", "text", ".", "get_alpha", "(", ")", "if", "(", "style", "[", "'alpha'", "]", "is", "None", ")", ":", "style", "[", "'alpha'", "]", "=", "1", "style", "[", "'fontsize'", "]", "=", "text", ".", "get_size", "(", ")", "style", "[", "'color'", "]", "=", "color_to_hex", "(", "text", ".", "get_color", "(", ")", ")", "style", "[", "'halign'", "]", "=", "text", ".", "get_horizontalalignment", "(", ")", "style", "[", "'valign'", "]", "=", "text", ".", "get_verticalalignment", "(", ")", "style", "[", "'malign'", "]", "=", "text", ".", "_multialignment", "style", "[", "'rotation'", "]", "=", "text", ".", "get_rotation", "(", ")", "style", "[", "'zorder'", "]", "=", "text", ".", "get_zorder", "(", ")", "return", "style" ]
return the text style dict for a text instance .
train
true
44,694
def get_xqueue_callback_url_prefix(request): prefix = '{proto}://{host}'.format(proto=request.META.get('HTTP_X_FORWARDED_PROTO', ('https' if request.is_secure() else 'http')), host=request.get_host()) return settings.XQUEUE_INTERFACE.get('callback_url', prefix)
[ "def", "get_xqueue_callback_url_prefix", "(", "request", ")", ":", "prefix", "=", "'{proto}://{host}'", ".", "format", "(", "proto", "=", "request", ".", "META", ".", "get", "(", "'HTTP_X_FORWARDED_PROTO'", ",", "(", "'https'", "if", "request", ".", "is_secure", "(", ")", "else", "'http'", ")", ")", ",", "host", "=", "request", ".", "get_host", "(", ")", ")", "return", "settings", ".", "XQUEUE_INTERFACE", ".", "get", "(", "'callback_url'", ",", "prefix", ")" ]
calculates default prefix based on request .
train
false
44,695
def reshape(incoming, new_shape, name='Reshape'): with tf.name_scope(name) as scope: inference = incoming if isinstance(inference, list): inference = tf.concat(inference, 0) inference = tf.cast(inference, tf.float32) inference = tf.reshape(inference, shape=new_shape) inference.scope = scope tf.add_to_collection(((tf.GraphKeys.LAYER_TENSOR + '/') + name), inference) return inference
[ "def", "reshape", "(", "incoming", ",", "new_shape", ",", "name", "=", "'Reshape'", ")", ":", "with", "tf", ".", "name_scope", "(", "name", ")", "as", "scope", ":", "inference", "=", "incoming", "if", "isinstance", "(", "inference", ",", "list", ")", ":", "inference", "=", "tf", ".", "concat", "(", "inference", ",", "0", ")", "inference", "=", "tf", ".", "cast", "(", "inference", ",", "tf", ".", "float32", ")", "inference", "=", "tf", ".", "reshape", "(", "inference", ",", "shape", "=", "new_shape", ")", "inference", ".", "scope", "=", "scope", "tf", ".", "add_to_collection", "(", "(", "(", "tf", ".", "GraphKeys", ".", "LAYER_TENSOR", "+", "'/'", ")", "+", "name", ")", ",", "inference", ")", "return", "inference" ]
returns an array with new shape and same elements .
train
false
44,696
def raw_cleanup(raw): return formatters(cleanup, strip)(raw.lower())
[ "def", "raw_cleanup", "(", "raw", ")", ":", "return", "formatters", "(", "cleanup", ",", "strip", ")", "(", "raw", ".", "lower", "(", ")", ")" ]
cleanup a raw value to perform raw comparison .
train
false
44,697
def raw_memcpy(builder, dst, src, count, itemsize, align=1): return _raw_memcpy(builder, 'llvm.memcpy', dst, src, count, itemsize, align)
[ "def", "raw_memcpy", "(", "builder", ",", "dst", ",", "src", ",", "count", ",", "itemsize", ",", "align", "=", "1", ")", ":", "return", "_raw_memcpy", "(", "builder", ",", "'llvm.memcpy'", ",", "dst", ",", "src", ",", "count", ",", "itemsize", ",", "align", ")" ]
emit a raw memcpy() call for count items of size itemsize from src to dest .
train
false
44,698
def check_overflow(arg, ctype, fatal=True): maxval = MAXVALS[ctype] minval = MINVALS[ctype] if (arg > maxval): if fatal: raise OverflowError(arg) else: return maxval elif (arg < minval): if fatal: raise OverflowError(arg) else: return minval else: return arg
[ "def", "check_overflow", "(", "arg", ",", "ctype", ",", "fatal", "=", "True", ")", ":", "maxval", "=", "MAXVALS", "[", "ctype", "]", "minval", "=", "MINVALS", "[", "ctype", "]", "if", "(", "arg", ">", "maxval", ")", ":", "if", "fatal", ":", "raise", "OverflowError", "(", "arg", ")", "else", ":", "return", "maxval", "elif", "(", "arg", "<", "minval", ")", ":", "if", "fatal", ":", "raise", "OverflowError", "(", "arg", ")", "else", ":", "return", "minval", "else", ":", "return", "arg" ]
check if the given argument is in bounds for the given type .
train
false
44,701
def RemoveMultiLineCommentsFromRange(lines, begin, end): for i in range(begin, end): lines[i] = '// dummy'
[ "def", "RemoveMultiLineCommentsFromRange", "(", "lines", ",", "begin", ",", "end", ")", ":", "for", "i", "in", "range", "(", "begin", ",", "end", ")", ":", "lines", "[", "i", "]", "=", "'// dummy'" ]
clears a range of lines for multi-line comments .
train
true
44,702
def dict_keys_startswith(dictionary, string): matched_keys = [key for key in dictionary.keys() if key.startswith(string)] return dict(((k, dictionary[k]) for k in matched_keys))
[ "def", "dict_keys_startswith", "(", "dictionary", ",", "string", ")", ":", "matched_keys", "=", "[", "key", "for", "key", "in", "dictionary", ".", "keys", "(", ")", "if", "key", ".", "startswith", "(", "string", ")", "]", "return", "dict", "(", "(", "(", "k", ",", "dictionary", "[", "k", "]", ")", "for", "k", "in", "matched_keys", ")", ")" ]
returns a dictionary containing the elements of <dict> whose keys start with <string> .
train
false
44,704
def get_current_date_as_string(): return datetime.datetime.utcnow().strftime(feconf.DASHBOARD_STATS_DATETIME_STRING_FORMAT)
[ "def", "get_current_date_as_string", "(", ")", ":", "return", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "feconf", ".", "DASHBOARD_STATS_DATETIME_STRING_FORMAT", ")" ]
gets the current date .
train
false
44,705
def unexpected_error_msg(error): return gcp_error(error)
[ "def", "unexpected_error_msg", "(", "error", ")", ":", "return", "gcp_error", "(", "error", ")" ]
create an error string based on passed in error .
train
false
44,706
def http_headers(s): d = {} for line in s.split('\r\n'): try: (key, value) = [i.strip() for i in line.split(':', 1)] d[key] = value except ValueError: pass return d
[ "def", "http_headers", "(", "s", ")", ":", "d", "=", "{", "}", "for", "line", "in", "s", ".", "split", "(", "'\\r\\n'", ")", ":", "try", ":", "(", "key", ",", "value", ")", "=", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "line", ".", "split", "(", "':'", ",", "1", ")", "]", "d", "[", "key", "]", "=", "value", "except", "ValueError", ":", "pass", "return", "d" ]
create a dictionary of data from raw http headers .
train
false
44,707
def GetClientConfig(filename): config_lib.SetPlatformArchContext() config_lib.ParseConfigCommandLine() context = list(config_lib.CONFIG.context) context.append('Client Context') deployer = build.ClientRepacker() config_data = deployer.GetClientConfig(context, validate=True, deploy_timestamp=False) builder = build.ClientBuilder() with open(filename, 'w') as fd: fd.write(config_data) builder.WriteBuildYaml(fd, build_timestamp=False)
[ "def", "GetClientConfig", "(", "filename", ")", ":", "config_lib", ".", "SetPlatformArchContext", "(", ")", "config_lib", ".", "ParseConfigCommandLine", "(", ")", "context", "=", "list", "(", "config_lib", ".", "CONFIG", ".", "context", ")", "context", ".", "append", "(", "'Client Context'", ")", "deployer", "=", "build", ".", "ClientRepacker", "(", ")", "config_data", "=", "deployer", ".", "GetClientConfig", "(", "context", ",", "validate", "=", "True", ",", "deploy_timestamp", "=", "False", ")", "builder", "=", "build", ".", "ClientBuilder", "(", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fd", ":", "fd", ".", "write", "(", "config_data", ")", "builder", ".", "WriteBuildYaml", "(", "fd", ",", "build_timestamp", "=", "False", ")" ]
write client config to filename .
train
true
44,708
def rmmod(mod): return quietRun(['rmmod', mod])
[ "def", "rmmod", "(", "mod", ")", ":", "return", "quietRun", "(", "[", "'rmmod'", ",", "mod", "]", ")" ]
return output of rmmod .
train
false
44,709
def preBuild(site): optimize = site.config.get('optimize', []) if ('js' in optimize): site.external_manager.register_optimizer(ClosureJSOptimizer) if ('css' in optimize): site.external_manager.register_optimizer(YUICSSOptimizer)
[ "def", "preBuild", "(", "site", ")", ":", "optimize", "=", "site", ".", "config", ".", "get", "(", "'optimize'", ",", "[", "]", ")", "if", "(", "'js'", "in", "optimize", ")", ":", "site", ".", "external_manager", ".", "register_optimizer", "(", "ClosureJSOptimizer", ")", "if", "(", "'css'", "in", "optimize", ")", ":", "site", ".", "external_manager", ".", "register_optimizer", "(", "YUICSSOptimizer", ")" ]
registers optimizers as requested by the configuration .
train
false
44,710
def get_deleted_objects(objs, opts, user, admin_site, using): collector = NestedObjects(using=using) collector.collect(objs) perms_needed = set() def format_callback(obj): has_admin = (obj.__class__ in admin_site._registry) opts = obj._meta if has_admin: admin_url = reverse((u'%s:%s_%s_change' % (admin_site.name, opts.app_label, opts.model_name)), None, (quote(obj._get_pk_val()),)) p = (u'%s.%s' % (opts.app_label, get_permission_codename(u'delete', opts))) if (not user.has_perm(p)): perms_needed.add(opts.verbose_name) return format_html(u'{0}: <a href="{1}">{2}</a>', capfirst(opts.verbose_name), admin_url, obj) else: return (u'%s: %s' % (capfirst(opts.verbose_name), force_text(obj))) to_delete = collector.nested(format_callback) protected = [format_callback(obj) for obj in collector.protected] return (to_delete, perms_needed, protected)
[ "def", "get_deleted_objects", "(", "objs", ",", "opts", ",", "user", ",", "admin_site", ",", "using", ")", ":", "collector", "=", "NestedObjects", "(", "using", "=", "using", ")", "collector", ".", "collect", "(", "objs", ")", "perms_needed", "=", "set", "(", ")", "def", "format_callback", "(", "obj", ")", ":", "has_admin", "=", "(", "obj", ".", "__class__", "in", "admin_site", ".", "_registry", ")", "opts", "=", "obj", ".", "_meta", "if", "has_admin", ":", "admin_url", "=", "reverse", "(", "(", "u'%s:%s_%s_change'", "%", "(", "admin_site", ".", "name", ",", "opts", ".", "app_label", ",", "opts", ".", "model_name", ")", ")", ",", "None", ",", "(", "quote", "(", "obj", ".", "_get_pk_val", "(", ")", ")", ",", ")", ")", "p", "=", "(", "u'%s.%s'", "%", "(", "opts", ".", "app_label", ",", "get_permission_codename", "(", "u'delete'", ",", "opts", ")", ")", ")", "if", "(", "not", "user", ".", "has_perm", "(", "p", ")", ")", ":", "perms_needed", ".", "add", "(", "opts", ".", "verbose_name", ")", "return", "format_html", "(", "u'{0}: <a href=\"{1}\">{2}</a>'", ",", "capfirst", "(", "opts", ".", "verbose_name", ")", ",", "admin_url", ",", "obj", ")", "else", ":", "return", "(", "u'%s: %s'", "%", "(", "capfirst", "(", "opts", ".", "verbose_name", ")", ",", "force_text", "(", "obj", ")", ")", ")", "to_delete", "=", "collector", ".", "nested", "(", "format_callback", ")", "protected", "=", "[", "format_callback", "(", "obj", ")", "for", "obj", "in", "collector", ".", "protected", "]", "return", "(", "to_delete", ",", "perms_needed", ",", "protected", ")" ]
find all objects related to objs that should also be deleted .
train
false
44,711
def generate_synthetic_data(): rs = np.random.RandomState(0) n_pts = 36.0 (x, y) = np.ogrid[0:l, 0:l] mask_outer = ((((x - (l / 2)) ** 2) + ((y - (l / 2)) ** 2)) < ((l / 2) ** 2)) mask = np.zeros((l, l)) points = (l * rs.rand(2, n_pts)) mask[(points[0].astype(np.int), points[1].astype(np.int))] = 1 mask = ndimage.gaussian_filter(mask, sigma=(l / n_pts)) res = np.logical_and((mask > mask.mean()), mask_outer) return (res - ndimage.binary_erosion(res))
[ "def", "generate_synthetic_data", "(", ")", ":", "rs", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "n_pts", "=", "36.0", "(", "x", ",", "y", ")", "=", "np", ".", "ogrid", "[", "0", ":", "l", ",", "0", ":", "l", "]", "mask_outer", "=", "(", "(", "(", "(", "x", "-", "(", "l", "/", "2", ")", ")", "**", "2", ")", "+", "(", "(", "y", "-", "(", "l", "/", "2", ")", ")", "**", "2", ")", ")", "<", "(", "(", "l", "/", "2", ")", "**", "2", ")", ")", "mask", "=", "np", ".", "zeros", "(", "(", "l", ",", "l", ")", ")", "points", "=", "(", "l", "*", "rs", ".", "rand", "(", "2", ",", "n_pts", ")", ")", "mask", "[", "(", "points", "[", "0", "]", ".", "astype", "(", "np", ".", "int", ")", ",", "points", "[", "1", "]", ".", "astype", "(", "np", ".", "int", ")", ")", "]", "=", "1", "mask", "=", "ndimage", ".", "gaussian_filter", "(", "mask", ",", "sigma", "=", "(", "l", "/", "n_pts", ")", ")", "res", "=", "np", ".", "logical_and", "(", "(", "mask", ">", "mask", ".", "mean", "(", ")", ")", ",", "mask_outer", ")", "return", "(", "res", "-", "ndimage", ".", "binary_erosion", "(", "res", ")", ")" ]
synthetic binary data .
train
false
44,712
def rst_for_module(toc_path): f = open((toc_path + u'.rst'), u'w+') heading = u':py:mod:`~{}`'.format(toc_path.replace(u'/', u'.')) dotted = toc_path.replace(u'/', u'.') w(f, heading) w(f, (u'=' * len(heading))) w(f, u'.. automodule:: {}', dotted) w(f, u' :member-order: bysource') return f
[ "def", "rst_for_module", "(", "toc_path", ")", ":", "f", "=", "open", "(", "(", "toc_path", "+", "u'.rst'", ")", ",", "u'w+'", ")", "heading", "=", "u':py:mod:`~{}`'", ".", "format", "(", "toc_path", ".", "replace", "(", "u'/'", ",", "u'.'", ")", ")", "dotted", "=", "toc_path", ".", "replace", "(", "u'/'", ",", "u'.'", ")", "w", "(", "f", ",", "heading", ")", "w", "(", "f", ",", "(", "u'='", "*", "len", "(", "heading", ")", ")", ")", "w", "(", "f", ",", "u'.. automodule:: {}'", ",", "dotted", ")", "w", "(", "f", ",", "u' :member-order: bysource'", ")", "return", "f" ]
given a toc_path .
train
false
44,713
def double_urldecode(t): return urllib.unquote(urllib.unquote(t))
[ "def", "double_urldecode", "(", "t", ")", ":", "return", "urllib", ".", "unquote", "(", "urllib", ".", "unquote", "(", "t", ")", ")" ]
decoder doing double url encode .
train
false
44,714
def _validate_rpc_port(port): if (not port): raise NetworkControllerError(desc='Invalid rpc port number.') if isinstance(port, str): port = int(port) if (port <= 0): raise NetworkControllerError(desc=('Invalid rpc port number %s' % port)) return port
[ "def", "_validate_rpc_port", "(", "port", ")", ":", "if", "(", "not", "port", ")", ":", "raise", "NetworkControllerError", "(", "desc", "=", "'Invalid rpc port number.'", ")", "if", "isinstance", "(", "port", ",", "str", ")", ":", "port", "=", "int", "(", "port", ")", "if", "(", "port", "<=", "0", ")", ":", "raise", "NetworkControllerError", "(", "desc", "=", "(", "'Invalid rpc port number %s'", "%", "port", ")", ")", "return", "port" ]
validates give port for use as rpc server port .
train
true
44,715
def _root_diagbroyden_doc(): pass
[ "def", "_root_diagbroyden_doc", "(", ")", ":", "pass" ]
options nit : int .
train
false
44,716
def getVector3ByDictionary(dictionary, vector3): if ('x' in dictionary): vector3 = getVector3IfNone(vector3) vector3.x = euclidean.getFloatFromValue(dictionary['x']) if ('y' in dictionary): vector3 = getVector3IfNone(vector3) vector3.y = euclidean.getFloatFromValue(dictionary['y']) if ('z' in dictionary): vector3 = getVector3IfNone(vector3) vector3.z = euclidean.getFloatFromValue(dictionary['z']) return vector3
[ "def", "getVector3ByDictionary", "(", "dictionary", ",", "vector3", ")", ":", "if", "(", "'x'", "in", "dictionary", ")", ":", "vector3", "=", "getVector3IfNone", "(", "vector3", ")", "vector3", ".", "x", "=", "euclidean", ".", "getFloatFromValue", "(", "dictionary", "[", "'x'", "]", ")", "if", "(", "'y'", "in", "dictionary", ")", ":", "vector3", "=", "getVector3IfNone", "(", "vector3", ")", "vector3", ".", "y", "=", "euclidean", ".", "getFloatFromValue", "(", "dictionary", "[", "'y'", "]", ")", "if", "(", "'z'", "in", "dictionary", ")", ":", "vector3", "=", "getVector3IfNone", "(", "vector3", ")", "vector3", ".", "z", "=", "euclidean", ".", "getFloatFromValue", "(", "dictionary", "[", "'z'", "]", ")", "return", "vector3" ]
get vector3 by dictionary .
train
false
44,717
def ansi_color_style_names(): return ANSI_STYLES.keys()
[ "def", "ansi_color_style_names", "(", ")", ":", "return", "ANSI_STYLES", ".", "keys", "(", ")" ]
returns an iterable of all ansi color style names .
train
false
44,718
def on_success(result): return result
[ "def", "on_success", "(", "result", ")", ":", "return", "result" ]
called on the result of the function .
train
false
44,719
def load_sample_image(image_name): images = load_sample_images() index = None for (i, filename) in enumerate(images.filenames): if filename.endswith(image_name): index = i break if (index is None): raise AttributeError(('Cannot find sample image: %s' % image_name)) return images.images[index]
[ "def", "load_sample_image", "(", "image_name", ")", ":", "images", "=", "load_sample_images", "(", ")", "index", "=", "None", "for", "(", "i", ",", "filename", ")", "in", "enumerate", "(", "images", ".", "filenames", ")", ":", "if", "filename", ".", "endswith", "(", "image_name", ")", ":", "index", "=", "i", "break", "if", "(", "index", "is", "None", ")", ":", "raise", "AttributeError", "(", "(", "'Cannot find sample image: %s'", "%", "image_name", ")", ")", "return", "images", ".", "images", "[", "index", "]" ]
load the numpy array of a single sample image .
train
false
44,720
def get_filtered_bears(languages, log_printer, arg_parser=None): (sections, _) = load_configuration(arg_list=None, log_printer=log_printer, arg_parser=arg_parser) (local_bears, global_bears) = collect_all_bears_from_sections(sections, log_printer) if languages: local_bears = filter_section_bears_by_languages(local_bears, languages) global_bears = filter_section_bears_by_languages(global_bears, languages) return (local_bears, global_bears)
[ "def", "get_filtered_bears", "(", "languages", ",", "log_printer", ",", "arg_parser", "=", "None", ")", ":", "(", "sections", ",", "_", ")", "=", "load_configuration", "(", "arg_list", "=", "None", ",", "log_printer", "=", "log_printer", ",", "arg_parser", "=", "arg_parser", ")", "(", "local_bears", ",", "global_bears", ")", "=", "collect_all_bears_from_sections", "(", "sections", ",", "log_printer", ")", "if", "languages", ":", "local_bears", "=", "filter_section_bears_by_languages", "(", "local_bears", ",", "languages", ")", "global_bears", "=", "filter_section_bears_by_languages", "(", "global_bears", ",", "languages", ")", "return", "(", "local_bears", ",", "global_bears", ")" ]
fetch bears and filter them based on given list of languages .
train
false
44,721
def chi2prob(x, df, direction='high'): if (x <= 0): return nan elif (direction == 'high'): return (1.0 - chi2.cdf(x, df)) elif (direction == 'low'): return chi2.cdf(x, df) else: raise ValueError('Unknown direction.')
[ "def", "chi2prob", "(", "x", ",", "df", ",", "direction", "=", "'high'", ")", ":", "if", "(", "x", "<=", "0", ")", ":", "return", "nan", "elif", "(", "direction", "==", "'high'", ")", ":", "return", "(", "1.0", "-", "chi2", ".", "cdf", "(", "x", ",", "df", ")", ")", "elif", "(", "direction", "==", "'low'", ")", ":", "return", "chi2", ".", "cdf", "(", "x", ",", "df", ")", "else", ":", "raise", "ValueError", "(", "'Unknown direction.'", ")" ]
return the chi-squared statistic .
train
false
44,722
def parse_privs(privs, db): if (privs is None): return privs o_privs = {'database': {}, 'table': {}} for token in privs.split('/'): if (':' not in token): type_ = 'database' name = db priv_set = frozenset((x.strip().upper() for x in token.split(',') if x.strip())) else: type_ = 'table' (name, privileges) = token.split(':', 1) priv_set = frozenset((x.strip().upper() for x in privileges.split(',') if x.strip())) if (not priv_set.issubset(VALID_PRIVS[type_])): raise InvalidPrivsError(('Invalid privs specified for %s: %s' % (type_, ' '.join(priv_set.difference(VALID_PRIVS[type_]))))) priv_set = normalize_privileges(priv_set, type_) o_privs[type_][name] = priv_set return o_privs
[ "def", "parse_privs", "(", "privs", ",", "db", ")", ":", "if", "(", "privs", "is", "None", ")", ":", "return", "privs", "o_privs", "=", "{", "'database'", ":", "{", "}", ",", "'table'", ":", "{", "}", "}", "for", "token", "in", "privs", ".", "split", "(", "'/'", ")", ":", "if", "(", "':'", "not", "in", "token", ")", ":", "type_", "=", "'database'", "name", "=", "db", "priv_set", "=", "frozenset", "(", "(", "x", ".", "strip", "(", ")", ".", "upper", "(", ")", "for", "x", "in", "token", ".", "split", "(", "','", ")", "if", "x", ".", "strip", "(", ")", ")", ")", "else", ":", "type_", "=", "'table'", "(", "name", ",", "privileges", ")", "=", "token", ".", "split", "(", "':'", ",", "1", ")", "priv_set", "=", "frozenset", "(", "(", "x", ".", "strip", "(", ")", ".", "upper", "(", ")", "for", "x", "in", "privileges", ".", "split", "(", "','", ")", "if", "x", ".", "strip", "(", ")", ")", ")", "if", "(", "not", "priv_set", ".", "issubset", "(", "VALID_PRIVS", "[", "type_", "]", ")", ")", ":", "raise", "InvalidPrivsError", "(", "(", "'Invalid privs specified for %s: %s'", "%", "(", "type_", ",", "' '", ".", "join", "(", "priv_set", ".", "difference", "(", "VALID_PRIVS", "[", "type_", "]", ")", ")", ")", ")", ")", "priv_set", "=", "normalize_privileges", "(", "priv_set", ",", "type_", ")", "o_privs", "[", "type_", "]", "[", "name", "]", "=", "priv_set", "return", "o_privs" ]
parse privilege string to determine permissions for database db .
train
false
44,723
def set_windows_appusermodelid(): try: return windll.shell32.SetCurrentProcessExplicitAppUserModelID('spyder.Spyder') except AttributeError: return 'SetCurrentProcessExplicitAppUserModelID not found'
[ "def", "set_windows_appusermodelid", "(", ")", ":", "try", ":", "return", "windll", ".", "shell32", ".", "SetCurrentProcessExplicitAppUserModelID", "(", "'spyder.Spyder'", ")", "except", "AttributeError", ":", "return", "'SetCurrentProcessExplicitAppUserModelID not found'" ]
make sure correct icon is used on windows 7 taskbar .
train
false
44,724
def uid(pid): (start_time, parameter) = (time.time(), 'uid') status_path = ('/proc/%s/status' % pid) uid_line = _get_line(status_path, 'Uid:', parameter) try: result = int(uid_line.split()[1]) _log_runtime(parameter, ('%s[Uid]' % status_path), start_time) return result except: exc = IOError(('unable to parse the %s Uid entry: %s' % (status_path, uid_line))) _log_failure(parameter, exc) raise exc
[ "def", "uid", "(", "pid", ")", ":", "(", "start_time", ",", "parameter", ")", "=", "(", "time", ".", "time", "(", ")", ",", "'uid'", ")", "status_path", "=", "(", "'/proc/%s/status'", "%", "pid", ")", "uid_line", "=", "_get_line", "(", "status_path", ",", "'Uid:'", ",", "parameter", ")", "try", ":", "result", "=", "int", "(", "uid_line", ".", "split", "(", ")", "[", "1", "]", ")", "_log_runtime", "(", "parameter", ",", "(", "'%s[Uid]'", "%", "status_path", ")", ",", "start_time", ")", "return", "result", "except", ":", "exc", "=", "IOError", "(", "(", "'unable to parse the %s Uid entry: %s'", "%", "(", "status_path", ",", "uid_line", ")", ")", ")", "_log_failure", "(", "parameter", ",", "exc", ")", "raise", "exc" ]
provides the user id the given process is running under .
train
false
44,725
def set_snmp(community, host=None, admin_username=None, admin_password=None): return __execute_cmd('config -g cfgOobSnmp -o cfgOobSnmpAgentCommunity {0}'.format(community), host=host, admin_username=admin_username, admin_password=admin_password)
[ "def", "set_snmp", "(", "community", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "__execute_cmd", "(", "'config -g cfgOobSnmp -o cfgOobSnmpAgentCommunity {0}'", ".", "format", "(", "community", ")", ",", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "admin_password", "=", "admin_password", ")" ]
configure snmp community string cli example: .
train
true
44,726
def read_bearer_token_file(filename): f = open(filename) bearer_token = f.readline().strip() f.close() return bearer_token
[ "def", "read_bearer_token_file", "(", "filename", ")", ":", "f", "=", "open", "(", "filename", ")", "bearer_token", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "f", ".", "close", "(", ")", "return", "bearer_token" ]
read a token file and return the oauth2 bearer token .
train
false