id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
29,718
@contextmanager
def mutated_working_copy(files_to_mutate, to_append=u'\n '):
    """Context manager that temporarily appends ``to_append`` to each file.

    On exit the appended text is truncated off again, restoring the files
    to their original length.  ``to_append`` must be non-empty, otherwise
    the truncate arithmetic below would be a no-op.
    NOTE(review): files are opened in binary append mode while the default
    ``to_append`` is a unicode literal -- presumably Python 2 code; confirm.
    """
    assert to_append, u'to_append may not be empty'
    # Append the mutation marker to every file.
    for f in files_to_mutate:
        with open(f, u'ab') as fh:
            fh.write(to_append)
    try:
        (yield)
    finally:
        # Seek back by the number of appended characters and chop them off.
        seek_point = (len(to_append) * (-1))
        for f in files_to_mutate:
            with open(f, u'ab') as fh:
                fh.seek(seek_point, os.SEEK_END)
                fh.truncate()
[ "@", "contextmanager", "def", "mutated_working_copy", "(", "files_to_mutate", ",", "to_append", "=", "u'\\n '", ")", ":", "assert", "to_append", ",", "u'to_append may not be empty'", "for", "f", "in", "files_to_mutate", ":", "with", "open", "(", "f", ",", "u'ab'",...
given a list of files .
train
false
29,720
def lu_solve(lu_and_piv, b, trans=0, overwrite_b=False, check_finite=True):
    """Solve a linear system using the LU factorization from ``lu_factor``.

    :param lu_and_piv: ``(lu, piv)`` pair as returned by ``lu_factor``.
    :param b: right-hand side array.
    :param trans: 0 -> solve ``a x = b``; 1 -> ``a^T x = b``; 2 -> ``a^H x = b``.
    :param overwrite_b: allow clobbering ``b`` for performance.
    :param check_finite: reject NaN/Inf in ``b`` when True.
    :return: the solution ``x``.
    :raises ValueError: on shape mismatch or an illegal LAPACK argument.
    """
    (lu, piv) = lu_and_piv
    if check_finite:
        b1 = asarray_chkfinite(b)
    else:
        b1 = asarray(b)
    # If asarray made a copy of b, overwriting that copy is always safe.
    overwrite_b = (overwrite_b or _datacopied(b1, b))
    if (lu.shape[0] != b1.shape[0]):
        raise ValueError('incompatible dimensions.')
    (getrs,) = get_lapack_funcs(('getrs',), (lu, b1))
    (x, info) = getrs(lu, piv, b1, trans=trans, overwrite_b=overwrite_b)
    if (info == 0):
        return x
    # BUG FIX: the routine actually called is getrs, not gesv/posv -- report
    # the correct routine name so the error points at the right LAPACK call.
    raise ValueError(('illegal value in %d-th argument of internal getrs' % (- info)))
[ "def", "lu_solve", "(", "lu_and_piv", ",", "b", ",", "trans", "=", "0", ",", "overwrite_b", "=", "False", ",", "check_finite", "=", "True", ")", ":", "(", "lu", ",", "piv", ")", "=", "lu_and_piv", "if", "check_finite", ":", "b1", "=", "asarray_chkfinit...
solve an equation system .
train
false
29,722
def tags_published():
    """Return the Tag queryset restricted to tags used by published entries."""
    from tagging.models import Tag
    from zinnia.models.entry import Entry
    used_tags = Tag.objects.usage_for_queryset(Entry.published.all())
    published_names = [tag.name for tag in used_tags]
    return Tag.objects.filter(name__in=published_names)
[ "def", "tags_published", "(", ")", ":", "from", "tagging", ".", "models", "import", "Tag", "from", "zinnia", ".", "models", ".", "entry", "import", "Entry", "tags_entry_published", "=", "Tag", ".", "objects", ".", "usage_for_queryset", "(", "Entry", ".", "pu...
return the published tags .
train
true
29,723
def DescStat(endog):
    """Return an empirical-likelihood descriptive-statistics object for endog.

    1-D input is reshaped into a single column; one column dispatches to
    ``DescStatUV`` (univariate), more than one to ``DescStatMV``.
    """
    if endog.ndim == 1:
        endog = endog.reshape(len(endog), 1)
    n_columns = endog.shape[1]
    if n_columns == 1:
        return DescStatUV(endog)
    if n_columns > 1:
        return DescStatMV(endog)
[ "def", "DescStat", "(", "endog", ")", ":", "if", "(", "endog", ".", "ndim", "==", "1", ")", ":", "endog", "=", "endog", ".", "reshape", "(", "len", "(", "endog", ")", ",", "1", ")", "if", "(", "endog", ".", "shape", "[", "1", "]", "==", "1", ...
returns an instance to conduct inference on descriptive statistics via empirical likelihood .
train
false
29,724
def make_istatepersister_tests(fixture):
    """Create a ``TestCase`` subclass exercising an ``IStatePersister``.

    :param fixture: callable taking the test instance and returning a
        ``(state_persister, get_state)`` pair.
    :return: the generated ``TestCase`` subclass.
    """
    class IStatePersisterTests(TestCase, ):
        """Tests for ``IStatePersister`` implementations."""

        def test_interface(self):
            """The object provides ``IStatePersister``."""
            (state_persister, _get_state) = fixture(self)
            verifyObject(IStatePersister, state_persister)

        @given(dataset_id=uuids(), blockdevice_id=text())
        def test_records_blockdeviceid(self, dataset_id, blockdevice_id):
            """``record_ownership`` records the blockdevice mapping in the state."""
            (state_persister, get_state) = fixture(self)
            d = state_persister.record_ownership(dataset_id=dataset_id, blockdevice_id=blockdevice_id)
            self.successResultOf(d)
            self.assertEqual(get_state().blockdevice_ownership[dataset_id], blockdevice_id)

        @given(dataset_id=uuids(), blockdevice_id=text(), other_blockdevice_id=text())
        def test_duplicate_blockdevice_id(self, dataset_id, blockdevice_id, other_blockdevice_id):
            """``record_ownership`` raises ``DatasetAlreadyOwned`` when the dataset
            already has an associated blockdevice, and keeps the first mapping."""
            assume((blockdevice_id != other_blockdevice_id))
            (state_persister, get_state) = fixture(self)
            self.successResultOf(state_persister.record_ownership(dataset_id=dataset_id, blockdevice_id=blockdevice_id))
            self.failureResultOf(state_persister.record_ownership(dataset_id=dataset_id, blockdevice_id=other_blockdevice_id), DatasetAlreadyOwned)
            self.assertEqual(get_state().blockdevice_ownership[dataset_id], blockdevice_id)
    return IStatePersisterTests
[ "def", "make_istatepersister_tests", "(", "fixture", ")", ":", "class", "IStatePersisterTests", "(", "TestCase", ",", ")", ":", "def", "test_interface", "(", "self", ")", ":", "(", "state_persister", ",", "_get_state", ")", "=", "fixture", "(", "self", ")", ...
create a testcase for istatepersister .
train
false
29,725
def get_varname(rpc_call):
    """Transform an RPC method name into a bare variable name.

    Strips any leading ``p.``/``t.``/``d.``/``f.``/``system.``/``get_``/
    ``is_``/``set_`` prefixes (case-insensitive) and returns the remainder
    up to the first ``=``; returns None when nothing matches.
    """
    match = re.search('([ptdf]\\.|system\\.|get\\_|is\\_|set\\_)+([^=]*)', rpc_call, re.I)
    if match is None:
        return None
    return match.groups()[-1]
[ "def", "get_varname", "(", "rpc_call", ")", ":", "r", "=", "re", ".", "search", "(", "'([ptdf]\\\\.|system\\\\.|get\\\\_|is\\\\_|set\\\\_)+([^=]*)'", ",", "rpc_call", ",", "re", ".", "I", ")", "if", "r", ":", "return", "r", ".", "groups", "(", ")", "[", "(...
transform rpc method into variable name .
train
false
29,726
def vectorize1(func, args=(), vec_func=False):
    """Vectorize calls to *func* over its first argument.

    When ``vec_func`` is True, *func* is assumed to already accept array
    input and is wrapped directly.  Otherwise the wrapper applies *func*
    element-by-element, using the first result's dtype for the output.
    """
    if vec_func:
        def vfunc(x):
            return func(x, *args)
        return vfunc

    def vfunc(x):
        # Scalars short-circuit straight to the underlying function.
        if np.isscalar(x):
            return func(x, *args)
        x = np.asarray(x)
        first = func(x[0], *args)
        count = len(x)
        out_dtype = getattr(first, 'dtype', type(first))
        out = np.empty((count,), dtype=out_dtype)
        out[0] = first
        for i in xrange(1, count):
            out[i] = func(x[i], *args)
        return out
    return vfunc
[ "def", "vectorize1", "(", "func", ",", "args", "=", "(", ")", ",", "vec_func", "=", "False", ")", ":", "if", "vec_func", ":", "def", "vfunc", "(", "x", ")", ":", "return", "func", "(", "x", ",", "*", "args", ")", "else", ":", "def", "vfunc", "(...
vectorize the call to a function .
train
false
29,730
def update_subtask_status(entry_id, current_task_id, new_subtask_status, retry_count=0):
    """Update the subtask's status on its parent InstructorTask entry.

    Retries recursively on DatabaseError up to MAX_DATABASE_LOCK_RETRIES
    times, then logs and re-raises.  The subtask lock is always released,
    even when the update ultimately fails.
    """
    try:
        _update_subtask_status(entry_id, current_task_id, new_subtask_status)
    except DatabaseError:
        retry_count += 1
        if (retry_count < MAX_DATABASE_LOCK_RETRIES):
            TASK_LOG.info('Retrying to update status for subtask %s of instructor task %d with status %s: retry %d', current_task_id, entry_id, new_subtask_status, retry_count)
            dog_stats_api.increment('instructor_task.subtask.retry_after_failed_update')
            # Recursive retry; each level's finally releases the lock.
            update_subtask_status(entry_id, current_task_id, new_subtask_status, retry_count)
        else:
            TASK_LOG.info('Failed to update status after %d retries for subtask %s of instructor task %d with status %s', retry_count, current_task_id, entry_id, new_subtask_status)
            dog_stats_api.increment('instructor_task.subtask.failed_after_update_retries')
            raise
    finally:
        _release_subtask_lock(current_task_id)
[ "def", "update_subtask_status", "(", "entry_id", ",", "current_task_id", ",", "new_subtask_status", ",", "retry_count", "=", "0", ")", ":", "try", ":", "_update_subtask_status", "(", "entry_id", ",", "current_task_id", ",", "new_subtask_status", ")", "except", "Data...
update the status of the subtask in the parent instructortask object tracking its progress .
train
false
29,732
def strip_regex_metachars(pattern):
    """Strip a single leading ``^`` and a single trailing ``$`` from *pattern*."""
    if pattern.startswith('^'):
        pattern = pattern[1:]
    if pattern.endswith('$'):
        pattern = pattern[:-1]
    return pattern
[ "def", "strip_regex_metachars", "(", "pattern", ")", ":", "start", "=", "0", "till", "=", "len", "(", "pattern", ")", "if", "pattern", ".", "startswith", "(", "'^'", ")", ":", "start", "=", "1", "if", "pattern", ".", "endswith", "(", "'$'", ")", ":",...
strip ^ and $ from pattern beginning and end .
train
false
29,733
@pytest.mark.parametrize('y, expected', [(0, '[top]'), (100, '[bot]'), (75, '[75%]'), (25, '[25%]'), (5, '[ 5%]'), (None, '[???]')])
def test_percentage_text(percentage, y, expected):
    """Widget text reflects the page's vertical scroll position *y*."""
    # Only y drives the percentage label; x is irrelevant here.
    percentage.set_perc(x=None, y=y)
    assert (percentage.text() == expected)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'y, expected'", ",", "[", "(", "0", ",", "'[top]'", ")", ",", "(", "100", ",", "'[bot]'", ")", ",", "(", "75", ",", "'[75%]'", ")", ",", "(", "25", ",", "'[25%]'", ")", ",", "(", "5", ",", ...
test text displayed by the widget based on the y position of a page .
train
false
29,734
def convert_yielded(yielded):
    """Convert a yielded object into a future.

    Lists and dicts are wrapped with ``multi_future``; futures pass
    through untouched; anything else raises ``BadYieldError``.
    """
    if isinstance(yielded, (list, dict)):
        return multi_future(yielded)
    if is_future(yielded):
        return yielded
    raise BadYieldError(('yielded unknown object %r' % (yielded,)))
[ "def", "convert_yielded", "(", "yielded", ")", ":", "if", "isinstance", "(", "yielded", ",", "(", "list", ",", "dict", ")", ")", ":", "return", "multi_future", "(", "yielded", ")", "elif", "is_future", "(", "yielded", ")", ":", "return", "yielded", "else...
convert a yielded object into a .
train
false
29,736
@pytest.mark.cmd
def test_pootle_init(capfd):
    """``pootle init --help`` output mentions the ``--db`` option."""
    call(['pootle', 'init', '--help'])
    (out, err) = capfd.readouterr()
    assert ('--db' in out)
[ "@", "pytest", ".", "mark", ".", "cmd", "def", "test_pootle_init", "(", "capfd", ")", ":", "call", "(", "[", "'pootle'", ",", "'init'", ",", "'--help'", "]", ")", "(", "out", ",", "err", ")", "=", "capfd", ".", "readouterr", "(", ")", "assert", "("...
pootle init --help .
train
false
29,737
def elemwise_kl(Y, Y_hat):
    """Elementwise cross-entropy between targets Y and sigmoid outputs Y_hat.

    Y_hat must be produced by an Elemwise ScalarSigmoid op; the
    pre-sigmoid input z is recovered from the graph so the numerically
    stable softplus form can be used:
        Y * softplus(-z) + (1 - Y) * softplus(z)
    """
    # Y_hat must come from an Elemwise sigmoid so we can grab its input z.
    assert hasattr(Y_hat, 'owner')
    owner = Y_hat.owner
    assert (owner is not None)
    op = owner.op
    if (not hasattr(op, 'scalar_op')):
        raise ValueError(((('Expected Y_hat to be generated by an Elemwise op, got ' + str(op)) + ' of type ') + str(type(op))))
    assert isinstance(op.scalar_op, T.nnet.sigm.ScalarSigmoid)
    # Sanity-check any debug values of Y: targets must lie in [0, 1].
    for Yv in get_debug_values(Y):
        if (not ((Yv.min() >= 0.0) and (Yv.max() <= 1.0))):
            raise ValueError(('Expected Y to be between 0 and 1. Either Y' + '< 0 or Y > 1 was found in the input.'))
    # z is the pre-sigmoid activation feeding the Elemwise sigmoid.
    (z,) = owner.inputs
    term_1 = (Y * T.nnet.softplus((- z)))
    term_2 = ((1 - Y) * T.nnet.softplus(z))
    total = (term_1 + term_2)
    return total
[ "def", "elemwise_kl", "(", "Y", ",", "Y_hat", ")", ":", "assert", "hasattr", "(", "Y_hat", ",", "'owner'", ")", "owner", "=", "Y_hat", ".", "owner", "assert", "(", "owner", "is", "not", "None", ")", "op", "=", "owner", ".", "op", "if", "(", "not", ...
warning: this function expects a sigmoid nonlinearity in the output layer .
train
false
29,738
def example_helper():
    """An example template helper returning a fixed piece of text."""
    message = 'This is some example text.'
    return message
[ "def", "example_helper", "(", ")", ":", "return", "'This is some example text.'" ]
an example template helper function .
train
false
29,739
@contextmanager
def catch_warnings():
    """Capture warnings raised inside a ``with`` block into a list.

    Yields a list that receives one dict (the ``showwarning`` call's
    locals) per warning.  DeprecationWarnings are re-enabled via the
    'default' filter; the original filters and showwarning hook are
    restored on exit.
    """
    warnings.simplefilter('default', category=DeprecationWarning)
    saved_filters = warnings.filters
    # Work on a copy so in-block filter changes don't leak out.
    warnings.filters = saved_filters[:]
    saved_showwarning = warnings.showwarning
    log = []

    def showwarning(message, category, filename, lineno, file=None, line=None):
        # Record every argument of the warning call.
        log.append(locals())

    try:
        warnings.showwarning = showwarning
        yield log
    finally:
        warnings.filters = saved_filters
        warnings.showwarning = saved_showwarning
[ "@", "contextmanager", "def", "catch_warnings", "(", ")", ":", "warnings", ".", "simplefilter", "(", "'default'", ",", "category", "=", "DeprecationWarning", ")", "filters", "=", "warnings", ".", "filters", "warnings", ".", "filters", "=", "filters", "[", ":",...
catch warnings in a with block in a list .
train
false
29,740
@pytest.fixture
def italian():
    """Fixture requiring the Italian ('it') language to be available."""
    return _require_language('it', 'Italian')
[ "@", "pytest", ".", "fixture", "def", "italian", "(", ")", ":", "return", "_require_language", "(", "'it'", ",", "'Italian'", ")" ]
require the italian language .
train
false
29,741
def get_toolbar_plugin_struct(plugins, slot=None, page=None):
    """Return the list of plugin entries to render in the toolbar.

    Labels and module names may be overridden per placeholder via the
    'plugin_labels' / 'plugin_modules' placeholder configuration; the
    result is sorted by module.
    """
    template = page.template if page else None
    modules = get_placeholder_conf('plugin_modules', slot, template, default={})
    names = get_placeholder_conf('plugin_labels', slot, template, default={})
    struct = [
        {
            'value': plugin.value,
            'name': names.get(plugin.value, plugin.name),
            'module': modules.get(plugin.value, plugin.module),
        }
        for plugin in plugins
    ]
    return sorted(struct, key=operator.itemgetter('module'))
[ "def", "get_toolbar_plugin_struct", "(", "plugins", ",", "slot", "=", "None", ",", "page", "=", "None", ")", ":", "template", "=", "None", "if", "page", ":", "template", "=", "page", ".", "template", "modules", "=", "get_placeholder_conf", "(", "'plugin_modu...
return the list of plugins to render in the toolbar .
train
false
29,745
def send_invitation(invitation):
    """Send an invitation email.

    Existing users get a membership notification; otherwise the raw email
    address receives a membership invitation.
    """
    context = {'membership': invitation}
    if invitation.user:
        email = mail_builder.membership_notification(invitation.user, context)
    else:
        email = mail_builder.membership_invitation(invitation.email, context)
    email.send()
[ "def", "send_invitation", "(", "invitation", ")", ":", "if", "invitation", ".", "user", ":", "template", "=", "mail_builder", ".", "membership_notification", "email", "=", "template", "(", "invitation", ".", "user", ",", "{", "'membership'", ":", "invitation", ...
send an invitation email .
train
false
29,746
def format_keyvals(lst, key='key', val='text', indent=0):
    """Format a list of (key, value) tuples as urwid rows.

    :param lst: list of 2-tuples; a None entry renders as a blank line.
        Values that are already urwid Widgets are used as-is, otherwise
        they are wrapped in urwid.Text with the *val* attribute.
    :param key: display attribute name for keys.
    :param val: display attribute name for plain-text values.
    :param indent: fixed left indent in columns.
    :return: list of urwid widgets.
    """
    ret = []
    if lst:
        # Widest non-empty key (capped at KEY_MAX) so values line up.
        maxk = min(max((len(i[0]) for i in lst if (i and i[0]))), KEY_MAX)
        for (i, kv) in enumerate(lst):
            if (kv is None):
                ret.append(urwid.Text(''))
            else:
                cols = []
                if indent:
                    cols.append(('fixed', indent, urwid.Text('')))
                cols.extend([('fixed', maxk, urwid.Text([(key, (kv[0] or ''))])), (kv[1] if isinstance(kv[1], urwid.Widget) else urwid.Text([(val, kv[1])]))])
                ret.append(urwid.Columns(cols, dividechars=2))
    return ret
[ "def", "format_keyvals", "(", "lst", ",", "key", "=", "'key'", ",", "val", "=", "'text'", ",", "indent", "=", "0", ")", ":", "ret", "=", "[", "]", "if", "lst", ":", "maxk", "=", "min", "(", "max", "(", "(", "len", "(", "i", "[", "0", "]", "...
format a list of tuples .
train
false
29,748
def google_nest_count(style):
    """Calculate the nesting depth of a Google Doc list from its CSS style.

    Derived from the 'margin-left' value (e.g. '72px') divided by the
    per-level indent GOOGLE_LIST_INDENT; 0 when no margin is present.
    """
    if 'margin-left' not in style:
        return 0
    # Drop the trailing unit (e.g. 'px') before converting.
    margin = int(style['margin-left'][:(-2)])
    return margin / GOOGLE_LIST_INDENT
[ "def", "google_nest_count", "(", "style", ")", ":", "nest_count", "=", "0", "if", "(", "'margin-left'", "in", "style", ")", ":", "nest_count", "=", "(", "int", "(", "style", "[", "'margin-left'", "]", "[", ":", "(", "-", "2", ")", "]", ")", "/", "G...
calculate the nesting count of google doc lists .
train
false
29,749
def expr_md5(expr):
    """Return the md5 hex digest of ``str(expr)`` (UTF-8 encoded)."""
    text = str(expr)
    if not isinstance(text, bytes):
        text = text.encode('utf-8')
    return md5(text).hexdigest()
[ "def", "expr_md5", "(", "expr", ")", ":", "exprstr", "=", "str", "(", "expr", ")", "if", "(", "not", "isinstance", "(", "exprstr", ",", "bytes", ")", ")", ":", "exprstr", "=", "exprstr", ".", "encode", "(", "'utf-8'", ")", "return", "md5", "(", "ex...
returns the md5 hash of the str of the expression .
train
false
29,750
def downscale_local_mean(image, factors, cval=0, clip=True):
    """Down-sample an N-D image by local averaging over ``factors`` blocks.

    :param cval: pad value used when ``image`` is not perfectly divisible
        by ``factors``.
    :param clip: unused here; presumably kept for API symmetry with the
        other rescaling functions -- NOTE(review): confirm intentional.
    """
    return block_reduce(image, factors, np.mean, cval)
[ "def", "downscale_local_mean", "(", "image", ",", "factors", ",", "cval", "=", "0", ",", "clip", "=", "True", ")", ":", "return", "block_reduce", "(", "image", ",", "factors", ",", "np", ".", "mean", ",", "cval", ")" ]
down-sample n-dimensional image by local averaging .
train
false
29,751
def _get_target_port(iscsi_string):
    """Retrieve the iSCSI target port.

    Extracts the port from a 'host:port' string when present, otherwise
    falls back to the configured xenserver target port.
    """
    has_explicit_port = bool(iscsi_string) and (':' in iscsi_string)
    if has_explicit_port:
        return iscsi_string.split(':')[1]
    return CONF.xenserver.target_port
[ "def", "_get_target_port", "(", "iscsi_string", ")", ":", "if", "(", "iscsi_string", "and", "(", "':'", "in", "iscsi_string", ")", ")", ":", "return", "iscsi_string", ".", "split", "(", "':'", ")", "[", "1", "]", "return", "CONF", ".", "xenserver", ".", ...
retrieve target port .
train
false
29,752
def release_local(local):
    """Release the data held by *local* for the current context.

    Works with any object exposing ``__release_local__`` (e.g. Local /
    LocalStack style objects).
    """
    local.__release_local__()
[ "def", "release_local", "(", "local", ")", ":", "local", ".", "__release_local__", "(", ")" ]
release the contents of the local for the current context .
train
false
29,753
def test_x_y_title(Chart):
    """Chart renders all three titles (main, x and y) as .title nodes."""
    chart = Chart(title='I Am A Title', x_title='I am a x title', y_title='I am a y title')
    chart.add('1', [4, (-5), 123, 59, 38])
    chart.add('2', [89, 0, 8, 0.12, 8])
    q = chart.render_pyquery()
    assert (len(q('.titles .title')) == 3)
[ "def", "test_x_y_title", "(", "Chart", ")", ":", "chart", "=", "Chart", "(", "title", "=", "'I Am A Title'", ",", "x_title", "=", "'I am a x title'", ",", "y_title", "=", "'I am a y title'", ")", "chart", ".", "add", "(", "'1'", ",", "[", "4", ",", "(", ...
test x title and y title options .
train
false
29,756
def prepare_xml_rss(target_path, filename):
    """Prepare an RSS test fixture under *target_path*.

    Creates an 'http_torrent_files' directory, copies the fixture XML
    while substituting RANDOMPORT with a freshly picked port, and
    returns ``(files_path, port)``.
    """
    files_path = os.path.join(target_path, 'http_torrent_files')
    os.mkdir(files_path)
    port = get_random_port()
    from Tribler.Test.common import TESTS_DATA_DIR
    with open(os.path.join(TESTS_DATA_DIR, filename), 'r') as source_xml:
        with open(os.path.join(target_path, filename), 'w') as destination_xml:
            for line in source_xml:
                destination_xml.write(line.replace('RANDOMPORT', str(port)))
    return (files_path, port)
[ "def", "prepare_xml_rss", "(", "target_path", ",", "filename", ")", ":", "files_path", "=", "os", ".", "path", ".", "join", "(", "target_path", ",", "'http_torrent_files'", ")", "os", ".", "mkdir", "(", "files_path", ")", "port", "=", "get_random_port", "(",...
function to prepare test_rss .
train
false
29,757
def remove_small_holes(ar, min_size=64, connectivity=1, in_place=False):
    """Remove contiguous holes smaller than ``min_size`` from a boolean image.

    Implemented by inverting the image, removing small objects, and
    inverting back.  When ``in_place`` is True the inversions reuse
    ``ar``'s buffer (``np.logical_not(out, out)``).
    """
    _check_dtype_supported(ar)
    if (ar.dtype != bool):
        warn('Any labeled images will be returned as a boolean array. Did you mean to use a boolean array?', UserWarning)
    if in_place:
        out = ar
    else:
        out = ar.copy()
    # Invert so holes become foreground objects.
    if in_place:
        out = np.logical_not(out, out)
    else:
        out = np.logical_not(out)
    out = remove_small_objects(out, min_size, connectivity, in_place)
    # Invert back to the original polarity.
    if in_place:
        out = np.logical_not(out, out)
    else:
        out = np.logical_not(out)
    return out
[ "def", "remove_small_holes", "(", "ar", ",", "min_size", "=", "64", ",", "connectivity", "=", "1", ",", "in_place", "=", "False", ")", ":", "_check_dtype_supported", "(", "ar", ")", "if", "(", "ar", ".", "dtype", "!=", "bool", ")", ":", "warn", "(", ...
remove continguous holes smaller than the specified size .
train
false
29,758
def reapVarArgsCallback(option, optStr, value, parser):
    """optparse callback reaping a variable number of option arguments.

    Consumes tokens from ``parser.rargs`` until the next option flag or a
    lone '.' terminator, appending them to the list stored at
    ``option.dest``.  Raises OptionValueError when no arguments follow.
    """
    newValues = []
    gotDot = False
    for arg in parser.rargs:
        # Stop at the next long or short option flag.
        if (arg.startswith('--') and (len(arg) > 2)):
            break
        if (arg.startswith('-') and (len(arg) > 1)):
            break
        # A lone '.' explicitly terminates the arg list (and is consumed).
        if (arg == '.'):
            gotDot = True
            break
        newValues.append(arg)
    if (not newValues):
        raise optparse.OptionValueError(('Empty arg list for option %r expecting one or more args (remaining tokens: %r)' % (optStr, parser.rargs)))
    # Remove the consumed tokens, including the '.' terminator if seen.
    del parser.rargs[:(len(newValues) + int(gotDot))]
    value = getattr(parser.values, option.dest, [])
    if (value is None):
        value = []
    value.extend(newValues)
    setattr(parser.values, option.dest, value)
[ "def", "reapVarArgsCallback", "(", "option", ",", "optStr", ",", "value", ",", "parser", ")", ":", "newValues", "=", "[", "]", "gotDot", "=", "False", "for", "arg", "in", "parser", ".", "rargs", ":", "if", "(", "arg", ".", "startswith", "(", "'--'", ...
used as optparse callback for reaping a variable number of option args .
train
true
29,760
def simple_matrix_print(matrix):
    """Render a float matrix as a stable fixed-format string.

    Each value is formatted with '% 1.4f' so output is identical across
    systems; rows are bracketed and joined with newline + space.
    """
    formatted_rows = []
    for row in matrix:
        cells = ' '.join((('% 1.4f' % v) for v in row))
        formatted_rows.append('[%s]' % cells)
    return '[%s]' % '\n '.join(formatted_rows)
[ "def", "simple_matrix_print", "(", "matrix", ")", ":", "return", "(", "'[%s]'", "%", "'\\n '", ".", "join", "(", "(", "(", "'[%s]'", "%", "' '", ".", "join", "(", "(", "(", "'% 1.4f'", "%", "v", ")", "for", "v", "in", "row", ")", ")", ")", "for",...
simple string to display a floating point matrix this should give the same output on multiple systems .
train
false
29,761
def skip_unless_symlink(test):
    """Skip decorator for tests that require a functional symlink implementation."""
    if can_symlink():
        return test
    return unittest.skip('Requires functional symlink implementation')(test)
[ "def", "skip_unless_symlink", "(", "test", ")", ":", "ok", "=", "can_symlink", "(", ")", "msg", "=", "'Requires functional symlink implementation'", "return", "(", "test", "if", "ok", "else", "unittest", ".", "skip", "(", "msg", ")", "(", "test", ")", ")" ]
skip decorator for tests that require functional symlink .
train
false
29,762
def _handleJAVAParameters(options):
    """Translate legacy (JAVA-era) experiment options in place.

    Maps the old 'prediction' specification to options['inferenceType']
    (only TemporalNextStep is supported) and mirrors 'predictionField'
    into options['inferenceArgs']['predictedField'].
    """
    if ('inferenceType' not in options):
        # Default to optimizing TemporalNextStep when nothing is specified.
        prediction = options.get('prediction', {InferenceType.TemporalNextStep: {'optimize': True}})
        inferenceType = None
        for (infType, value) in prediction.iteritems():
            if value['optimize']:
                inferenceType = infType
                break
        # Legacy alias: 'temporal' means TemporalNextStep.
        if (inferenceType == 'temporal'):
            inferenceType = InferenceType.TemporalNextStep
        if (inferenceType != InferenceType.TemporalNextStep):
            raise _ExpGeneratorException(('Unsupported inference type %s' % inferenceType))
        options['inferenceType'] = inferenceType
    if ('predictionField' in options):
        if ('inferenceArgs' not in options):
            options['inferenceArgs'] = {'predictedField': options['predictionField']}
        elif ('predictedField' not in options['inferenceArgs']):
            options['inferenceArgs']['predictedField'] = options['predictionField']
[ "def", "_handleJAVAParameters", "(", "options", ")", ":", "if", "(", "'inferenceType'", "not", "in", "options", ")", ":", "prediction", "=", "options", ".", "get", "(", "'prediction'", ",", "{", "InferenceType", ".", "TemporalNextStep", ":", "{", "'optimize'",...
handle legacy options .
train
true
29,763
def hg_revision_timestamp(rev):
    """Return the float timestamp of Mercurial revision *rev*.

    Uses the latest-dated ancestor of *rev*; returns None if the hg
    command fails.
    """
    revset = ('last(sort(ancestors(%s), date))' % rev)
    cmd = ['hg', 'log', '-r', revset, '--template', '{date}']
    try:
        res = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        return float(res.strip())
    except subprocess.CalledProcessError:
        return None
[ "def", "hg_revision_timestamp", "(", "rev", ")", ":", "try", ":", "revset", "=", "(", "'last(sort(ancestors(%s), date))'", "%", "rev", ")", "res", "=", "subprocess", ".", "check_output", "(", "[", "'hg'", ",", "'log'", ",", "'-r'", ",", "revset", ",", "'--...
returns the timestamp of rev .
train
false
29,764
def xyz2rgb(xyz):
    """XYZ to sRGB color-space conversion.

    Applies the linear transform then the piecewise sRGB gamma companding,
    clamping the result to [0, 1].
    """
    arr = _convert(rgb_from_xyz, xyz)
    # Piecewise gamma: power law above the threshold, linear below it.
    mask = arr > 0.0031308
    arr[mask] = 1.055 * np.power(arr[mask], (1 / 2.4)) - 0.055
    arr[~mask] *= 12.92
    np.clip(arr, 0, 1, out=arr)
    return arr
[ "def", "xyz2rgb", "(", "xyz", ")", ":", "arr", "=", "_convert", "(", "rgb_from_xyz", ",", "xyz", ")", "mask", "=", "(", "arr", ">", "0.0031308", ")", "arr", "[", "mask", "]", "=", "(", "(", "1.055", "*", "np", ".", "power", "(", "arr", "[", "ma...
xyz to rgb color space conversion .
train
false
29,765
def get_meta_refresh(response):
    """Parse (and memoize) the http-equiv refresh parameter of *response*.

    Only the first 4096 characters of the body are inspected; results are
    cached per response object.
    """
    if response not in _metaref_cache:
        head = response.text[0:4096]
        _metaref_cache[response] = html.get_meta_refresh(
            head, response.url, response.encoding, ignore_tags=('script', 'noscript'))
    return _metaref_cache[response]
[ "def", "get_meta_refresh", "(", "response", ")", ":", "if", "(", "response", "not", "in", "_metaref_cache", ")", ":", "text", "=", "response", ".", "text", "[", "0", ":", "4096", "]", "_metaref_cache", "[", "response", "]", "=", "html", ".", "get_meta_re...
parse the http-equiv refresh parameter from the given response .
train
false
29,766
def xattr_writes_supported(path):
    """Return True if we can write a file under *path* and set a user xattr on it.

    Returns False when the ``xattr`` package is unavailable or the
    filesystem reports EOPNOTSUPP for extended attributes.
    """
    try:
        import xattr
    except ImportError:
        return False

    def set_xattr(path, key, value):
        xattr.setxattr(path, ('user.%s' % key), str(value))

    fake_filepath = os.path.join(path, 'testing-checkme')
    result = True
    with open(fake_filepath, 'wb') as fake_file:
        # BUG FIX: write bytes, not str -- a str payload raises TypeError
        # on Python 3 for a binary-mode file.
        fake_file.write(b'XXX')
        fake_file.flush()
    try:
        set_xattr(fake_filepath, 'hits', '1')
    except IOError as e:
        if (e.errno == errno.EOPNOTSUPP):
            result = False
    finally:
        # BUG FIX: always clean up the probe file; previously it was only
        # removed on success, leaking it when xattrs were unsupported.
        if os.path.exists(fake_filepath):
            os.unlink(fake_filepath)
    return result
[ "def", "xattr_writes_supported", "(", "path", ")", ":", "try", ":", "import", "xattr", "except", "ImportError", ":", "return", "False", "def", "set_xattr", "(", "path", ",", "key", ",", "value", ")", ":", "xattr", ".", "setxattr", "(", "path", ",", "(", ...
returns true if the we can write a file to the supplied path and subsequently write a xattr to that file .
train
false
29,767
def maybeDeferred(f, *args, **kw):
    """Invoke ``f(*args, **kw)`` and always return a Deferred.

    - a raised exception becomes a failed Deferred
    - a returned Deferred is passed through unchanged
    - a returned Failure becomes a failed Deferred
    - any other result becomes an already-fired Deferred
    """
    try:
        result = f(*args, **kw)
    except:
        # Bare except is deliberate: ANY exception becomes a failure.
        return fail(failure.Failure(captureVars=Deferred.debug))
    if isinstance(result, Deferred):
        return result
    elif isinstance(result, failure.Failure):
        return fail(result)
    else:
        return succeed(result)
[ "def", "maybeDeferred", "(", "f", ",", "*", "args", ",", "**", "kw", ")", ":", "try", ":", "result", "=", "f", "(", "*", "args", ",", "**", "kw", ")", "except", ":", "return", "fail", "(", "failure", ".", "Failure", "(", "captureVars", "=", "Defe...
invoke a function that may or may not return a l{deferred} .
train
false
29,768
def addLineLoopsIntersections(loopLoopsIntersections, loops, pointBegin, pointEnd):
    """Append intersections of segment (pointBegin, pointEnd) with *loops*.

    Points are complex numbers.  The plane is rotated so the segment lies
    on the x axis, x-axis intersections are collected, then each hit is
    rotated back into the original frame.
    """
    normalizedSegment = (pointEnd - pointBegin)
    normalizedSegmentLength = abs(normalizedSegment)
    if (normalizedSegmentLength <= 0.0):
        # Degenerate (zero-length) segment: nothing to intersect.
        return
    lineLoopsIntersections = []
    normalizedSegment /= normalizedSegmentLength
    # Conjugate of the unit direction rotates the segment onto the x axis.
    segmentYMirror = complex(normalizedSegment.real, (- normalizedSegment.imag))
    pointBeginRotated = (segmentYMirror * pointBegin)
    pointEndRotated = (segmentYMirror * pointEnd)
    addLoopsXSegmentIntersections(lineLoopsIntersections, loops, pointBeginRotated.real, pointEndRotated.real, segmentYMirror, pointBeginRotated.imag)
    for lineLoopsIntersection in lineLoopsIntersections:
        # Rotate each x-axis hit back into the original coordinate frame.
        point = (complex(lineLoopsIntersection, pointBeginRotated.imag) * normalizedSegment)
        loopLoopsIntersections.append(point)
[ "def", "addLineLoopsIntersections", "(", "loopLoopsIntersections", ",", "loops", ",", "pointBegin", ",", "pointEnd", ")", ":", "normalizedSegment", "=", "(", "pointEnd", "-", "pointBegin", ")", "normalizedSegmentLength", "=", "abs", "(", "normalizedSegment", ")", "i...
add intersections of the line with the loops .
train
false
29,769
def qnwsimp(n, a, b):
    """Compute multivariate Simpson quadrature nodes and weights.

    Thin wrapper delegating the 1-D rule ``_qnwsimp1`` to
    ``_make_multidim_func`` for the tensor-product extension.
    """
    return _make_multidim_func(_qnwsimp1, n, a, b)
[ "def", "qnwsimp", "(", "n", ",", "a", ",", "b", ")", ":", "return", "_make_multidim_func", "(", "_qnwsimp1", ",", "n", ",", "a", ",", "b", ")" ]
computes multivariate simpson quadrature nodes and weights .
train
false
29,770
def _branch_status(branch):
    """Return a dict describing the branch's changed filenames.

    NOTE(review): despite older descriptions saying "tuple", this returns
    a dict; both keys deliberately share the same staged list.
    """
    staged = diff_filenames(branch)
    return {u'staged': staged, u'upstream_changed': staged}
[ "def", "_branch_status", "(", "branch", ")", ":", "staged", "=", "diff_filenames", "(", "branch", ")", "return", "{", "u'staged'", ":", "staged", ",", "u'upstream_changed'", ":", "staged", "}" ]
returns a tuple of staged .
train
false
29,772
@treeio_login_required
def user_view(request, user_id, response_format='html'):
    """Render the contact view for the contact linked to *user_id*.

    NOTE(review): assumes at least one Contact is related to the user;
    an unlinked user would raise IndexError here -- confirm callers
    guarantee the link.
    """
    user = get_object_or_404(User, pk=user_id)
    contact_id = Contact.objects.filter(related_user=user)[0].id
    return contact_view(request, contact_id, attribute='', response_format=response_format)
[ "@", "treeio_login_required", "def", "user_view", "(", "request", ",", "user_id", ",", "response_format", "=", "'html'", ")", ":", "user", "=", "get_object_or_404", "(", "User", ",", "pk", "=", "user_id", ")", "contact_id", "=", "Contact", ".", "objects", "....
user view .
train
false
29,775
def _set_file(path):
    """Run ``debconf-set-selections`` on the selections file at *path*.

    NOTE(review): *path* is interpolated into the command string without
    quoting; python_shell=False mitigates shell injection, but confirm
    callers pass trusted paths.
    """
    cmd = 'debconf-set-selections {0}'.format(path)
    __salt__['cmd.run_stdout'](cmd, python_shell=False)
[ "def", "_set_file", "(", "path", ")", ":", "cmd", "=", "'debconf-set-selections {0}'", ".", "format", "(", "path", ")", "__salt__", "[", "'cmd.run_stdout'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")" ]
execute the set selections command for debconf .
train
false
29,776
def get_pkg_data_filenames(datadir, package=None, pattern=u'*'):
    """Yield the paths of data files in *datadir* matching *pattern*.

    Raises IOError when the resolved path is a file rather than a
    directory, or does not exist at all.
    """
    path = _find_pkg_data_path(datadir, package=package)
    if os.path.isfile(path):
        raise IOError(u"Tried to access a data directory that's actually a package data file")
    if not os.path.isdir(path):
        raise IOError(u'Path not found')
    for filename in os.listdir(path):
        if fnmatch.fnmatch(filename, pattern):
            yield os.path.join(path, filename)
[ "def", "get_pkg_data_filenames", "(", "datadir", ",", "package", "=", "None", ",", "pattern", "=", "u'*'", ")", ":", "path", "=", "_find_pkg_data_path", "(", "datadir", ",", "package", "=", "package", ")", "if", "os", ".", "path", ".", "isfile", "(", "pa...
returns the path of all of the data files in a given directory that match a given glob pattern .
train
false
29,778
def get_scipy_status():
    """Return a dict describing the installed scipy.

    Keys: 'up_to_date' (bool, version >= SCIPY_MIN_VERSION) and
    'version' (str, empty when scipy is missing).
    """
    status = {}
    try:
        import scipy
        version = scipy.__version__
        status['up_to_date'] = parse_version(version) >= parse_version(SCIPY_MIN_VERSION)
        status['version'] = version
    except ImportError:
        traceback.print_exc()
        status['up_to_date'] = False
        status['version'] = ''
    return status
[ "def", "get_scipy_status", "(", ")", ":", "scipy_status", "=", "{", "}", "try", ":", "import", "scipy", "scipy_version", "=", "scipy", ".", "__version__", "scipy_status", "[", "'up_to_date'", "]", "=", "(", "parse_version", "(", "scipy_version", ")", ">=", "...
returns a dictionary containing a boolean specifying whether scipy is up-to-date .
train
false
29,779
def CDLPIERCING(barDs, count):
    """Piercing Pattern candlestick indicator (TA-Lib OHLC wrapper)."""
    return call_talib_with_ohlc(barDs, count, talib.CDLPIERCING)
[ "def", "CDLPIERCING", "(", "barDs", ",", "count", ")", ":", "return", "call_talib_with_ohlc", "(", "barDs", ",", "count", ",", "talib", ".", "CDLPIERCING", ")" ]
piercing pattern .
train
false
29,780
def getVector3Index(index=0, x=0.0, y=0.0, z=0.0):
    """Construct a Vector3Index from an index and x/y/z coordinates."""
    return Vector3Index(index, x, y, z)
[ "def", "getVector3Index", "(", "index", "=", "0", ",", "x", "=", "0.0", ",", "y", "=", "0.0", ",", "z", "=", "0.0", ")", ":", "return", "Vector3Index", "(", "index", ",", "x", ",", "y", ",", "z", ")" ]
get the vector3 .
train
false
29,781
def base36_to_int(s):
    """Convert a base-36 string to an int.

    Raises ValueError for inputs longer than 13 characters (guards
    against huge-number abuse) and, on Python 2, for values exceeding
    sys.maxint.
    """
    if (len(s) > 13):
        raise ValueError(u'Base36 input too large')
    value = int(s, 36)
    # Python 2 only: long values beyond maxint are rejected as well.
    if ((not six.PY3) and (value > sys.maxint)):
        raise ValueError(u'Base36 input too large')
    return value
[ "def", "base36_to_int", "(", "s", ")", ":", "if", "(", "len", "(", "s", ")", ">", "13", ")", ":", "raise", "ValueError", "(", "u'Base36 input too large'", ")", "value", "=", "int", "(", "s", ",", "36", ")", "if", "(", "(", "not", "six", ".", "PY3...
converts a base 36 string to an int .
train
false
29,782
def title_to_ids(title):
    """Split a FASTA-style title into ``(id, name, description)``.

    NCBI pipe-delimited identifiers (e.g. ``gi|125|gb|ACC|NAME``) yield
    the accession as id and the following field as name; otherwise the
    first word serves as both id and name.
    """
    first_word, _sep, descr = title.partition(' ')
    fields = first_word.split('|')
    if len(fields) >= 4:
        # NCBI-style identifier; only these databases are expected.
        assert fields[2] in ['gb', 'emb', 'dbj', 'pdb'], title
        seq_id = fields[3]
        name = fields[4]
    else:
        seq_id = fields[0]
        name = fields[0]
    return (seq_id, name, descr)
[ "def", "title_to_ids", "(", "title", ")", ":", "all_info", "=", "title", ".", "split", "(", "' '", ")", "id_info", "=", "all_info", "[", "0", "]", "rest", "=", "all_info", "[", "1", ":", "]", "descr", "=", "' '", ".", "join", "(", "rest", ")", "i...
function to convert a title into the id .
train
false
29,784
def NOTERM(v):
    """Disable pretty terminal settings and animations when *v* is truthy.

    Side effect: clears the module-global ``term_mode`` flag.
    """
    if asbool(v):
        global term_mode
        term_mode = False
[ "def", "NOTERM", "(", "v", ")", ":", "if", "asbool", "(", "v", ")", ":", "global", "term_mode", "term_mode", "=", "False" ]
disables pretty terminal settings and animations .
train
false
29,785
@asyncio.coroutine
def mock_async_subprocess():
    """Return a MagicMock imitating an asyncio subprocess.

    Its ``communicate()`` coroutine returns the alpr_stdout.txt fixture
    as stdout and None as stderr.
    """
    async_popen = MagicMock()

    @asyncio.coroutine
    def communicate(input=None):
        """Return canned (stdout, stderr) like Popen.communicate."""
        fixture = bytes(load_fixture('alpr_stdout.txt'), 'utf-8')
        return (fixture, None)
    async_popen.communicate = communicate
    return async_popen
[ "@", "asyncio", ".", "coroutine", "def", "mock_async_subprocess", "(", ")", ":", "async_popen", "=", "MagicMock", "(", ")", "@", "asyncio", ".", "coroutine", "def", "communicate", "(", "input", "=", "None", ")", ":", "fixture", "=", "bytes", "(", "load_fix...
get a popen mock back .
train
false
29,786
def fp_field_name(name):
    """Translate a literal field name to the sanitized form feedparser uses."""
    sanitized = name.replace(u':', u'_')
    return sanitized.lower()
[ "def", "fp_field_name", "(", "name", ")", ":", "return", "name", ".", "replace", "(", "u':'", ",", "u'_'", ")", ".", "lower", "(", ")" ]
translates literal field name to the sanitized one feedparser will use .
train
false
29,787
def create_media_urls(nav, path_list):
    """Return URLs processed for inclusion in a page.

    Paths that are already absolute (have a netloc) pass through
    untouched; local paths are converted and made relative to the
    navigation's URL context.
    """
    final_urls = []
    for path in path_list:
        if urlparse(path).netloc:
            # Already an absolute URL; leave it as-is.
            final_urls.append(path)
            continue
        url = path_to_url(path)
        final_urls.append(u'%s/%s' % (nav.url_context.make_relative(u'/'), url))
    return final_urls
[ "def", "create_media_urls", "(", "nav", ",", "path_list", ")", ":", "final_urls", "=", "[", "]", "for", "path", "in", "path_list", ":", "parsed", "=", "urlparse", "(", "path", ")", "if", "parsed", ".", "netloc", ":", "final_urls", ".", "append", "(", "...
return a list of urls that have been processed correctly for inclusion in a page .
train
false
29,788
def path_info_split(path_info): if (not path_info): return (None, '') assert path_info.startswith('/'), ('PATH_INFO should start with /: %r' % path_info) path_info = path_info.lstrip('/') if ('/' in path_info): (first, rest) = path_info.split('/', 1) return (first, ('/' + rest)) else: return (path_info, '')
[ "def", "path_info_split", "(", "path_info", ")", ":", "if", "(", "not", "path_info", ")", ":", "return", "(", "None", ",", "''", ")", "assert", "path_info", ".", "startswith", "(", "'/'", ")", ",", "(", "'PATH_INFO should start with /: %r'", "%", "path_info"...
splits off the first segment of the path .
train
false
29,790
def dmp_ground_monic(f, u, K): if (not u): return dup_monic(f, K) if dmp_zero_p(f, u): return f lc = dmp_ground_LC(f, u, K) if K.is_one(lc): return f else: return dmp_exquo_ground(f, lc, u, K)
[ "def", "dmp_ground_monic", "(", "f", ",", "u", ",", "K", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_monic", "(", "f", ",", "K", ")", "if", "dmp_zero_p", "(", "f", ",", "u", ")", ":", "return", "f", "lc", "=", "dmp_ground_LC", "("...
divide all coefficients by lc(f) in k[x] .
train
false
29,794
def notify_new_string(translation): mails = [] subscriptions = Profile.objects.subscribed_new_string(translation.subproject.project, translation.language) for subscription in subscriptions: mails.append(subscription.notify_new_string(translation)) send_mails(mails)
[ "def", "notify_new_string", "(", "translation", ")", ":", "mails", "=", "[", "]", "subscriptions", "=", "Profile", ".", "objects", ".", "subscribed_new_string", "(", "translation", ".", "subproject", ".", "project", ",", "translation", ".", "language", ")", "f...
notification on new string to translate .
train
false
29,796
def _get_format_control(values, option): return getattr(values, option.dest)
[ "def", "_get_format_control", "(", "values", ",", "option", ")", ":", "return", "getattr", "(", "values", ",", "option", ".", "dest", ")" ]
get a format_control object .
train
false
29,797
def is_ssl_error(exc): if isinstance(exc, SSLError): return True else: args = getattr(exc, 'args', []) if (args and isinstance(args[0], SSLError)): return True else: return False
[ "def", "is_ssl_error", "(", "exc", ")", ":", "if", "isinstance", "(", "exc", ",", "SSLError", ")", ":", "return", "True", "else", ":", "args", "=", "getattr", "(", "exc", ",", "'args'", ",", "[", "]", ")", "if", "(", "args", "and", "isinstance", "(...
checks if the given error is an ssl error .
train
false
29,798
def _get_unpacking_extra_info(node, infered): more = '' infered_module = infered.root().name if (node.root().name == infered_module): if (node.lineno == infered.lineno): more = (' %s' % infered.as_string()) elif infered.lineno: more = (' defined at line %s' % infered.lineno) elif infered.lineno: more = (' defined at line %s of %s' % (infered.lineno, infered_module)) return more
[ "def", "_get_unpacking_extra_info", "(", "node", ",", "infered", ")", ":", "more", "=", "''", "infered_module", "=", "infered", ".", "root", "(", ")", ".", "name", "if", "(", "node", ".", "root", "(", ")", ".", "name", "==", "infered_module", ")", ":",...
return extra information to add to the message for unpacking-non-sequence and unbalanced-tuple-unpacking errors .
train
true
29,799
def attach_image(img_dict, filename): img_path = img_dict['path'] if (not img_path.startswith('/')): img_path = file_path_finder(img_path) if img_path: with open(img_path, 'rb') as img: msg_image = MIMEImage(img.read(), name=os.path.basename(img_path)) msg_image.add_header('Content-ID', '<{}>'.format(img_dict['cid'])) msg_image.add_header('Content-Disposition', 'inline', filename=filename) return msg_image
[ "def", "attach_image", "(", "img_dict", ",", "filename", ")", ":", "img_path", "=", "img_dict", "[", "'path'", "]", "if", "(", "not", "img_path", ".", "startswith", "(", "'/'", ")", ")", ":", "img_path", "=", "file_path_finder", "(", "img_path", ")", "if...
attach images in the email headers .
train
false
29,801
def get_identifier_method(key): if hasattr(key, u'get_custom_haystack_id'): return key.get_custom_haystack_id() else: key_bytes = key.encode(u'utf-8') return hashlib.md5(key_bytes).hexdigest()
[ "def", "get_identifier_method", "(", "key", ")", ":", "if", "hasattr", "(", "key", ",", "u'get_custom_haystack_id'", ")", ":", "return", "key", ".", "get_custom_haystack_id", "(", ")", "else", ":", "key_bytes", "=", "key", ".", "encode", "(", "u'utf-8'", ")"...
custom get_identifier method used for testing the setting haystack_identifier_module .
train
false
29,802
def upgrade_to_float_no_complex(*types): for type in types: if (type in complex_types): raise TypeError('complex argument not supported') return upgrade_to_float(*types)
[ "def", "upgrade_to_float_no_complex", "(", "*", "types", ")", ":", "for", "type", "in", "types", ":", "if", "(", "type", "in", "complex_types", ")", ":", "raise", "TypeError", "(", "'complex argument not supported'", ")", "return", "upgrade_to_float", "(", "*", ...
dont accept complex .
train
false
29,803
@lower_builtin('getitem', types.Buffer, types.BaseTuple) def getitem_array_tuple(context, builder, sig, args): (aryty, tupty) = sig.args (ary, tup) = args ary = make_array(aryty)(context, builder, ary) index_types = tupty.types indices = cgutils.unpack_tuple(builder, tup, count=len(tupty)) (index_types, indices) = normalize_indices(context, builder, index_types, indices) if any((isinstance(ty, types.Array) for ty in index_types)): return fancy_getitem(context, builder, sig, args, aryty, ary, index_types, indices) res = _getitem_array_generic(context, builder, sig.return_type, aryty, ary, index_types, indices) return impl_ret_borrowed(context, builder, sig.return_type, res)
[ "@", "lower_builtin", "(", "'getitem'", ",", "types", ".", "Buffer", ",", "types", ".", "BaseTuple", ")", "def", "getitem_array_tuple", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "(", "aryty", ",", "tupty", ")", "=", "sig", ".",...
basic or advanced indexing with a tuple .
train
false
29,804
def update_module_attributes(object_names, module_name): module = sys.modules[module_name] for object_name in object_names: getattr(module, object_name).__module__ = module_name
[ "def", "update_module_attributes", "(", "object_names", ",", "module_name", ")", ":", "module", "=", "sys", ".", "modules", "[", "module_name", "]", "for", "object_name", "in", "object_names", ":", "getattr", "(", "module", ",", "object_name", ")", ".", "__mod...
update __module__ attribute of objects in module .
train
false
29,805
def clean_date_for_sort(dt, format): if (not format): format = 'yyMd' if (not isinstance(dt, datetime)): dt = datetime.combine(dt, time()) if hasattr(dt, 'tzinfo'): if (dt.tzinfo is None): dt = dt.replace(tzinfo=_local_tz) dt = as_local_time(dt) if (format == 'iso'): format = 'yyMdhms' tt = {'year': UNDEFINED_DATE.year, 'mon': UNDEFINED_DATE.month, 'day': UNDEFINED_DATE.day, 'hour': UNDEFINED_DATE.hour, 'min': UNDEFINED_DATE.minute, 'sec': UNDEFINED_DATE.second} repl_func = partial(cd_repl_func, tt, dt) re.sub('(s{1,2})|(m{1,2})|(h{1,2})|(d{1,4}|M{1,4}|(?:yyyy|yy))', repl_func, format) return dt.replace(year=tt['year'], month=tt['mon'], day=tt['day'], hour=tt['hour'], minute=tt['min'], second=tt['sec'], microsecond=0)
[ "def", "clean_date_for_sort", "(", "dt", ",", "format", ")", ":", "if", "(", "not", "format", ")", ":", "format", "=", "'yyMd'", "if", "(", "not", "isinstance", "(", "dt", ",", "datetime", ")", ")", ":", "dt", "=", "datetime", ".", "combine", "(", ...
return dt with fields not in shown in format set to a default .
train
false
29,807
def prepare_config(config): p = config.copy() p.update({k: json.dumps(v, ensure_ascii=False) for (k, v) in p.items() if (k not in (u'POSTS', u'PAGES', u'COMPILERS', u'TRANSLATIONS', u'NAVIGATION_LINKS', u'_SUPPORTED_LANGUAGES', u'_SUPPORTED_COMMENT_SYSTEMS', u'INDEX_READ_MORE_LINK', u'FEED_READ_MORE_LINK'))}) p[u'INDEX_READ_MORE_LINK'] = ((u"'" + p[u'INDEX_READ_MORE_LINK'].replace(u"'", u"\\'")) + u"'") p[u'FEED_READ_MORE_LINK'] = ((u"'" + p[u'FEED_READ_MORE_LINK'].replace(u"'", u"\\'")) + u"'") p.update({k: str(v) for (k, v) in config.items() if (isinstance(v, bool) or (v is None))}) return p
[ "def", "prepare_config", "(", "config", ")", ":", "p", "=", "config", ".", "copy", "(", ")", "p", ".", "update", "(", "{", "k", ":", "json", ".", "dumps", "(", "v", ",", "ensure_ascii", "=", "False", ")", "for", "(", "k", ",", "v", ")", "in", ...
parse sample config with json .
train
false
29,809
def foveate_channel(img, rings, output, start_idx): ring_w = numpy.sum(rings) inner_img = img[:, ring_w:(img.shape[1] - ring_w), ring_w:(img.shape[2] - ring_w)] inner_img = inner_img.reshape(len(output), (-1)) end_idx = (start_idx + inner_img.shape[1]) output[:, start_idx:end_idx] = inner_img idx = 0 start_idx = end_idx for rd in rings: start_idx = downsample_ring(img, idx, rd, output, start_idx) idx += rd return start_idx
[ "def", "foveate_channel", "(", "img", ",", "rings", ",", "output", ",", "start_idx", ")", ":", "ring_w", "=", "numpy", ".", "sum", "(", "rings", ")", "inner_img", "=", "img", "[", ":", ",", "ring_w", ":", "(", "img", ".", "shape", "[", "1", "]", ...
for a given channel .
train
false
29,810
def foldmarker(): return vim.eval('&foldmarker').split(',')
[ "def", "foldmarker", "(", ")", ":", "return", "vim", ".", "eval", "(", "'&foldmarker'", ")", ".", "split", "(", "','", ")" ]
return a tuple of .
train
false
29,811
def cmd_exists(cmds_list, cmd): return (cmd in cmds_list)
[ "def", "cmd_exists", "(", "cmds_list", ",", "cmd", ")", ":", "return", "(", "cmd", "in", "cmds_list", ")" ]
check if given command is in list of available commands .
train
false
29,812
def diverging_palette(h_neg, h_pos, s=75, l=50, sep=10, n=6, center='light', as_cmap=False): palfunc = (dark_palette if (center == 'dark') else light_palette) neg = palfunc((h_neg, s, l), (128 - (sep / 2)), reverse=True, input='husl') pos = palfunc((h_pos, s, l), (128 - (sep / 2)), input='husl') midpoint = dict(light=[(0.95, 0.95, 0.95, 1.0)], dark=[(0.133, 0.133, 0.133, 1.0)])[center] mid = (midpoint * sep) pal = blend_palette(np.concatenate([neg, mid, pos]), n, as_cmap=as_cmap) return pal
[ "def", "diverging_palette", "(", "h_neg", ",", "h_pos", ",", "s", "=", "75", ",", "l", "=", "50", ",", "sep", "=", "10", ",", "n", "=", "6", ",", "center", "=", "'light'", ",", "as_cmap", "=", "False", ")", ":", "palfunc", "=", "(", "dark_palette...
make a diverging palette between two husl colors .
train
false
29,813
def _extract_inventories(body, schema): data = util.extract_json(body, schema) inventories = {} for (res_class, raw_inventory) in data['inventories'].items(): inventory_data = copy.copy(INVENTORY_DEFAULTS) inventory_data.update(raw_inventory) inventories[res_class] = inventory_data data['inventories'] = inventories return data
[ "def", "_extract_inventories", "(", "body", ",", "schema", ")", ":", "data", "=", "util", ".", "extract_json", "(", "body", ",", "schema", ")", "inventories", "=", "{", "}", "for", "(", "res_class", ",", "raw_inventory", ")", "in", "data", "[", "'invento...
extract and validate multiple inventories from json body .
train
false
29,815
@dispatch(Selection) def simple_selections(expr): return SimpleSelection(simple_selections(expr._child), simple_selections(expr.predicate))
[ "@", "dispatch", "(", "Selection", ")", "def", "simple_selections", "(", "expr", ")", ":", "return", "SimpleSelection", "(", "simple_selections", "(", "expr", ".", "_child", ")", ",", "simple_selections", "(", "expr", ".", "predicate", ")", ")" ]
cast all selection nodes into simpleselection nodes .
train
false
29,818
def explore_module(package): module = importlib.import_module(package) if (not hasattr(module, '__path__')): return [] for (_, name, _) in pkgutil.iter_modules(module.__path__, (package + '.')): (yield name)
[ "def", "explore_module", "(", "package", ")", ":", "module", "=", "importlib", ".", "import_module", "(", "package", ")", "if", "(", "not", "hasattr", "(", "module", ",", "'__path__'", ")", ")", ":", "return", "[", "]", "for", "(", "_", ",", "name", ...
explore the modules .
train
false
29,820
def message_if_missing(filename): return (_CLIENT_SECRETS_MESSAGE % filename)
[ "def", "message_if_missing", "(", "filename", ")", ":", "return", "(", "_CLIENT_SECRETS_MESSAGE", "%", "filename", ")" ]
helpful message to display if the client_secrets file is missing .
train
false
29,824
def _read_coil_def(fid): coildef = {'position': read_double_matrix(fid, 1, 3), 'orientation': read_double_matrix(fid, 1, 3), 'radius': read_double(fid), 'wire_radius': read_double(fid), 'turns': read_int16(fid)} fid.seek(fid, 2, 1) coildef['checksum'] = read_int32(fid) coildef['reserved'] = read_str(fid, 32)
[ "def", "_read_coil_def", "(", "fid", ")", ":", "coildef", "=", "{", "'position'", ":", "read_double_matrix", "(", "fid", ",", "1", ",", "3", ")", ",", "'orientation'", ":", "read_double_matrix", "(", "fid", ",", "1", ",", "3", ")", ",", "'radius'", ":"...
read coil definition .
train
false
29,825
def _computeOverlap(x, y): return ((x + y) == 2).sum()
[ "def", "_computeOverlap", "(", "x", ",", "y", ")", ":", "return", "(", "(", "x", "+", "y", ")", "==", "2", ")", ".", "sum", "(", ")" ]
given two binary arrays .
train
false
29,826
def test_cnn_fit_sample_with_indices(): cnn = CondensedNearestNeighbour(return_indices=True, random_state=RND_SEED) (X_resampled, y_resampled, idx_under) = cnn.fit_sample(X, Y) X_gt = np.array([[(-0.10903849), (-0.12085181)], [0.01936241, 0.17799828], [0.05230552, 0.09043907], [(-1.25020462), (-0.40402054)], [0.70524765, 0.39816382], [0.35831463, 1.33483198], [(-0.284881), (-0.62730973)], [0.03394306, 0.03986753], [(-0.01252787), 0.34102657], [0.15198585, 0.12512646]]) y_gt = np.array([0, 0, 1, 1, 1, 2, 2, 2, 2, 2]) idx_gt = np.array([4, 11, 17, 12, 19, 9, 5, 7, 14, 18]) assert_array_equal(X_resampled, X_gt) assert_array_equal(y_resampled, y_gt) assert_array_equal(idx_under, idx_gt)
[ "def", "test_cnn_fit_sample_with_indices", "(", ")", ":", "cnn", "=", "CondensedNearestNeighbour", "(", "return_indices", "=", "True", ",", "random_state", "=", "RND_SEED", ")", "(", "X_resampled", ",", "y_resampled", ",", "idx_under", ")", "=", "cnn", ".", "fit...
test the fit sample routine with indices support .
train
false
29,827
def find_path(kwargs, var, root): defaults = PATH_DEFAULTS[var] if (kwargs.get(var, None) is not None): path = kwargs.get(var) else: for default in defaults: if os.path.exists(resolve_path(default, root)): path = default break else: path = defaults[(-1)] return resolve_path(path, root)
[ "def", "find_path", "(", "kwargs", ",", "var", ",", "root", ")", ":", "defaults", "=", "PATH_DEFAULTS", "[", "var", "]", "if", "(", "kwargs", ".", "get", "(", "var", ",", "None", ")", "is", "not", "None", ")", ":", "path", "=", "kwargs", ".", "ge...
find a configuration path that may exist at different defaults .
train
false
29,828
def must_use_qt(headless=True): global gui_thread ensure_app(headless=headless) if (gui_thread is None): gui_thread = QThread.currentThread() if (gui_thread is not QThread.currentThread()): raise RuntimeError('Cannot use Qt in non GUI thread')
[ "def", "must_use_qt", "(", "headless", "=", "True", ")", ":", "global", "gui_thread", "ensure_app", "(", "headless", "=", "headless", ")", "if", "(", "gui_thread", "is", "None", ")", ":", "gui_thread", "=", "QThread", ".", "currentThread", "(", ")", "if", ...
this function should be called if you want to use qt for some non-gui task like rendering html/svg or using a headless browser .
train
false
29,829
def describe_replication_groups(name=None, conn=None, region=None, key=None, keyid=None, profile=None): return _describe_resource(name=name, name_param='ReplicationGroupId', res_type='replication_group', info_node='ReplicationGroups', conn=conn, region=region, key=key, keyid=keyid, profile=profile)
[ "def", "describe_replication_groups", "(", "name", "=", "None", ",", "conn", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "return", "_describe_resource", "(", "name", ...
return details about all elasticache replication groups .
train
true
29,830
def getIntersectionOfXIntersectionsTables(xIntersectionsTables): intersectionOfXIntersectionsTables = {} firstIntersectionTable = xIntersectionsTables[0] for firstIntersectionTableKey in firstIntersectionTable.keys(): xIntersectionIndexList = [] for xIntersectionsTableIndex in xrange(len(xIntersectionsTables)): xIntersectionsTable = xIntersectionsTables[xIntersectionsTableIndex] addXIntersectionIndexesFromXIntersections(xIntersectionsTableIndex, xIntersectionIndexList, xIntersectionsTable[firstIntersectionTableKey]) xIntersections = getIntersectionOfXIntersectionIndexes(len(xIntersectionsTables), xIntersectionIndexList) if (len(xIntersections) > 0): intersectionOfXIntersectionsTables[firstIntersectionTableKey] = xIntersections return intersectionOfXIntersectionsTables
[ "def", "getIntersectionOfXIntersectionsTables", "(", "xIntersectionsTables", ")", ":", "intersectionOfXIntersectionsTables", "=", "{", "}", "firstIntersectionTable", "=", "xIntersectionsTables", "[", "0", "]", "for", "firstIntersectionTableKey", "in", "firstIntersectionTable", ...
get the intersection of both xintersections tables .
train
false
29,831
def _kpoints(data, k): if (data.ndim > 1): n = data.shape[0] else: n = data.size p = np.random.permutation(n) x = data[p[:k], :].copy() return x
[ "def", "_kpoints", "(", "data", ",", "k", ")", ":", "if", "(", "data", ".", "ndim", ">", "1", ")", ":", "n", "=", "data", ".", "shape", "[", "0", "]", "else", ":", "n", "=", "data", ".", "size", "p", "=", "np", ".", "random", ".", "permutat...
pick k points at random in data .
train
false
29,833
def jobs_get_by_tag_range(jobid_range): job_list = [] for job_index in jobid_range: tag_pattern = ('%s-' % job_index) jobs = Job.objects.filter(tag__startswith=tag_pattern) job_list += jobs return job_list
[ "def", "jobs_get_by_tag_range", "(", "jobid_range", ")", ":", "job_list", "=", "[", "]", "for", "job_index", "in", "jobid_range", ":", "tag_pattern", "=", "(", "'%s-'", "%", "job_index", ")", "jobs", "=", "Job", ".", "objects", ".", "filter", "(", "tag__st...
return jobs based on range of job ids .
train
false
29,834
def bind_module_config(mod, conf_data, config_key): if hasattr(mod, 'CONFIGURATION_SECTION'): section = mod.CONFIGURATION_SECTION elif mod.__name__.endswith('.conf'): section = mod.__name__[:(- len('.conf'))] else: section = mod.__name__ if (config_key is None): bind_data = conf_data.get(section, {}) else: section = config_key bind_data = conf_data.get(config_key, {}) members = _bind_module_members(mod, bind_data, section) return ConfigSection(section, members=members, help=mod.__doc__)
[ "def", "bind_module_config", "(", "mod", ",", "conf_data", ",", "config_key", ")", ":", "if", "hasattr", "(", "mod", ",", "'CONFIGURATION_SECTION'", ")", ":", "section", "=", "mod", ".", "CONFIGURATION_SECTION", "elif", "mod", ".", "__name__", ".", "endswith",...
binds the configuration for the module to the given data .
train
false
29,835
def get_http_line(header_lines, http_methods): for header in header_lines: for method in http_methods: if header.startswith(method): http_line = header return http_line
[ "def", "get_http_line", "(", "header_lines", ",", "http_methods", ")", ":", "for", "header", "in", "header_lines", ":", "for", "method", "in", "http_methods", ":", "if", "header", ".", "startswith", "(", "method", ")", ":", "http_line", "=", "header", "retur...
get the header with the http command .
train
false
29,836
def addNegatives(derivation, negatives, paths): portionDirections = getSpacedPortionDirections(derivation.interpolationDictionary) for path in paths: endMultiplier = 1.000001 geometryOutput = trianglemesh.getPillarsOutput(getLoopListsByPath(endMultiplier, derivation, path, portionDirections)) negatives.append(geometryOutput)
[ "def", "addNegatives", "(", "derivation", ",", "negatives", ",", "paths", ")", ":", "portionDirections", "=", "getSpacedPortionDirections", "(", "derivation", ".", "interpolationDictionary", ")", "for", "path", "in", "paths", ":", "endMultiplier", "=", "1.000001", ...
add pillars output to negatives .
train
false
29,837
def dmp_nest(f, l, K): if (not isinstance(f, list)): return dmp_ground(f, l) for i in range(l): f = [f] return f
[ "def", "dmp_nest", "(", "f", ",", "l", ",", "K", ")", ":", "if", "(", "not", "isinstance", "(", "f", ",", "list", ")", ")", ":", "return", "dmp_ground", "(", "f", ",", "l", ")", "for", "i", "in", "range", "(", "l", ")", ":", "f", "=", "[", ...
return a multivariate value nested l-levels .
train
false
29,838
@contextmanager def override_current_site_id(site_id): override_current_site_id.thread_local.site_id = site_id (yield) del override_current_site_id.thread_local.site_id
[ "@", "contextmanager", "def", "override_current_site_id", "(", "site_id", ")", ":", "override_current_site_id", ".", "thread_local", ".", "site_id", "=", "site_id", "(", "yield", ")", "del", "override_current_site_id", ".", "thread_local", ".", "site_id" ]
context manager that overrides the current site id for code executed within it .
train
false
29,842
@raises(TestException) def test_abort_with_exception(): with settings(abort_exception=TestException): abort('Test')
[ "@", "raises", "(", "TestException", ")", "def", "test_abort_with_exception", "(", ")", ":", "with", "settings", "(", "abort_exception", "=", "TestException", ")", ":", "abort", "(", "'Test'", ")" ]
abort() should raise a provided exception .
train
false
29,843
def LoadSingleDos(dos_info, open_fn=None): builder = yaml_object.ObjectBuilder(DosInfoExternal) handler = yaml_builder.BuilderHandler(builder) listener = yaml_listener.EventListener(handler) listener.Parse(dos_info) parsed_yaml = handler.GetResults() if (not parsed_yaml): return DosInfoExternal() if (len(parsed_yaml) > 1): raise MalformedDosConfiguration('Multiple blacklist: sections in configuration.') return parsed_yaml[0]
[ "def", "LoadSingleDos", "(", "dos_info", ",", "open_fn", "=", "None", ")", ":", "builder", "=", "yaml_object", ".", "ObjectBuilder", "(", "DosInfoExternal", ")", "handler", "=", "yaml_builder", ".", "BuilderHandler", "(", "builder", ")", "listener", "=", "yaml...
load a dos .
train
false
29,844
def mod_check(x, y): if ((as_tensor_variable(x).dtype in complex_dtypes) or (as_tensor_variable(y).dtype in complex_dtypes)): raise scal.Mod.complex_error else: return mod(x, y)
[ "def", "mod_check", "(", "x", ",", "y", ")", ":", "if", "(", "(", "as_tensor_variable", "(", "x", ")", ".", "dtype", "in", "complex_dtypes", ")", "or", "(", "as_tensor_variable", "(", "y", ")", ".", "dtype", "in", "complex_dtypes", ")", ")", ":", "ra...
make sure we do not try to use complex numbers .
train
false
29,845
def exp(x): np = import_module('numpy') if isinstance(x, (int, float)): return interval(np.exp(x), np.exp(x)) elif isinstance(x, interval): return interval(np.exp(x.start), np.exp(x.end), is_valid=x.is_valid) else: raise NotImplementedError
[ "def", "exp", "(", "x", ")", ":", "np", "=", "import_module", "(", "'numpy'", ")", "if", "isinstance", "(", "x", ",", "(", "int", ",", "float", ")", ")", ":", "return", "interval", "(", "np", ".", "exp", "(", "x", ")", ",", "np", ".", "exp", ...
apply the exponential function to each element of the matrix mat .
train
false
29,847
def get_xml_text(element, mini_dom=False): text = '' if mini_dom: node = element for child in node.childNodes: if (child.nodeType in (Node.CDATA_SECTION_NODE, Node.TEXT_NODE)): text += child.data elif (element is not None): for child in ([element] + element.findall('.//*')): if child.text: text += child.text return text.strip()
[ "def", "get_xml_text", "(", "element", ",", "mini_dom", "=", "False", ")", ":", "text", "=", "''", "if", "mini_dom", ":", "node", "=", "element", "for", "child", "in", "node", ".", "childNodes", ":", "if", "(", "child", ".", "nodeType", "in", "(", "N...
get all text inside a xml element element: a xml element either created with elementtree .
train
false
29,850
def get_ppui_logintime(): return str(random.randint(52000, 58535))
[ "def", "get_ppui_logintime", "(", ")", ":", "return", "str", "(", "random", ".", "randint", "(", "52000", ",", "58535", ")", ")" ]
ppui_ligintime 这个字段 .
train
false
29,851
def decode_network_string(msgtype, plen, buf): return buf[header.size:(plen - 1)]
[ "def", "decode_network_string", "(", "msgtype", ",", "plen", ",", "buf", ")", ":", "return", "buf", "[", "header", ".", "size", ":", "(", "plen", "-", "1", ")", "]" ]
decodes a string from collectd network format .
train
false
29,852
@protocol.commands.add(u'pause', state=protocol.BOOL) def pause(context, state=None): if (state is None): deprecation.warn(u'mpd.protocol.playback.pause:state_arg') playback_state = context.core.playback.get_state().get() if (playback_state == PlaybackState.PLAYING): context.core.playback.pause().get() elif (playback_state == PlaybackState.PAUSED): context.core.playback.resume().get() elif state: context.core.playback.pause().get() else: context.core.playback.resume().get()
[ "@", "protocol", ".", "commands", ".", "add", "(", "u'pause'", ",", "state", "=", "protocol", ".", "BOOL", ")", "def", "pause", "(", "context", ",", "state", "=", "None", ")", ":", "if", "(", "state", "is", "None", ")", ":", "deprecation", ".", "wa...
pause stream display .
train
false
29,853
def _parse_image_ref(image_href): o = urlparse.urlparse(image_href) port = (o.port or 80) host = o.netloc.split(':', 1)[0] image_id = o.path.split('/')[(-1)] use_ssl = (o.scheme == 'https') return (image_id, host, port, use_ssl)
[ "def", "_parse_image_ref", "(", "image_href", ")", ":", "o", "=", "urlparse", ".", "urlparse", "(", "image_href", ")", "port", "=", "(", "o", ".", "port", "or", "80", ")", "host", "=", "o", ".", "netloc", ".", "split", "(", "':'", ",", "1", ")", ...
parse an image href into composite parts .
train
false
29,855
def libvlc_media_get_state(p_md): f = (_Cfunctions.get('libvlc_media_get_state', None) or _Cfunction('libvlc_media_get_state', ((1,),), None, State, Media)) return f(p_md)
[ "def", "libvlc_media_get_state", "(", "p_md", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_get_state'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_get_state'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ...
get current state of media descriptor object .
train
true
29,856
@task(rate_limit='3/h') @timeit def rebuild_kb(): cache.delete(settings.WIKI_REBUILD_TOKEN) d = Document.objects.using('default').filter(current_revision__isnull=False).values_list('id', flat=True) for chunk in chunked(d, 50): _rebuild_kb_chunk.apply_async(args=[chunk])
[ "@", "task", "(", "rate_limit", "=", "'3/h'", ")", "@", "timeit", "def", "rebuild_kb", "(", ")", ":", "cache", ".", "delete", "(", "settings", ".", "WIKI_REBUILD_TOKEN", ")", "d", "=", "Document", ".", "objects", ".", "using", "(", "'default'", ")", "....
re-render all documents in the kb in chunks .
train
false
29,857
def attributive(adjective): return adjective
[ "def", "attributive", "(", "adjective", ")", ":", "return", "adjective" ]
for a predicative adjective .
train
false
29,858
def _suppress_warnings(): import warnings import sys import os if (os.path.basename(sys.argv[0]) != 'trial'): warnings.simplefilter('ignore')
[ "def", "_suppress_warnings", "(", ")", ":", "import", "warnings", "import", "sys", "import", "os", "if", "(", "os", ".", "path", ".", "basename", "(", "sys", ".", "argv", "[", "0", "]", ")", "!=", "'trial'", ")", ":", "warnings", ".", "simplefilter", ...
suppress warnings when not running under trial .
train
false
29,859
def _checkout(requestedVersion): if (currentTag() == requestedVersion): return requestedVersion if (requestedVersion not in _localVersions(forceCheck=True)): msg = _translate("Couldn't find version {} locally. Trying github...") logging.info(msg.format(requestedVersion)) subprocess.check_output('git fetch github --tags'.split(), cwd=VERSIONSDIR) if (requestedVersion not in _localVersions(forceCheck=True)): msg = _translate('{} is not currently available.') logging.error(msg.format(requestedVersion)) return '' cmd = ['git', 'checkout', requestedVersion] out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, cwd=VERSIONSDIR) logging.debug(out) logging.exp(('Success: ' + ' '.join(cmd))) return requestedVersion
[ "def", "_checkout", "(", "requestedVersion", ")", ":", "if", "(", "currentTag", "(", ")", "==", "requestedVersion", ")", ":", "return", "requestedVersion", "if", "(", "requestedVersion", "not", "in", "_localVersions", "(", "forceCheck", "=", "True", ")", ")", ...
look for a maj .
train
false