Dataset schema (each record below carries these six fields, in this order):

    id_within_dataset      int64           1 to 55.5k
    snippet                stringlengths   19 to 14.2k
    tokens                 listlengths     6 to 1.63k
    nl                     stringlengths   6 to 352
    split_within_dataset   stringclasses   1 value
    is_duplicated          bool            2 classes
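As a minimal sketch of how records with this schema could be consumed (assuming a Hugging Face-style dataset; the identifier "code-nl-pairs" is a hypothetical placeholder, since this dump does not name its source):

    # Hypothetical loader for records matching the schema above.
    # "code-nl-pairs" is a placeholder name, not the real dataset id.
    from datasets import load_dataset

    ds = load_dataset("code-nl-pairs", split="train")

    # Drop records flagged as duplicates, mirroring the is_duplicated column.
    unique = ds.filter(lambda row: not row["is_duplicated"])

    # Inspect a few (natural-language description, code snippet) pairs.
    for row in unique.select(range(3)):
        print(row["id_within_dataset"], "-", row["nl"])
        print(row["snippet"][:80])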
49,794
def _in6_getifaddr(ifname):
    try:
        f = os.popen(('%s %s' % (conf.prog.ifconfig, ifname)))
    except OSError as msg:
        log_interactive.warning('Failed to execute ifconfig.')
        return []
    ret = []
    for line in f:
        if ('inet6' in line):
            addr = line.rstrip().split(None, 2)[1]
        else:
            continue
        if ('%' in line):
            addr = addr.split('%', 1)[0]
        try:
            socket.inet_pton(socket.AF_INET6, addr)
        except:
            continue
        scope = in6_getscope(addr)
        ret.append((addr, scope, ifname))
    return ret
[ "def", "_in6_getifaddr", "(", "ifname", ")", ":", "try", ":", "f", "=", "os", ".", "popen", "(", "(", "'%s %s'", "%", "(", "conf", ".", "prog", ".", "ifconfig", ",", "ifname", ")", ")", ")", "except", "OSError", "as", "msg", ":", "log_interactive", ".", "warning", "(", "'Failed to execute ifconfig.'", ")", "return", "[", "]", "ret", "=", "[", "]", "for", "line", "in", "f", ":", "if", "(", "'inet6'", "in", "line", ")", ":", "addr", "=", "line", ".", "rstrip", "(", ")", ".", "split", "(", "None", ",", "2", ")", "[", "1", "]", "else", ":", "continue", "if", "(", "'%'", "in", "line", ")", ":", "addr", "=", "addr", ".", "split", "(", "'%'", ",", "1", ")", "[", "0", "]", "try", ":", "socket", ".", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "addr", ")", "except", ":", "continue", "scope", "=", "in6_getscope", "(", "addr", ")", "ret", ".", "append", "(", "(", "addr", ",", "scope", ",", "ifname", ")", ")", "return", "ret" ]
returns a list of ipv6 addresses configured on the interface ifname .
train
true
49,795
def create_mock_ssh_script(path):
    with open(path, 'w') as f:
        f.write('#!/bin/sh\n')
        f.write(('%s %s "$@"\n' % (pipes.quote(sys.executable), pipes.quote(os.path.abspath(__file__)))))
    os.chmod(path, (stat.S_IREAD | stat.S_IEXEC))
[ "def", "create_mock_ssh_script", "(", "path", ")", ":", "with", "open", "(", "path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'#!/bin/sh\\n'", ")", "f", ".", "write", "(", "(", "'%s %s \"$@\"\\n'", "%", "(", "pipes", ".", "quote", "(", "sys", ".", "executable", ")", ",", "pipes", ".", "quote", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ")", ")", ")", "os", ".", "chmod", "(", "path", ",", "(", "stat", ".", "S_IREAD", "|", "stat", ".", "S_IEXEC", ")", ")" ]
dump a wrapper script to the given file object that runs this python script .
train
false
49,796
def get_impl_tag():
    return '{0}{1}'.format(get_abbr_impl(), get_impl_ver())
[ "def", "get_impl_tag", "(", ")", ":", "return", "'{0}{1}'", ".", "format", "(", "get_abbr_impl", "(", ")", ",", "get_impl_ver", "(", ")", ")" ]
returns the tag for this specific implementation .
train
false
49,798
@slow_test
@testing.requires_testing_data
def test_bads_reconstruction():
    raw = read_crop(raw_fname, (0.0, 1.0))
    raw.info['bads'] = bads
    raw_sss = maxwell_filter(raw, origin=mf_head_origin, regularize=None, bad_condition='ignore')
    assert_meg_snr(raw_sss, read_crop(sss_bad_recon_fname), 300.0)
[ "@", "slow_test", "@", "testing", ".", "requires_testing_data", "def", "test_bads_reconstruction", "(", ")", ":", "raw", "=", "read_crop", "(", "raw_fname", ",", "(", "0.0", ",", "1.0", ")", ")", "raw", ".", "info", "[", "'bads'", "]", "=", "bads", "raw_sss", "=", "maxwell_filter", "(", "raw", ",", "origin", "=", "mf_head_origin", ",", "regularize", "=", "None", ",", "bad_condition", "=", "'ignore'", ")", "assert_meg_snr", "(", "raw_sss", ",", "read_crop", "(", "sss_bad_recon_fname", ")", ",", "300.0", ")" ]
test maxwell filter reconstruction of bad channels .
train
false
49,799
@not_implemented_for('directed')
def normalized_laplacian_matrix(G, nodelist=None, weight='weight'):
    import scipy
    import scipy.sparse
    if (nodelist is None):
        nodelist = list(G)
    A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, format='csr')
    (n, m) = A.shape
    diags = A.sum(axis=1).flatten()
    D = scipy.sparse.spdiags(diags, [0], m, n, format='csr')
    L = (D - A)
    with scipy.errstate(divide='ignore'):
        diags_sqrt = (1.0 / scipy.sqrt(diags))
    diags_sqrt[scipy.isinf(diags_sqrt)] = 0
    DH = scipy.sparse.spdiags(diags_sqrt, [0], m, n, format='csr')
    return DH.dot(L.dot(DH))
[ "@", "not_implemented_for", "(", "'directed'", ")", "def", "normalized_laplacian_matrix", "(", "G", ",", "nodelist", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "import", "scipy", "import", "scipy", ".", "sparse", "if", "(", "nodelist", "is", "None", ")", ":", "nodelist", "=", "list", "(", "G", ")", "A", "=", "nx", ".", "to_scipy_sparse_matrix", "(", "G", ",", "nodelist", "=", "nodelist", ",", "weight", "=", "weight", ",", "format", "=", "'csr'", ")", "(", "n", ",", "m", ")", "=", "A", ".", "shape", "diags", "=", "A", ".", "sum", "(", "axis", "=", "1", ")", ".", "flatten", "(", ")", "D", "=", "scipy", ".", "sparse", ".", "spdiags", "(", "diags", ",", "[", "0", "]", ",", "m", ",", "n", ",", "format", "=", "'csr'", ")", "L", "=", "(", "D", "-", "A", ")", "with", "scipy", ".", "errstate", "(", "divide", "=", "'ignore'", ")", ":", "diags_sqrt", "=", "(", "1.0", "/", "scipy", ".", "sqrt", "(", "diags", ")", ")", "diags_sqrt", "[", "scipy", ".", "isinf", "(", "diags_sqrt", ")", "]", "=", "0", "DH", "=", "scipy", ".", "sparse", ".", "spdiags", "(", "diags_sqrt", ",", "[", "0", "]", ",", "m", ",", "n", ",", "format", "=", "'csr'", ")", "return", "DH", ".", "dot", "(", "L", ".", "dot", "(", "DH", ")", ")" ]
return the normalized laplacian matrix of g .
train
false
49,802
@register.filter(name='int2str')
def int2str(value):
    return str(value)
[ "@", "register", ".", "filter", "(", "name", "=", "'int2str'", ")", "def", "int2str", "(", "value", ")", ":", "return", "str", "(", "value", ")" ]
converts an int to a str .
train
false
49,803
def getSource(ao):
    return indentify(('app = ' + prettify(ao)))
[ "def", "getSource", "(", "ao", ")", ":", "return", "indentify", "(", "(", "'app = '", "+", "prettify", "(", "ao", ")", ")", ")" ]
pass me an ao .
train
false
49,804
def token_view(request):
    context = RequestContext(request, processors=[csrf])
    template = Template(u'{% csrf_token %}')
    return HttpResponse(template.render(context))
[ "def", "token_view", "(", "request", ")", ":", "context", "=", "RequestContext", "(", "request", ",", "processors", "=", "[", "csrf", "]", ")", "template", "=", "Template", "(", "u'{% csrf_token %}'", ")", "return", "HttpResponse", "(", "template", ".", "render", "(", "context", ")", ")" ]
a view that uses {% csrf_token %} .
train
false
49,805
def _is_attribute_explicitly_set(attribute_name, resource, target):
    return (('default' in resource[attribute_name]) and (attribute_name in target) and (target[attribute_name] is not attributes.ATTR_NOT_SPECIFIED) and (target[attribute_name] != resource[attribute_name]['default']))
[ "def", "_is_attribute_explicitly_set", "(", "attribute_name", ",", "resource", ",", "target", ")", ":", "return", "(", "(", "'default'", "in", "resource", "[", "attribute_name", "]", ")", "and", "(", "attribute_name", "in", "target", ")", "and", "(", "target", "[", "attribute_name", "]", "is", "not", "attributes", ".", "ATTR_NOT_SPECIFIED", ")", "and", "(", "target", "[", "attribute_name", "]", "!=", "resource", "[", "attribute_name", "]", "[", "'default'", "]", ")", ")" ]
verify that an attribute is present and has a non-default value .
train
false
49,807
def org_site_check(site_id, user_id=None):
    if user_id:
        auth.s3_impersonate(user_id)
    customise = settings.get_org_site_check()
    if customise:
        customise(site_id)
    db.commit()
[ "def", "org_site_check", "(", "site_id", ",", "user_id", "=", "None", ")", ":", "if", "user_id", ":", "auth", ".", "s3_impersonate", "(", "user_id", ")", "customise", "=", "settings", ".", "get_org_site_check", "(", ")", "if", "customise", ":", "customise", "(", "site_id", ")", "db", ".", "commit", "(", ")" ]
check the status for sites .
train
false
49,808
def check_if_parallel(dx1, dy1, dx2, dy2, tolerence=1e-05):
    theta1 = np.arctan2(dx1, dy1)
    theta2 = np.arctan2(dx2, dy2)
    dtheta = np.abs((theta1 - theta2))
    if (dtheta < tolerence):
        return 1
    elif (np.abs((dtheta - np.pi)) < tolerence):
        return (-1)
    else:
        return False
[ "def", "check_if_parallel", "(", "dx1", ",", "dy1", ",", "dx2", ",", "dy2", ",", "tolerence", "=", "1e-05", ")", ":", "theta1", "=", "np", ".", "arctan2", "(", "dx1", ",", "dy1", ")", "theta2", "=", "np", ".", "arctan2", "(", "dx2", ",", "dy2", ")", "dtheta", "=", "np", ".", "abs", "(", "(", "theta1", "-", "theta2", ")", ")", "if", "(", "dtheta", "<", "tolerence", ")", ":", "return", "1", "elif", "(", "np", ".", "abs", "(", "(", "dtheta", "-", "np", ".", "pi", ")", ")", "<", "tolerence", ")", ":", "return", "(", "-", "1", ")", "else", ":", "return", "False" ]
returns * 1 if two lines are parallel in same direction * -1 if two lines are parallel in opposite direction * 0 otherwise .
train
false
49,810
def isub(a, b):
    a -= b
    return a
[ "def", "isub", "(", "a", ",", "b", ")", ":", "a", "-=", "b", "return", "a" ]
same as a -= b .
train
false
49,811
def clusterSanity():
    topo = SingleSwitchTopo()
    net = MininetCluster(topo=topo)
    net.start()
    CLI(net)
    net.stop()
[ "def", "clusterSanity", "(", ")", ":", "topo", "=", "SingleSwitchTopo", "(", ")", "net", "=", "MininetCluster", "(", "topo", "=", "topo", ")", "net", ".", "start", "(", ")", "CLI", "(", "net", ")", "net", ".", "stop", "(", ")" ]
sanity check for cluster mode .
train
false
49,813
def _ProjectionPartialEntityGenerator(cursor):
    for row in cursor:
        entity_original = entity_pb.EntityProto(row[1])
        entity = entity_pb.EntityProto()
        entity.mutable_key().MergeFrom(entity_original.key())
        entity.mutable_entity_group().MergeFrom(entity_original.entity_group())
        for (name, value_data) in zip(row[2::2], row[3::2]):
            prop_to_add = entity.add_property()
            prop_to_add.set_name(ToUtf8(name))
            value_decoder = sortable_pb_encoder.Decoder(array.array('B', str(value_data)))
            prop_to_add.mutable_value().Merge(value_decoder)
            prop_to_add.set_multiple(False)
        datastore_stub_util.PrepareSpecialPropertiesForLoad(entity)
        (yield entity)
[ "def", "_ProjectionPartialEntityGenerator", "(", "cursor", ")", ":", "for", "row", "in", "cursor", ":", "entity_original", "=", "entity_pb", ".", "EntityProto", "(", "row", "[", "1", "]", ")", "entity", "=", "entity_pb", ".", "EntityProto", "(", ")", "entity", ".", "mutable_key", "(", ")", ".", "MergeFrom", "(", "entity_original", ".", "key", "(", ")", ")", "entity", ".", "mutable_entity_group", "(", ")", ".", "MergeFrom", "(", "entity_original", ".", "entity_group", "(", ")", ")", "for", "(", "name", ",", "value_data", ")", "in", "zip", "(", "row", "[", "2", ":", ":", "2", "]", ",", "row", "[", "3", ":", ":", "2", "]", ")", ":", "prop_to_add", "=", "entity", ".", "add_property", "(", ")", "prop_to_add", ".", "set_name", "(", "ToUtf8", "(", "name", ")", ")", "value_decoder", "=", "sortable_pb_encoder", ".", "Decoder", "(", "array", ".", "array", "(", "'B'", ",", "str", "(", "value_data", ")", ")", ")", "prop_to_add", ".", "mutable_value", "(", ")", ".", "Merge", "(", "value_decoder", ")", "prop_to_add", ".", "set_multiple", "(", "False", ")", "datastore_stub_util", ".", "PrepareSpecialPropertiesForLoad", "(", "entity", ")", "(", "yield", "entity", ")" ]
generator that creates partial entities for projection .
train
false
49,814
def server_error_404(request, template_name='404.html'):
    response = get_redirect_response(request, path=request.get_full_path())
    if response:
        return response
    r = render_to_response(template_name, context_instance=RequestContext(request))
    r.status_code = 404
    return r
[ "def", "server_error_404", "(", "request", ",", "template_name", "=", "'404.html'", ")", ":", "response", "=", "get_redirect_response", "(", "request", ",", "path", "=", "request", ".", "get_full_path", "(", ")", ")", "if", "response", ":", "return", "response", "r", "=", "render_to_response", "(", "template_name", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")", "r", ".", "status_code", "=", "404", "return", "r" ]
a simple 404 handler so we get media .
train
false
49,815
def check_consistent_length(*arrays):
    lengths = [_num_samples(X) for X in arrays if (X is not None)]
    uniques = np.unique(lengths)
    if (len(uniques) > 1):
        raise ValueError(('Found input variables with inconsistent numbers of samples: %r' % [int(l) for l in lengths]))
[ "def", "check_consistent_length", "(", "*", "arrays", ")", ":", "lengths", "=", "[", "_num_samples", "(", "X", ")", "for", "X", "in", "arrays", "if", "(", "X", "is", "not", "None", ")", "]", "uniques", "=", "np", ".", "unique", "(", "lengths", ")", "if", "(", "len", "(", "uniques", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "(", "'Found input variables with inconsistent numbers of samples: %r'", "%", "[", "int", "(", "l", ")", "for", "l", "in", "lengths", "]", ")", ")" ]
check that all arrays have consistent first dimensions .
train
false
49,816
def construct_author2doc(corpus, doc2author):
    authors_ids = set()
    for (d, a_doc_ids) in doc2author.items():
        for a in a_doc_ids:
            authors_ids.add(a)
    author2doc = {}
    for a in authors_ids:
        author2doc[a] = []
        for (d, a_ids) in doc2author.items():
            if (a in a_ids):
                author2doc[a].append(d)
    return author2doc
[ "def", "construct_author2doc", "(", "corpus", ",", "doc2author", ")", ":", "authors_ids", "=", "set", "(", ")", "for", "(", "d", ",", "a_doc_ids", ")", "in", "doc2author", ".", "items", "(", ")", ":", "for", "a", "in", "a_doc_ids", ":", "authors_ids", ".", "add", "(", "a", ")", "author2doc", "=", "{", "}", "for", "a", "in", "authors_ids", ":", "author2doc", "[", "a", "]", "=", "[", "]", "for", "(", "d", ",", "a_ids", ")", "in", "doc2author", ".", "items", "(", ")", ":", "if", "(", "a", "in", "a_ids", ")", ":", "author2doc", "[", "a", "]", ".", "append", "(", "d", ")", "return", "author2doc" ]
make a mapping from author ids to document ids .
train
false
49,817
def _check_path_overlap(path):
    from ..config import CONF
    specerr = 0
    if CONF['workpath'].startswith(path):
        logger.error('Specfile error: The output path "%s" contains WORKPATH (%s)', path, CONF['workpath'])
        specerr += 1
    if CONF['specpath'].startswith(path):
        logger.error('Specfile error: The output path "%s" contains SPECPATH (%s)', path, CONF['specpath'])
        specerr += 1
    if specerr:
        raise SystemExit(('Error: Please edit/recreate the specfile (%s) and set a different output name (e.g. "dist").' % CONF['spec']))
    return True
[ "def", "_check_path_overlap", "(", "path", ")", ":", "from", ".", ".", "config", "import", "CONF", "specerr", "=", "0", "if", "CONF", "[", "'workpath'", "]", ".", "startswith", "(", "path", ")", ":", "logger", ".", "error", "(", "'Specfile error: The output path \"%s\" contains WORKPATH (%s)'", ",", "path", ",", "CONF", "[", "'workpath'", "]", ")", "specerr", "+=", "1", "if", "CONF", "[", "'specpath'", "]", ".", "startswith", "(", "path", ")", ":", "logger", ".", "error", "(", "'Specfile error: The output path \"%s\" contains SPECPATH (%s)'", ",", "path", ",", "CONF", "[", "'specpath'", "]", ")", "specerr", "+=", "1", "if", "specerr", ":", "raise", "SystemExit", "(", "(", "'Error: Please edit/recreate the specfile (%s) and set a different output name (e.g. \"dist\").'", "%", "CONF", "[", "'spec'", "]", ")", ")", "return", "True" ]
check that path does not overlap with workpath or specpath raise systemexit if there is overlap .
train
false
49,818
def se(actual, predicted):
    return np.power((np.array(actual) - np.array(predicted)), 2)
[ "def", "se", "(", "actual", ",", "predicted", ")", ":", "return", "np", ".", "power", "(", "(", "np", ".", "array", "(", "actual", ")", "-", "np", ".", "array", "(", "predicted", ")", ")", ",", "2", ")" ]
computes the squared error .
train
true
49,820
@receiver(post_save, sender=Story)
def update_successstories_supernav(sender, instance, signal, created, **kwargs):
    if kwargs.get('raw', False):
        return
    if (instance.is_published and instance.featured):
        content = render_to_string('successstories/supernav.html', {'story': instance})
        (box, _) = Box.objects.get_or_create(label='supernav-python-success-stories')
        box.content = content
        box.save()
[ "@", "receiver", "(", "post_save", ",", "sender", "=", "Story", ")", "def", "update_successstories_supernav", "(", "sender", ",", "instance", ",", "signal", ",", "created", ",", "**", "kwargs", ")", ":", "if", "kwargs", ".", "get", "(", "'raw'", ",", "False", ")", ":", "return", "if", "(", "instance", ".", "is_published", "and", "instance", ".", "featured", ")", ":", "content", "=", "render_to_string", "(", "'successstories/supernav.html'", ",", "{", "'story'", ":", "instance", "}", ")", "(", "box", ",", "_", ")", "=", "Box", ".", "objects", ".", "get_or_create", "(", "label", "=", "'supernav-python-success-stories'", ")", "box", ".", "content", "=", "content", "box", ".", "save", "(", ")" ]
update download supernav .
train
false
49,821
def margeff_cov_params(model, params, exog, cov_params, at, derivative, dummy_ind, count_ind, method, J):
    if callable(derivative):
        from statsmodels.tools.numdiff import approx_fprime_cs
        params = params.ravel('F')
        try:
            jacobian_mat = approx_fprime_cs(params, derivative, args=(exog, method))
        except TypeError:
            from statsmodels.tools.numdiff import approx_fprime
            jacobian_mat = approx_fprime(params, derivative, args=(exog, method))
        if (at == 'overall'):
            jacobian_mat = np.mean(jacobian_mat, axis=1)
        else:
            jacobian_mat = jacobian_mat.squeeze()
        if (dummy_ind is not None):
            jacobian_mat = _margeff_cov_params_dummy(model, jacobian_mat, params, exog, dummy_ind, method, J)
        if (count_ind is not None):
            jacobian_mat = _margeff_cov_params_count(model, jacobian_mat, params, exog, count_ind, method, J)
    else:
        jacobian_mat = derivative
    return np.dot(np.dot(jacobian_mat, cov_params), jacobian_mat.T)
[ "def", "margeff_cov_params", "(", "model", ",", "params", ",", "exog", ",", "cov_params", ",", "at", ",", "derivative", ",", "dummy_ind", ",", "count_ind", ",", "method", ",", "J", ")", ":", "if", "callable", "(", "derivative", ")", ":", "from", "statsmodels", ".", "tools", ".", "numdiff", "import", "approx_fprime_cs", "params", "=", "params", ".", "ravel", "(", "'F'", ")", "try", ":", "jacobian_mat", "=", "approx_fprime_cs", "(", "params", ",", "derivative", ",", "args", "=", "(", "exog", ",", "method", ")", ")", "except", "TypeError", ":", "from", "statsmodels", ".", "tools", ".", "numdiff", "import", "approx_fprime", "jacobian_mat", "=", "approx_fprime", "(", "params", ",", "derivative", ",", "args", "=", "(", "exog", ",", "method", ")", ")", "if", "(", "at", "==", "'overall'", ")", ":", "jacobian_mat", "=", "np", ".", "mean", "(", "jacobian_mat", ",", "axis", "=", "1", ")", "else", ":", "jacobian_mat", "=", "jacobian_mat", ".", "squeeze", "(", ")", "if", "(", "dummy_ind", "is", "not", "None", ")", ":", "jacobian_mat", "=", "_margeff_cov_params_dummy", "(", "model", ",", "jacobian_mat", ",", "params", ",", "exog", ",", "dummy_ind", ",", "method", ",", "J", ")", "if", "(", "count_ind", "is", "not", "None", ")", ":", "jacobian_mat", "=", "_margeff_cov_params_count", "(", "model", ",", "jacobian_mat", ",", "params", ",", "exog", ",", "count_ind", ",", "method", ",", "J", ")", "else", ":", "jacobian_mat", "=", "derivative", "return", "np", ".", "dot", "(", "np", ".", "dot", "(", "jacobian_mat", ",", "cov_params", ")", ",", "jacobian_mat", ".", "T", ")" ]
computes the variance-covariance of marginal effects by the delta method .
train
false
49,822
def all_events(number=(-1), etag=None):
    return gh.all_events(number, etag)
[ "def", "all_events", "(", "number", "=", "(", "-", "1", ")", ",", "etag", "=", "None", ")", ":", "return", "gh", ".", "all_events", "(", "number", ",", "etag", ")" ]
iterate over public events .
train
false
49,823
def create_assets_env(generator):
    theme_static_dir = generator.settings[u'THEME_STATIC_DIR']
    assets_destination = os.path.join(generator.output_path, theme_static_dir)
    generator.env.assets_environment = Environment(assets_destination, theme_static_dir)
    if (u'ASSET_CONFIG' in generator.settings):
        for item in generator.settings[u'ASSET_CONFIG']:
            generator.env.assets_environment.config[item[0]] = item[1]
    if (u'ASSET_BUNDLES' in generator.settings):
        for (name, args, kwargs) in generator.settings[u'ASSET_BUNDLES']:
            generator.env.assets_environment.register(name, *args, **kwargs)
    if (u'ASSET_DEBUG' in generator.settings):
        generator.env.assets_environment.debug = generator.settings[u'ASSET_DEBUG']
    elif (logging.getLevelName(logger.getEffectiveLevel()) == u'DEBUG'):
        generator.env.assets_environment.debug = True
    for path in (generator.settings[u'THEME_STATIC_PATHS'] + generator.settings.get(u'ASSET_SOURCE_PATHS', [])):
        full_path = os.path.join(generator.theme, path)
        generator.env.assets_environment.append_path(full_path)
[ "def", "create_assets_env", "(", "generator", ")", ":", "theme_static_dir", "=", "generator", ".", "settings", "[", "u'THEME_STATIC_DIR'", "]", "assets_destination", "=", "os", ".", "path", ".", "join", "(", "generator", ".", "output_path", ",", "theme_static_dir", ")", "generator", ".", "env", ".", "assets_environment", "=", "Environment", "(", "assets_destination", ",", "theme_static_dir", ")", "if", "(", "u'ASSET_CONFIG'", "in", "generator", ".", "settings", ")", ":", "for", "item", "in", "generator", ".", "settings", "[", "u'ASSET_CONFIG'", "]", ":", "generator", ".", "env", ".", "assets_environment", ".", "config", "[", "item", "[", "0", "]", "]", "=", "item", "[", "1", "]", "if", "(", "u'ASSET_BUNDLES'", "in", "generator", ".", "settings", ")", ":", "for", "(", "name", ",", "args", ",", "kwargs", ")", "in", "generator", ".", "settings", "[", "u'ASSET_BUNDLES'", "]", ":", "generator", ".", "env", ".", "assets_environment", ".", "register", "(", "name", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "u'ASSET_DEBUG'", "in", "generator", ".", "settings", ")", ":", "generator", ".", "env", ".", "assets_environment", ".", "debug", "=", "generator", ".", "settings", "[", "u'ASSET_DEBUG'", "]", "elif", "(", "logging", ".", "getLevelName", "(", "logger", ".", "getEffectiveLevel", "(", ")", ")", "==", "u'DEBUG'", ")", ":", "generator", ".", "env", ".", "assets_environment", ".", "debug", "=", "True", "for", "path", "in", "(", "generator", ".", "settings", "[", "u'THEME_STATIC_PATHS'", "]", "+", "generator", ".", "settings", ".", "get", "(", "u'ASSET_SOURCE_PATHS'", ",", "[", "]", ")", ")", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "generator", ".", "theme", ",", "path", ")", "generator", ".", "env", ".", "assets_environment", ".", "append_path", "(", "full_path", ")" ]
define the assets environment and pass it to the generator .
train
true
49,824
def setXMLElementDictionaryToOtherElementDictionary(fromXMLElement, matrix4X4, prefix, xmlElement):
    matrix4X4.getFromXMLElement(prefix, fromXMLElement)
    setAttributeDictionaryToMatrix(xmlElement.attributeDictionary, matrix4X4)
[ "def", "setXMLElementDictionaryToOtherElementDictionary", "(", "fromXMLElement", ",", "matrix4X4", ",", "prefix", ",", "xmlElement", ")", ":", "matrix4X4", ".", "getFromXMLElement", "(", "prefix", ",", "fromXMLElement", ")", "setAttributeDictionaryToMatrix", "(", "xmlElement", ".", "attributeDictionary", ",", "matrix4X4", ")" ]
set the xml element to the matrix attribute dictionary .
train
false
49,825
def libvlc_media_player_navigate(p_mi, navigate):
    f = (_Cfunctions.get('libvlc_media_player_navigate', None) or _Cfunction('libvlc_media_player_navigate', ((1,), (1,)), None, None, MediaPlayer, ctypes.c_uint))
    return f(p_mi, navigate)
[ "def", "libvlc_media_player_navigate", "(", "p_mi", ",", "navigate", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_navigate'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_navigate'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_uint", ")", ")", "return", "f", "(", "p_mi", ",", "navigate", ")" ]
navigate through dvd menu .
train
true
49,826
def get_random_cohort(course_key):
    course = courses.get_course(course_key)
    cohorts = get_course_cohorts(course, assignment_type=CourseCohort.RANDOM)
    if cohorts:
        cohort = local_random().choice(cohorts)
    else:
        cohort = CourseCohort.create(cohort_name=DEFAULT_COHORT_NAME, course_id=course_key, assignment_type=CourseCohort.RANDOM).course_user_group
    return cohort
[ "def", "get_random_cohort", "(", "course_key", ")", ":", "course", "=", "courses", ".", "get_course", "(", "course_key", ")", "cohorts", "=", "get_course_cohorts", "(", "course", ",", "assignment_type", "=", "CourseCohort", ".", "RANDOM", ")", "if", "cohorts", ":", "cohort", "=", "local_random", "(", ")", ".", "choice", "(", "cohorts", ")", "else", ":", "cohort", "=", "CourseCohort", ".", "create", "(", "cohort_name", "=", "DEFAULT_COHORT_NAME", ",", "course_id", "=", "course_key", ",", "assignment_type", "=", "CourseCohort", ".", "RANDOM", ")", ".", "course_user_group", "return", "cohort" ]
helper method to get a cohort for random assignment .
train
false
49,827
def isenumclass(x):
    if (enum is None):
        return False
    return issubclass(x, enum.Enum)
[ "def", "isenumclass", "(", "x", ")", ":", "if", "(", "enum", "is", "None", ")", ":", "return", "False", "return", "issubclass", "(", "x", ",", "enum", ".", "Enum", ")" ]
check if the object is subclass of enum .
train
false
49,828
def getResponse(direction):
    event.clearEvents()
    while 1:
        for key in event.getKeys():
            if (key in ['escape', 'q']):
                win.close()
                core.quit()
            elif (key in ['down', 'up']):
                if (((key in ['down']) and (direction == (-1))) or ((key in ['up']) and (direction == (+ 1)))):
                    return 0
                else:
                    return 1
            else:
                print ('hit DOWN or UP (or Esc) (You hit %s)' % key)
[ "def", "getResponse", "(", "direction", ")", ":", "event", ".", "clearEvents", "(", ")", "while", "1", ":", "for", "key", "in", "event", ".", "getKeys", "(", ")", ":", "if", "(", "key", "in", "[", "'escape'", ",", "'q'", "]", ")", ":", "win", ".", "close", "(", ")", "core", ".", "quit", "(", ")", "elif", "(", "key", "in", "[", "'down'", ",", "'up'", "]", ")", ":", "if", "(", "(", "(", "key", "in", "[", "'down'", "]", ")", "and", "(", "direction", "==", "(", "-", "1", ")", ")", ")", "or", "(", "(", "key", "in", "[", "'up'", "]", ")", "and", "(", "direction", "==", "(", "+", "1", ")", ")", ")", ")", ":", "return", "0", "else", ":", "return", "1", "else", ":", "print", "(", "'hit DOWN or UP (or Esc) (You hit %s)'", "%", "key", ")" ]
if subject said up when direction was up then increase gamma otherwise .
train
false
49,829
def funshion_download_by_url(url, output_dir='.', merge=False, info_only=False):
    if re.match('http://www.fun.tv/vplay/v-(\\w+)', url):
        match = re.search('http://www.fun.tv/vplay/v-(\\d+)(.?)', url)
        vid = match.group(1)
        funshion_download_by_vid(vid, output_dir=output_dir, merge=merge, info_only=info_only)
[ "def", "funshion_download_by_url", "(", "url", ",", "output_dir", "=", "'.'", ",", "merge", "=", "False", ",", "info_only", "=", "False", ")", ":", "if", "re", ".", "match", "(", "'http://www.fun.tv/vplay/v-(\\\\w+)'", ",", "url", ")", ":", "match", "=", "re", ".", "search", "(", "'http://www.fun.tv/vplay/v-(\\\\d+)(.?)'", ",", "url", ")", "vid", "=", "match", ".", "group", "(", "1", ")", "funshion_download_by_vid", "(", "vid", ",", "output_dir", "=", "output_dir", ",", "merge", "=", "merge", ",", "info_only", "=", "info_only", ")" ]
lots of stuff->none main wrapper for single video download .
train
false
49,831
@pytest.mark.parametrize('parallel', [pytest.mark.xfail((os.name == 'nt'), reason='Multiprocessing is currently unsupported on Windows')(True), False])
def test_fortran_invalid_exp(parallel):
    if (parallel and TRAVIS):
        pytest.xfail('Multiprocessing can sometimes fail on Travis CI')
    fields = ['1.0001+1', '.42d1', '2.3+10', '0.5', '3+1001', '3000.', '2', '4.56e-2.3', '8000', '4.2-122']
    values = ['1.0001+1', 4.2, '2.3+10', 0.5, '3+1001', 3000.0, 2, '4.56e-2.3', 8000, 4.2e-122]
    t = ascii.read(StringIO(' '.join(fields)), format='no_header', guess=False, fast_reader={'parallel': parallel, 'exponent_style': 'A'})
    read_values = [col[0] for col in t.itercols()]
    assert (read_values == values)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'parallel'", ",", "[", "pytest", ".", "mark", ".", "xfail", "(", "(", "os", ".", "name", "==", "'nt'", ")", ",", "reason", "=", "'Multiprocessing is currently unsupported on Windows'", ")", "(", "True", ")", ",", "False", "]", ")", "def", "test_fortran_invalid_exp", "(", "parallel", ")", ":", "if", "(", "parallel", "and", "TRAVIS", ")", ":", "pytest", ".", "xfail", "(", "'Multiprocessing can sometimes fail on Travis CI'", ")", "fields", "=", "[", "'1.0001+1'", ",", "'.42d1'", ",", "'2.3+10'", ",", "'0.5'", ",", "'3+1001'", ",", "'3000.'", ",", "'2'", ",", "'4.56e-2.3'", ",", "'8000'", ",", "'4.2-122'", "]", "values", "=", "[", "'1.0001+1'", ",", "4.2", ",", "'2.3+10'", ",", "0.5", ",", "'3+1001'", ",", "3000.0", ",", "2", ",", "'4.56e-2.3'", ",", "8000", ",", "4.2e-122", "]", "t", "=", "ascii", ".", "read", "(", "StringIO", "(", "' '", ".", "join", "(", "fields", ")", ")", ",", "format", "=", "'no_header'", ",", "guess", "=", "False", ",", "fast_reader", "=", "{", "'parallel'", ":", "parallel", ",", "'exponent_style'", ":", "'A'", "}", ")", "read_values", "=", "[", "col", "[", "0", "]", "for", "col", "in", "t", ".", "itercols", "(", ")", "]", "assert", "(", "read_values", "==", "values", ")" ]
test fortran-style exponential notation in the fast_reader with invalid exponent-like patterns to make sure they are returned as strings instead .
train
false
49,832
def reregister(model_class):
    def decorator(cls):
        '\n        The actual decorator that does the work.\n        '
        site.unregister(model_class)
        site.register(model_class, cls)
        return cls
    return decorator
[ "def", "reregister", "(", "model_class", ")", ":", "def", "decorator", "(", "cls", ")", ":", "site", ".", "unregister", "(", "model_class", ")", "site", ".", "register", "(", "model_class", ",", "cls", ")", "return", "cls", "return", "decorator" ]
remove the existing admin .
train
false
49,833
def Line(data=None, x=None, y=None, **kws):
    kws['x'] = x
    kws['y'] = y
    return create_and_build(LineBuilder, data, **kws)
[ "def", "Line", "(", "data", "=", "None", ",", "x", "=", "None", ",", "y", "=", "None", ",", "**", "kws", ")", ":", "kws", "[", "'x'", "]", "=", "x", "kws", "[", "'y'", "]", "=", "y", "return", "create_and_build", "(", "LineBuilder", ",", "data", ",", "**", "kws", ")" ]
create a line chart using :class:linebuilder <bokeh .
train
false
49,834
def remove_projection(X, w):
    return [remove_projection_from_vector(x_i, w) for x_i in X]
[ "def", "remove_projection", "(", "X", ",", "w", ")", ":", "return", "[", "remove_projection_from_vector", "(", "x_i", ",", "w", ")", "for", "x_i", "in", "X", "]" ]
for each row of x projects the row onto w .
train
false
49,835
def acovf_fft(x, demean=True):
    from scipy import signal
    x = np.asarray(x)
    if demean:
        x = (x - x.mean())
    (signal.fftconvolve(x, x[::(-1)])[(len(x) - 1):(len(x) + 10)] / x.shape[0])
[ "def", "acovf_fft", "(", "x", ",", "demean", "=", "True", ")", ":", "from", "scipy", "import", "signal", "x", "=", "np", ".", "asarray", "(", "x", ")", "if", "demean", ":", "x", "=", "(", "x", "-", "x", ".", "mean", "(", ")", ")", "(", "signal", ".", "fftconvolve", "(", "x", ",", "x", "[", ":", ":", "(", "-", "1", ")", "]", ")", "[", "(", "len", "(", "x", ")", "-", "1", ")", ":", "(", "len", "(", "x", ")", "+", "10", ")", "]", "/", "x", ".", "shape", "[", "0", "]", ")" ]
autocovariance function with call to fftconvolve .
train
false
49,837
def generate_totp(secret, period=30, timestamp=None):
    if (timestamp is None):
        timestamp = time.time()
    counter = (int(timestamp) // period)
    return generate_hotp(secret, counter)
[ "def", "generate_totp", "(", "secret", ",", "period", "=", "30", ",", "timestamp", "=", "None", ")", ":", "if", "(", "timestamp", "is", "None", ")", ":", "timestamp", "=", "time", ".", "time", "(", ")", "counter", "=", "(", "int", "(", "timestamp", ")", "//", "period", ")", "return", "generate_hotp", "(", "secret", ",", "counter", ")" ]
generate a totp code .
train
true
49,838
def mimic_wrap(lines, wrap_at=65, **kwargs):
    l0 = textwrap.fill(lines[0], wrap_at, drop_whitespace=False).split(u'\n')
    (yield l0)

    def _(line):
        il0 = 0
        while (line and (il0 < (len(l0) - 1))):
            (yield line[:len(l0[il0])])
            line = line[len(l0[il0]):]
            il0 += 1
        if line:
            for ln in textwrap.fill(line, wrap_at, drop_whitespace=False).split(u'\n'):
                (yield ln)

    for l in lines[1:]:
        (yield list(_(l)))
[ "def", "mimic_wrap", "(", "lines", ",", "wrap_at", "=", "65", ",", "**", "kwargs", ")", ":", "l0", "=", "textwrap", ".", "fill", "(", "lines", "[", "0", "]", ",", "wrap_at", ",", "drop_whitespace", "=", "False", ")", ".", "split", "(", "u'\\n'", ")", "(", "yield", "l0", ")", "def", "_", "(", "line", ")", ":", "il0", "=", "0", "while", "(", "line", "and", "(", "il0", "<", "(", "len", "(", "l0", ")", "-", "1", ")", ")", ")", ":", "(", "yield", "line", "[", ":", "len", "(", "l0", "[", "il0", "]", ")", "]", ")", "line", "=", "line", "[", "len", "(", "l0", "[", "il0", "]", ")", ":", "]", "il0", "+=", "1", "if", "line", ":", "for", "ln", "in", "textwrap", ".", "fill", "(", "line", ",", "wrap_at", ",", "drop_whitespace", "=", "False", ")", ".", "split", "(", "u'\\n'", ")", ":", "(", "yield", "ln", ")", "for", "l", "in", "lines", "[", "1", ":", "]", ":", "(", "yield", "list", "(", "_", "(", "l", ")", ")", ")" ]
wrap the first of lines with textwrap and the remaining lines at exactly the same positions as the first .
train
false
49,839
def _same_ImageCollection(collection1, collection2):
    if (len(collection1) != len(collection2)):
        return False
    for (ext1, ext2) in zip(collection1, collection2):
        if (not np.all((ext1 == ext2))):
            return False
    return True
[ "def", "_same_ImageCollection", "(", "collection1", ",", "collection2", ")", ":", "if", "(", "len", "(", "collection1", ")", "!=", "len", "(", "collection2", ")", ")", ":", "return", "False", "for", "(", "ext1", ",", "ext2", ")", "in", "zip", "(", "collection1", ",", "collection2", ")", ":", "if", "(", "not", "np", ".", "all", "(", "(", "ext1", "==", "ext2", ")", ")", ")", ":", "return", "False", "return", "True" ]
ancillary function to compare two imagecollection objects .
train
false
49,840
def read_sns(fname):
    p = re.compile((('\\d,[A-Za-z]*,([\\.\\-0-9]+),' + '([\\.\\-0-9]+),([\\.\\-0-9]+),') + '([\\.\\-0-9]+),([\\.\\-0-9]+)'))
    with open(fname) as fid:
        locs = np.array(p.findall(fid.read()), dtype=float)
    return locs
[ "def", "read_sns", "(", "fname", ")", ":", "p", "=", "re", ".", "compile", "(", "(", "(", "'\\\\d,[A-Za-z]*,([\\\\.\\\\-0-9]+),'", "+", "'([\\\\.\\\\-0-9]+),([\\\\.\\\\-0-9]+),'", ")", "+", "'([\\\\.\\\\-0-9]+),([\\\\.\\\\-0-9]+)'", ")", ")", "with", "open", "(", "fname", ")", "as", "fid", ":", "locs", "=", "np", ".", "array", "(", "p", ".", "findall", "(", "fid", ".", "read", "(", ")", ")", ",", "dtype", "=", "float", ")", "return", "locs" ]
sensor coordinate extraction in meg space .
train
false
49,842
def revoke_auth(preserve_minion_cache=False):
    masters = list()
    ret = True
    if ('master_uri_list' in __opts__):
        for master_uri in __opts__['master_uri_list']:
            masters.append(master_uri)
    else:
        masters.append(__opts__['master_uri'])
    for master in masters:
        channel = salt.transport.Channel.factory(__opts__, master_uri=master)
        tok = channel.auth.gen_token('salt')
        load = {'cmd': 'revoke_auth', 'id': __opts__['id'], 'tok': tok, 'preserve_minion_cache': preserve_minion_cache}
        try:
            channel.send(load)
        except SaltReqTimeoutError:
            ret = False
    return ret
[ "def", "revoke_auth", "(", "preserve_minion_cache", "=", "False", ")", ":", "masters", "=", "list", "(", ")", "ret", "=", "True", "if", "(", "'master_uri_list'", "in", "__opts__", ")", ":", "for", "master_uri", "in", "__opts__", "[", "'master_uri_list'", "]", ":", "masters", ".", "append", "(", "master_uri", ")", "else", ":", "masters", ".", "append", "(", "__opts__", "[", "'master_uri'", "]", ")", "for", "master", "in", "masters", ":", "channel", "=", "salt", ".", "transport", ".", "Channel", ".", "factory", "(", "__opts__", ",", "master_uri", "=", "master", ")", "tok", "=", "channel", ".", "auth", ".", "gen_token", "(", "'salt'", ")", "load", "=", "{", "'cmd'", ":", "'revoke_auth'", ",", "'id'", ":", "__opts__", "[", "'id'", "]", ",", "'tok'", ":", "tok", ",", "'preserve_minion_cache'", ":", "preserve_minion_cache", "}", "try", ":", "channel", ".", "send", "(", "load", ")", "except", "SaltReqTimeoutError", ":", "ret", "=", "False", "return", "ret" ]
the minion sends a request to the master to revoke its own key .
train
true
49,844
def random_product(*args, **kwargs):
    repeat = kwargs.pop('repeat', 1)
    if (kwargs != {}):
        raise TypeError(('random_product() does not support argument %s' % kwargs.popitem()))
    pools = (map(tuple, args) * repeat)
    return tuple((random.choice(pool) for pool in pools))
[ "def", "random_product", "(", "*", "args", ",", "**", "kwargs", ")", ":", "repeat", "=", "kwargs", ".", "pop", "(", "'repeat'", ",", "1", ")", "if", "(", "kwargs", "!=", "{", "}", ")", ":", "raise", "TypeError", "(", "(", "'random_product() does not support argument %s'", "%", "kwargs", ".", "popitem", "(", ")", ")", ")", "pools", "=", "(", "map", "(", "tuple", ",", "args", ")", "*", "repeat", ")", "return", "tuple", "(", "(", "random", ".", "choice", "(", "pool", ")", "for", "pool", "in", "pools", ")", ")" ]
random_product -> tuple arguments: args: one or more iterables repeat: number of times to repeat args .
train
false
49,845
@snippet
def sink_bigquery(client, to_delete):
    dataset = _sink_bigquery_setup(client)
    to_delete.append(dataset)
    SINK_NAME = ('robots-bigquery-%d' % (_millis(),))
    FILTER = 'textPayload:robot'
    DESTINATION = ('bigquery.googleapis.com%s' % (dataset.path,))
    sink = client.sink(SINK_NAME, filter_=FILTER, destination=DESTINATION)
    assert (not sink.exists())
    sink.create()
    assert sink.exists()
    to_delete.insert(0, sink)
[ "@", "snippet", "def", "sink_bigquery", "(", "client", ",", "to_delete", ")", ":", "dataset", "=", "_sink_bigquery_setup", "(", "client", ")", "to_delete", ".", "append", "(", "dataset", ")", "SINK_NAME", "=", "(", "'robots-bigquery-%d'", "%", "(", "_millis", "(", ")", ",", ")", ")", "FILTER", "=", "'textPayload:robot'", "DESTINATION", "=", "(", "'bigquery.googleapis.com%s'", "%", "(", "dataset", ".", "path", ",", ")", ")", "sink", "=", "client", ".", "sink", "(", "SINK_NAME", ",", "filter_", "=", "FILTER", ",", "destination", "=", "DESTINATION", ")", "assert", "(", "not", "sink", ".", "exists", "(", ")", ")", "sink", ".", "create", "(", ")", "assert", "sink", ".", "exists", "(", ")", "to_delete", ".", "insert", "(", "0", ",", "sink", ")" ]
sink log entries to bigquery .
train
true
49,846
def flatten_list_bytes(list_of_data):
    if (not PY34):
        list_of_data = ((bytes(data) if isinstance(data, memoryview) else data) for data in list_of_data)
    return ''.join(list_of_data)
[ "def", "flatten_list_bytes", "(", "list_of_data", ")", ":", "if", "(", "not", "PY34", ")", ":", "list_of_data", "=", "(", "(", "bytes", "(", "data", ")", "if", "isinstance", "(", "data", ",", "memoryview", ")", "else", "data", ")", "for", "data", "in", "list_of_data", ")", "return", "''", ".", "join", "(", "list_of_data", ")" ]
concatenate a sequence of bytes-like objects .
train
false
49,847
def limited_by_marker(items, request, max_limit=FLAGS.osapi_max_limit):
    params = get_pagination_params(request)
    limit = params.get('limit', max_limit)
    marker = params.get('marker')
    limit = min(max_limit, limit)
    start_index = 0
    if marker:
        start_index = (-1)
        for (i, item) in enumerate(items):
            if ('flavorid' in item):
                if (item['flavorid'] == marker):
                    start_index = (i + 1)
                    break
            elif ((item['id'] == marker) or (item.get('uuid') == marker)):
                start_index = (i + 1)
                break
        if (start_index < 0):
            msg = (_('marker [%s] not found') % marker)
            raise webob.exc.HTTPBadRequest(explanation=msg)
    range_end = (start_index + limit)
    return items[start_index:range_end]
[ "def", "limited_by_marker", "(", "items", ",", "request", ",", "max_limit", "=", "FLAGS", ".", "osapi_max_limit", ")", ":", "params", "=", "get_pagination_params", "(", "request", ")", "limit", "=", "params", ".", "get", "(", "'limit'", ",", "max_limit", ")", "marker", "=", "params", ".", "get", "(", "'marker'", ")", "limit", "=", "min", "(", "max_limit", ",", "limit", ")", "start_index", "=", "0", "if", "marker", ":", "start_index", "=", "(", "-", "1", ")", "for", "(", "i", ",", "item", ")", "in", "enumerate", "(", "items", ")", ":", "if", "(", "'flavorid'", "in", "item", ")", ":", "if", "(", "item", "[", "'flavorid'", "]", "==", "marker", ")", ":", "start_index", "=", "(", "i", "+", "1", ")", "break", "elif", "(", "(", "item", "[", "'id'", "]", "==", "marker", ")", "or", "(", "item", ".", "get", "(", "'uuid'", ")", "==", "marker", ")", ")", ":", "start_index", "=", "(", "i", "+", "1", ")", "break", "if", "(", "start_index", "<", "0", ")", ":", "msg", "=", "(", "_", "(", "'marker [%s] not found'", ")", "%", "marker", ")", "raise", "webob", ".", "exc", ".", "HTTPBadRequest", "(", "explanation", "=", "msg", ")", "range_end", "=", "(", "start_index", "+", "limit", ")", "return", "items", "[", "start_index", ":", "range_end", "]" ]
return a slice of items according to the requested marker and limit .
train
false
49,848
def load_conf(path=None, file=None):
    conf = ConfigParser()
    if file:
        conf.readfp(file)
        return conf
    confpath = None
    if (not path):
        try:
            confpath = os.environ['DULWICH_SWIFT_CFG']
        except KeyError:
            raise Exception('You need to specify a configuration file')
    else:
        confpath = path
    if (not os.path.isfile(confpath)):
        raise Exception(('Unable to read configuration file %s' % confpath))
    conf.read(confpath)
    return conf
[ "def", "load_conf", "(", "path", "=", "None", ",", "file", "=", "None", ")", ":", "conf", "=", "ConfigParser", "(", ")", "if", "file", ":", "conf", ".", "readfp", "(", "file", ")", "return", "conf", "confpath", "=", "None", "if", "(", "not", "path", ")", ":", "try", ":", "confpath", "=", "os", ".", "environ", "[", "'DULWICH_SWIFT_CFG'", "]", "except", "KeyError", ":", "raise", "Exception", "(", "'You need to specify a configuration file'", ")", "else", ":", "confpath", "=", "path", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "confpath", ")", ")", ":", "raise", "Exception", "(", "(", "'Unable to read configuration file %s'", "%", "confpath", ")", ")", "conf", ".", "read", "(", "confpath", ")", "return", "conf" ]
load configuration in global var conf .
train
false
49,849
@require_POST
@login_required
@permitted
def undo_vote_for_thread(request, course_id, thread_id):
    return _vote_or_unvote(request, course_id, cc.Thread.find(thread_id), undo_vote=True)
[ "@", "require_POST", "@", "login_required", "@", "permitted", "def", "undo_vote_for_thread", "(", "request", ",", "course_id", ",", "thread_id", ")", ":", "return", "_vote_or_unvote", "(", "request", ",", "course_id", ",", "cc", ".", "Thread", ".", "find", "(", "thread_id", ")", ",", "undo_vote", "=", "True", ")" ]
given a course id and thread id .
train
false
49,850
def _determine_termination_policies(termination_policies, termination_policies_from_pillar):
    pillar_termination_policies = copy.deepcopy(__salt__['config.option'](termination_policies_from_pillar, []))
    if ((not termination_policies) and (len(pillar_termination_policies) > 0)):
        termination_policies = pillar_termination_policies
    return termination_policies
[ "def", "_determine_termination_policies", "(", "termination_policies", ",", "termination_policies_from_pillar", ")", ":", "pillar_termination_policies", "=", "copy", ".", "deepcopy", "(", "__salt__", "[", "'config.option'", "]", "(", "termination_policies_from_pillar", ",", "[", "]", ")", ")", "if", "(", "(", "not", "termination_policies", ")", "and", "(", "len", "(", "pillar_termination_policies", ")", ">", "0", ")", ")", ":", "termination_policies", "=", "pillar_termination_policies", "return", "termination_policies" ]
helper method for present .
train
true
49,852
def hosts_append(hostsfile='/etc/hosts', ip_addr=None, entries=None):
    host_list = entries.split(',')
    hosts = parse_hosts(hostsfile=hostsfile)
    if (ip_addr in hosts):
        for host in host_list:
            if (host in hosts[ip_addr]):
                host_list.remove(host)
        if (not host_list):
            return 'No additional hosts were added to {0}'.format(hostsfile)
    append_line = '\n{0} {1}'.format(ip_addr, ' '.join(host_list))
    with salt.utils.fopen(hostsfile, 'a') as fp_:
        fp_.write(append_line)
    return 'The following line was added to {0}:{1}'.format(hostsfile, append_line)
[ "def", "hosts_append", "(", "hostsfile", "=", "'/etc/hosts'", ",", "ip_addr", "=", "None", ",", "entries", "=", "None", ")", ":", "host_list", "=", "entries", ".", "split", "(", "','", ")", "hosts", "=", "parse_hosts", "(", "hostsfile", "=", "hostsfile", ")", "if", "(", "ip_addr", "in", "hosts", ")", ":", "for", "host", "in", "host_list", ":", "if", "(", "host", "in", "hosts", "[", "ip_addr", "]", ")", ":", "host_list", ".", "remove", "(", "host", ")", "if", "(", "not", "host_list", ")", ":", "return", "'No additional hosts were added to {0}'", ".", "format", "(", "hostsfile", ")", "append_line", "=", "'\\n{0} {1}'", ".", "format", "(", "ip_addr", ",", "' '", ".", "join", "(", "host_list", ")", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "hostsfile", ",", "'a'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "append_line", ")", "return", "'The following line was added to {0}:{1}'", ".", "format", "(", "hostsfile", ",", "append_line", ")" ]
append a single line to the /etc/hosts file .
train
true
49,854
def make_multithread(inner_func, numthreads):
    def func_mt(*args):
        length = len(args[0])
        result = np.empty(length, dtype=np.float64)
        args = ((result,) + args)
        chunklen = (((length + numthreads) - 1) // numthreads)
        chunks = [[arg[(i * chunklen):((i + 1) * chunklen)] for arg in args] for i in range(numthreads)]
        threads = [threading.Thread(target=inner_func, args=chunk) for chunk in chunks]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        return result
    return func_mt
[ "def", "make_multithread", "(", "inner_func", ",", "numthreads", ")", ":", "def", "func_mt", "(", "*", "args", ")", ":", "length", "=", "len", "(", "args", "[", "0", "]", ")", "result", "=", "np", ".", "empty", "(", "length", ",", "dtype", "=", "np", ".", "float64", ")", "args", "=", "(", "(", "result", ",", ")", "+", "args", ")", "chunklen", "=", "(", "(", "(", "length", "+", "numthreads", ")", "-", "1", ")", "//", "numthreads", ")", "chunks", "=", "[", "[", "arg", "[", "(", "i", "*", "chunklen", ")", ":", "(", "(", "i", "+", "1", ")", "*", "chunklen", ")", "]", "for", "arg", "in", "args", "]", "for", "i", "in", "range", "(", "numthreads", ")", "]", "threads", "=", "[", "threading", ".", "Thread", "(", "target", "=", "inner_func", ",", "args", "=", "chunk", ")", "for", "chunk", "in", "chunks", "]", "for", "thread", "in", "threads", ":", "thread", ".", "start", "(", ")", "for", "thread", "in", "threads", ":", "thread", ".", "join", "(", ")", "return", "result", "return", "func_mt" ]
run the given function inside *numthreads* threads .
train
false
49,855
def register_account():
    LOGGER.info('Registering account...')
    (code, result) = _send_signed_request((DEFAULT_CA + '/acme/new-reg'), {'resource': 'new-reg', 'agreement': 'https://letsencrypt.org/documents/LE-SA-v1.1.1-August-1-2016.pdf'})
    if (code == 201):
        LOGGER.info('Registered!')
    elif (code == 409):
        LOGGER.info('Already registered!')
    else:
        raise ValueError('Error registering: {0} {1}'.format(code, result))
[ "def", "register_account", "(", ")", ":", "LOGGER", ".", "info", "(", "'Registering account...'", ")", "(", "code", ",", "result", ")", "=", "_send_signed_request", "(", "(", "DEFAULT_CA", "+", "'/acme/new-reg'", ")", ",", "{", "'resource'", ":", "'new-reg'", ",", "'agreement'", ":", "'https://letsencrypt.org/documents/LE-SA-v1.1.1-August-1-2016.pdf'", "}", ")", "if", "(", "code", "==", "201", ")", ":", "LOGGER", ".", "info", "(", "'Registered!'", ")", "elif", "(", "code", "==", "409", ")", ":", "LOGGER", ".", "info", "(", "'Already registered!'", ")", "else", ":", "raise", "ValueError", "(", "'Error registering: {0} {1}'", ".", "format", "(", "code", ",", "result", ")", ")" ]
agree to le tos .
train
true
49,856
def _publish(tgt, fun, arg=None, tgt_type='glob', returner='', timeout=5, form='clean'):
    if (fun == 'publish.publish'):
        log.info("Function name is 'publish.publish'. Returning {}")
        return {}
    arg = _parse_args(arg)
    load = {'cmd': 'minion_pub', 'fun': fun, 'arg': arg, 'tgt': tgt, 'tgt_type': tgt_type, 'ret': returner, 'tmo': timeout, 'form': form, 'id': __opts__['id']}
    channel = salt.transport.Channel.factory(__opts__)
    try:
        peer_data = channel.send(load)
    except SaltReqTimeoutError:
        return "'{0}' publish timed out".format(fun)
    if (not peer_data):
        return {}
    time.sleep(float(timeout))
    load = {'cmd': 'pub_ret', 'id': __opts__['id'], 'jid': str(peer_data['jid'])}
    ret = channel.send(load)
    if (form == 'clean'):
        cret = {}
        for host in ret:
            cret[host] = ret[host]['ret']
        return cret
    else:
        return ret
[ "def", "_publish", "(", "tgt", ",", "fun", ",", "arg", "=", "None", ",", "tgt_type", "=", "'glob'", ",", "returner", "=", "''", ",", "timeout", "=", "5", ",", "form", "=", "'clean'", ")", ":", "if", "(", "fun", "==", "'publish.publish'", ")", ":", "log", ".", "info", "(", "\"Function name is 'publish.publish'. Returning {}\"", ")", "return", "{", "}", "arg", "=", "_parse_args", "(", "arg", ")", "load", "=", "{", "'cmd'", ":", "'minion_pub'", ",", "'fun'", ":", "fun", ",", "'arg'", ":", "arg", ",", "'tgt'", ":", "tgt", ",", "'tgt_type'", ":", "tgt_type", ",", "'ret'", ":", "returner", ",", "'tmo'", ":", "timeout", ",", "'form'", ":", "form", ",", "'id'", ":", "__opts__", "[", "'id'", "]", "}", "channel", "=", "salt", ".", "transport", ".", "Channel", ".", "factory", "(", "__opts__", ")", "try", ":", "peer_data", "=", "channel", ".", "send", "(", "load", ")", "except", "SaltReqTimeoutError", ":", "return", "\"'{0}' publish timed out\"", ".", "format", "(", "fun", ")", "if", "(", "not", "peer_data", ")", ":", "return", "{", "}", "time", ".", "sleep", "(", "float", "(", "timeout", ")", ")", "load", "=", "{", "'cmd'", ":", "'pub_ret'", ",", "'id'", ":", "__opts__", "[", "'id'", "]", ",", "'jid'", ":", "str", "(", "peer_data", "[", "'jid'", "]", ")", "}", "ret", "=", "channel", ".", "send", "(", "load", ")", "if", "(", "form", "==", "'clean'", ")", ":", "cret", "=", "{", "}", "for", "host", "in", "ret", ":", "cret", "[", "host", "]", "=", "ret", "[", "host", "]", "[", "'ret'", "]", "return", "cret", "else", ":", "return", "ret" ]
publish a command from the minion out to other minions .
train
false
49,857
def process_handler(cmd, callback, stderr=subprocess.PIPE):
    sys.stdout.flush()
    sys.stderr.flush()
    close_fds = (sys.platform != 'win32')
    shell = isinstance(cmd, str)
    executable = None
    if (shell and (os.name == 'posix') and ('SHELL' in os.environ)):
        executable = os.environ['SHELL']
    p = subprocess.Popen(cmd, shell=shell, executable=executable, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=stderr, close_fds=close_fds)
    try:
        out = callback(p)
    except KeyboardInterrupt:
        print '^C'
        sys.stdout.flush()
        sys.stderr.flush()
        out = None
    finally:
        if (p.returncode is None):
            try:
                p.terminate()
                p.poll()
            except OSError:
                pass
        if (p.returncode is None):
            try:
                p.kill()
            except OSError:
                pass
    return out
[ "def", "process_handler", "(", "cmd", ",", "callback", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", ":", "sys", ".", "stdout", ".", "flush", "(", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "close_fds", "=", "(", "sys", ".", "platform", "!=", "'win32'", ")", "shell", "=", "isinstance", "(", "cmd", ",", "str", ")", "executable", "=", "None", "if", "(", "shell", "and", "(", "os", ".", "name", "==", "'posix'", ")", "and", "(", "'SHELL'", "in", "os", ".", "environ", ")", ")", ":", "executable", "=", "os", ".", "environ", "[", "'SHELL'", "]", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "shell", ",", "executable", "=", "executable", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "stderr", ",", "close_fds", "=", "close_fds", ")", "try", ":", "out", "=", "callback", "(", "p", ")", "except", "KeyboardInterrupt", ":", "print", "'^C'", "sys", ".", "stdout", ".", "flush", "(", ")", "sys", ".", "stderr", ".", "flush", "(", ")", "out", "=", "None", "finally", ":", "if", "(", "p", ".", "returncode", "is", "None", ")", ":", "try", ":", "p", ".", "terminate", "(", ")", "p", ".", "poll", "(", ")", "except", "OSError", ":", "pass", "if", "(", "p", ".", "returncode", "is", "None", ")", ":", "try", ":", "p", ".", "kill", "(", ")", "except", "OSError", ":", "pass", "return", "out" ]
open a command in a shell subprocess and execute a callback .
train
false
49,858
def check_sfffile():
    if (not which('sfffile')):
        raise ApplicationNotFoundError((_MISSING_APP_MESSAGE % 'sfffile'))
[ "def", "check_sfffile", "(", ")", ":", "if", "(", "not", "which", "(", "'sfffile'", ")", ")", ":", "raise", "ApplicationNotFoundError", "(", "(", "_MISSING_APP_MESSAGE", "%", "'sfffile'", ")", ")" ]
raise error if sfffile is not in $path .
train
false
49,859
def is_valid_uri(uri, encoding='utf-8', **kwargs):
    return URIReference.from_string(uri, encoding).is_valid(**kwargs)
[ "def", "is_valid_uri", "(", "uri", ",", "encoding", "=", "'utf-8'", ",", "**", "kwargs", ")", ":", "return", "URIReference", ".", "from_string", "(", "uri", ",", "encoding", ")", ".", "is_valid", "(", "**", "kwargs", ")" ]
determine if the uri given is valid .
train
false
49,860
def queryset_manager(func):
    return QuerySetManager(func)
[ "def", "queryset_manager", "(", "func", ")", ":", "return", "QuerySetManager", "(", "func", ")" ]
decorator that allows you to define custom queryset managers on :class:~mongoengine .
train
false
49,861
def key_absent(name, region=None, key=None, keyid=None, profile=None):
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    exists = __salt__['boto_ec2.get_key'](name, region, key, keyid, profile)
    if exists:
        if __opts__['test']:
            ret['comment'] = 'The key {0} is set to be deleted.'.format(name)
            ret['result'] = None
            return ret
        deleted = __salt__['boto_ec2.delete_key'](name, region, key, keyid, profile)
        log.debug('exists is {0}'.format(deleted))
        if deleted:
            ret['result'] = True
            ret['comment'] = 'The key {0} is deleted.'.format(name)
            ret['changes']['old'] = name
        else:
            ret['result'] = False
            ret['comment'] = 'Could not delete key {0} '.format(name)
    else:
        ret['result'] = True
        ret['comment'] = 'The key name {0} does not exist'.format(name)
    return ret
[ "def", "key_absent", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "True", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "exists", "=", "__salt__", "[", "'boto_ec2.get_key'", "]", "(", "name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "exists", ":", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'The key {0} is set to be deleted.'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "deleted", "=", "__salt__", "[", "'boto_ec2.delete_key'", "]", "(", "name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "log", ".", "debug", "(", "'exists is {0}'", ".", "format", "(", "deleted", ")", ")", "if", "deleted", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'The key {0} is deleted.'", ".", "format", "(", "name", ")", "ret", "[", "'changes'", "]", "[", "'old'", "]", "=", "name", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Could not delete key {0} '", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'The key name {0} does not exist'", ".", "format", "(", "name", ")", "return", "ret" ]
deletes a key pair .
train
true
49,862
def unixtime(dt_obj):
    return time.mktime(dt_obj.utctimetuple())
[ "def", "unixtime", "(", "dt_obj", ")", ":", "return", "time", ".", "mktime", "(", "dt_obj", ".", "utctimetuple", "(", ")", ")" ]
format datetime object as unix timestamp .
train
false
49,863
def clear_dir(dir):
    shutil.rmtree(dir, True)
[ "def", "clear_dir", "(", "dir", ")", ":", "shutil", ".", "rmtree", "(", "dir", ",", "True", ")" ]
clears the given directory .
train
false
49,865
def get_field_info(model):
    opts = model._meta.concrete_model._meta
    pk = _get_pk(opts)
    fields = _get_fields(opts)
    forward_relations = _get_forward_relationships(opts)
    reverse_relations = _get_reverse_relationships(opts)
    fields_and_pk = _merge_fields_and_pk(pk, fields)
    relationships = _merge_relationships(forward_relations, reverse_relations)
    return FieldInfo(pk, fields, forward_relations, reverse_relations, fields_and_pk, relationships)
[ "def", "get_field_info", "(", "model", ")", ":", "opts", "=", "model", ".", "_meta", ".", "concrete_model", ".", "_meta", "pk", "=", "_get_pk", "(", "opts", ")", "fields", "=", "_get_fields", "(", "opts", ")", "forward_relations", "=", "_get_forward_relationships", "(", "opts", ")", "reverse_relations", "=", "_get_reverse_relationships", "(", "opts", ")", "fields_and_pk", "=", "_merge_fields_and_pk", "(", "pk", ",", "fields", ")", "relationships", "=", "_merge_relationships", "(", "forward_relations", ",", "reverse_relations", ")", "return", "FieldInfo", "(", "pk", ",", "fields", ",", "forward_relations", ",", "reverse_relations", ",", "fields_and_pk", ",", "relationships", ")" ]
given a model class , returns a fieldinfo instance containing metadata about its fields , primary key and relationships .
train
true
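usage sketch for get_field_info as used in django rest framework's model introspection; the attribute names come from the FieldInfo namedtuple built above (BlogPost is a hypothetical concrete model):

info = get_field_info(BlogPost)
print(info.pk.name)              # primary key field
print(list(info.fields))         # plain model fields
print(list(info.relationships))  # forward and reverse relations merged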
49,868
def getReadProfileRepository(): return settings.getReadRepository(ProfileRepository())
[ "def", "getReadProfileRepository", "(", ")", ":", "return", "settings", ".", "getReadRepository", "(", "ProfileRepository", "(", ")", ")" ]
get the read profile repository .
train
false
49,869
def can_connect(ip, port):
    try:
        socket.create_connection((ip, port))
    except socket.error as e:
        if e.errno not in {errno.ECONNREFUSED, errno.ETIMEDOUT}:
            app_log.error('Unexpected error connecting to %s:%i %s', ip, port, e)
        return False
    else:
        return True
[ "def", "can_connect", "(", "ip", ",", "port", ")", ":", "try", ":", "socket", ".", "create_connection", "(", "(", "ip", ",", "port", ")", ")", "except", "socket", ".", "error", "as", "e", ":", "if", "(", "e", ".", "errno", "not", "in", "{", "errno", ".", "ECONNREFUSED", ",", "errno", ".", "ETIMEDOUT", "}", ")", ":", "app_log", ".", "error", "(", "'Unexpected error connecting to %s:%i %s'", ",", "ip", ",", "port", ",", "e", ")", "return", "False", "else", ":", "return", "True" ]
check if we can connect to an ip:port ; return true if we can connect .
train
false
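can_connect is a plain boolean probe, so a typical caller polls with it, e.g.:

import time

# wait until something is listening on localhost:8888 (sketch)
while not can_connect('127.0.0.1', 8888):
    time.sleep(0.1)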
49,870
def decode_region(code):
    for (tag, (language, region, iso639, iso3166)) in LANGUAGE_REGION.items():
        if iso3166 == code.upper():
            return region
[ "def", "decode_region", "(", "code", ")", ":", "for", "(", "tag", ",", "(", "language", ",", "region", ",", "iso639", ",", "iso3166", ")", ")", "in", "LANGUAGE_REGION", ".", "items", "(", ")", ":", "if", "(", "iso3166", "==", "code", ".", "upper", "(", ")", ")", ":", "return", "region" ]
returns the region name for the given region code .
train
false
49,871
def inject_data(image, key=None, net=None, metadata=None, admin_password=None, files=None, partition=None, use_cow=False, mandatory=()):
    LOG.debug(_('Inject data image=%(image)s key=%(key)s net=%(net)s metadata=%(metadata)s admin_password=ha-ha-not-telling-you files=%(files)s partition=%(partition)s use_cow=%(use_cow)s') % locals())
    fmt = 'raw'
    if use_cow:
        fmt = 'qcow2'
    try:
        fs = vfs.VFS.instance_for_image(image, fmt, partition)
        fs.setup()
    except Exception as e:
        for inject in mandatory:
            inject_val = locals()[inject]
            if inject_val:
                raise
        LOG.warn(_('Ignoring error injecting data into image (%(e)s)') % locals())
        return False
    try:
        return inject_data_into_fs(fs, key, net, metadata, admin_password, files, mandatory)
    finally:
        fs.teardown()
[ "def", "inject_data", "(", "image", ",", "key", "=", "None", ",", "net", "=", "None", ",", "metadata", "=", "None", ",", "admin_password", "=", "None", ",", "files", "=", "None", ",", "partition", "=", "None", ",", "use_cow", "=", "False", ",", "mandatory", "=", "(", ")", ")", ":", "LOG", ".", "debug", "(", "(", "_", "(", "'Inject data image=%(image)s key=%(key)s net=%(net)s metadata=%(metadata)s admin_password=ha-ha-not-telling-you files=%(files)s partition=%(partition)s use_cow=%(use_cow)s'", ")", "%", "locals", "(", ")", ")", ")", "fmt", "=", "'raw'", "if", "use_cow", ":", "fmt", "=", "'qcow2'", "try", ":", "fs", "=", "vfs", ".", "VFS", ".", "instance_for_image", "(", "image", ",", "fmt", ",", "partition", ")", "fs", ".", "setup", "(", ")", "except", "Exception", "as", "e", ":", "for", "inject", "in", "mandatory", ":", "inject_val", "=", "locals", "(", ")", "[", "inject", "]", "if", "inject_val", ":", "raise", "LOG", ".", "warn", "(", "(", "_", "(", "'Ignoring error injecting data into image (%(e)s)'", ")", "%", "locals", "(", ")", ")", ")", "return", "False", "try", ":", "return", "inject_data_into_fs", "(", "fs", ",", "key", ",", "net", ",", "metadata", ",", "admin_password", ",", "files", ",", "mandatory", ")", "finally", ":", "fs", ".", "teardown", "(", ")" ]
inject the specified items into a disk image .
train
false
49,872
@contextmanager
def _indent_context():
    try:
        yield
    finally:
        dedent()
[ "@", "contextmanager", "def", "_indent_context", "(", ")", ":", "try", ":", "(", "yield", ")", "finally", ":", "dedent", "(", ")" ]
indentation context manager .
train
false
49,875
def test_custom_rendering():
    countries = CountryTable(MEMORY_DATA)
    context = Context({u'countries': countries})
    template = Template(u'{% for column in countries.columns %}{{ column }}/{{ column.name }} {% endfor %}')
    result = u'Name/name Capital/capital Population Size/population Phone Ext./calling_code '
    assert result == template.render(context)
    template = Template(u'{% for row in countries.rows %}{% for value in row %}{{ value }} {% endfor %}{% endfor %}')
    result = u'Germany Berlin 83 49 France \u2014 64 33 Netherlands Amsterdam \u2014 31 Austria \u2014 8 43 '
    assert result == template.render(context)
[ "def", "test_custom_rendering", "(", ")", ":", "countries", "=", "CountryTable", "(", "MEMORY_DATA", ")", "context", "=", "Context", "(", "{", "u'countries'", ":", "countries", "}", ")", "template", "=", "Template", "(", "u'{% for column in countries.columns %}{{ column }}/{{ column.name }} {% endfor %}'", ")", "result", "=", "u'Name/name Capital/capital Population Size/population Phone Ext./calling_code '", "assert", "(", "result", "==", "template", ".", "render", "(", "context", ")", ")", "template", "=", "Template", "(", "u'{% for row in countries.rows %}{% for value in row %}{{ value }} {% endfor %}{% endfor %}'", ")", "result", "=", "u'Germany Berlin 83 49 France \\u2014 64 33 Netherlands Amsterdam \\u2014 31 Austria \\u2014 8 43 '", "assert", "(", "result", "==", "template", ".", "render", "(", "context", ")", ")" ]
for good measure , render the table's columns and rows through actual django templates .
train
false
49,876
def enhex(x): return x.encode('hex')
[ "def", "enhex", "(", "x", ")", ":", "return", "x", ".", "encode", "(", "'hex'", ")" ]
enhex(x) -> str hex-encodes a string .
train
false
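enhex relies on the python 2 'hex' codec, e.g.:

print(enhex('AAA'))       # '414141'
print(enhex('\x00\xff'))  # '00ff'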
49,877
def action_hooks(context, hook_cls, action_key=u'action', template_name=u'extensions/action.html'):
    s = u''
    for hook in hook_cls.hooks:
        try:
            for actions in hook.get_actions(context):
                if actions:
                    context.push()
                    context[action_key] = actions
                    try:
                        s += render_to_string(template_name, context)
                    except Exception as e:
                        logging.error(u'Error when rendering template for action "%s" for hook %r in extension "%s": %s', action_key, hook, hook.extension.id, e, exc_info=1)
                    context.pop()
        except Exception as e:
            logging.error(u'Error when running get_actions() on hook %r in extension "%s": %s', hook, hook.extension.id, e, exc_info=1)
    return s
[ "def", "action_hooks", "(", "context", ",", "hook_cls", ",", "action_key", "=", "u'action'", ",", "template_name", "=", "u'extensions/action.html'", ")", ":", "s", "=", "u''", "for", "hook", "in", "hook_cls", ".", "hooks", ":", "try", ":", "for", "actions", "in", "hook", ".", "get_actions", "(", "context", ")", ":", "if", "actions", ":", "context", ".", "push", "(", ")", "context", "[", "action_key", "]", "=", "actions", "try", ":", "s", "+=", "render_to_string", "(", "template_name", ",", "context", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "u'Error when rendering template for action \"%s\" for hook %r in extension \"%s\": %s'", ",", "action_key", ",", "hook", ",", "hook", ".", "extension", ".", "id", ",", "e", ",", "exc_info", "=", "1", ")", "context", ".", "pop", "(", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "u'Error when running get_actions() on hook %r in extension \"%s\": %s'", ",", "hook", ",", "hook", ".", "extension", ".", "id", ",", "e", ",", "exc_info", "=", "1", ")", "return", "s" ]
displays all registered action hooks from the specified actionhook .
train
false
49,879
def automap_base(declarative_base=None, **kw):
    if declarative_base is None:
        Base = _declarative_base(**kw)
    else:
        Base = declarative_base
    return type(Base.__name__, (AutomapBase, Base), {'__abstract__': True, 'classes': util.Properties({})})
[ "def", "automap_base", "(", "declarative_base", "=", "None", ",", "**", "kw", ")", ":", "if", "(", "declarative_base", "is", "None", ")", ":", "Base", "=", "_declarative_base", "(", "**", "kw", ")", "else", ":", "Base", "=", "declarative_base", "return", "type", "(", "Base", ".", "__name__", ",", "(", "AutomapBase", ",", "Base", ")", ",", "{", "'__abstract__'", ":", "True", ",", "'classes'", ":", "util", ".", "Properties", "(", "{", "}", ")", "}", ")" ]
produce a declarative automap base .
train
false
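usage sketch for automap_base following the sqlalchemy automap documentation; the table and class names depend on the reflected schema:

from sqlalchemy import create_engine

Base = automap_base()
engine = create_engine('sqlite:///mydb.sqlite')  # hypothetical database
Base.prepare(engine, reflect=True)

# reflected mapped classes are collected on Base.classes, keyed by table name
User = Base.classes.user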
49,882
def _change_activity_status(committer_id, activity_id, activity_type, new_status, commit_message):
    activity_rights = _get_activity_rights(activity_type, activity_id)
    old_status = activity_rights.status
    activity_rights.status = new_status
    if activity_type == feconf.ACTIVITY_TYPE_EXPLORATION:
        cmd_type = CMD_CHANGE_EXPLORATION_STATUS
    elif activity_type == feconf.ACTIVITY_TYPE_COLLECTION:
        cmd_type = CMD_CHANGE_COLLECTION_STATUS
    commit_cmds = [{'cmd': cmd_type, 'old_status': old_status, 'new_status': new_status}]
    if new_status != ACTIVITY_STATUS_PRIVATE:
        activity_rights.viewer_ids = []
        if activity_rights.first_published_msec is None:
            activity_rights.first_published_msec = utils.get_current_time_in_millisecs()
    _save_activity_rights(committer_id, activity_rights, activity_type, commit_message, commit_cmds)
    _update_activity_summary(activity_type, activity_rights)
[ "def", "_change_activity_status", "(", "committer_id", ",", "activity_id", ",", "activity_type", ",", "new_status", ",", "commit_message", ")", ":", "activity_rights", "=", "_get_activity_rights", "(", "activity_type", ",", "activity_id", ")", "old_status", "=", "activity_rights", ".", "status", "activity_rights", ".", "status", "=", "new_status", "if", "(", "activity_type", "==", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ")", ":", "cmd_type", "=", "CMD_CHANGE_EXPLORATION_STATUS", "elif", "(", "activity_type", "==", "feconf", ".", "ACTIVITY_TYPE_COLLECTION", ")", ":", "cmd_type", "=", "CMD_CHANGE_COLLECTION_STATUS", "commit_cmds", "=", "[", "{", "'cmd'", ":", "cmd_type", ",", "'old_status'", ":", "old_status", ",", "'new_status'", ":", "new_status", "}", "]", "if", "(", "new_status", "!=", "ACTIVITY_STATUS_PRIVATE", ")", ":", "activity_rights", ".", "viewer_ids", "=", "[", "]", "if", "(", "activity_rights", ".", "first_published_msec", "is", "None", ")", ":", "activity_rights", ".", "first_published_msec", "=", "utils", ".", "get_current_time_in_millisecs", "(", ")", "_save_activity_rights", "(", "committer_id", ",", "activity_rights", ",", "activity_type", ",", "commit_message", ",", "commit_cmds", ")", "_update_activity_summary", "(", "activity_type", ",", "activity_rights", ")" ]
change the status of an activity .
train
false
49,884
def test_sum_squared():
    X = np.random.RandomState(0).randint(0, 50, (3, 3))
    assert_equal(np.sum(X ** 2), sum_squared(X))
[ "def", "test_sum_squared", "(", ")", ":", "X", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", ".", "randint", "(", "0", ",", "50", ",", "(", "3", ",", "3", ")", ")", "assert_equal", "(", "np", ".", "sum", "(", "(", "X", "**", "2", ")", ")", ",", "sum_squared", "(", "X", ")", ")" ]
test optimized sum of squares .
train
false
49,885
def knownfailureif(fail_condition, msg=None):
    if msg is None:
        msg = 'Test skipped due to known failure'
    if callable(fail_condition):
        fail_val = lambda: fail_condition()
    else:
        fail_val = lambda: fail_condition
    def knownfail_decorator(f):
        import nose
        def knownfailer(*args, **kwargs):
            if fail_val():
                raise KnownFailureTest(msg)
            else:
                return f(*args, **kwargs)
        return nose.tools.make_decorator(f)(knownfailer)
    return knownfail_decorator
[ "def", "knownfailureif", "(", "fail_condition", ",", "msg", "=", "None", ")", ":", "if", "(", "msg", "is", "None", ")", ":", "msg", "=", "'Test skipped due to known failure'", "if", "callable", "(", "fail_condition", ")", ":", "fail_val", "=", "(", "lambda", ":", "fail_condition", "(", ")", ")", "else", ":", "fail_val", "=", "(", "lambda", ":", "fail_condition", ")", "def", "knownfail_decorator", "(", "f", ")", ":", "import", "nose", "def", "knownfailer", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "fail_val", "(", ")", ":", "raise", "KnownFailureTest", "(", "msg", ")", "else", ":", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "nose", ".", "tools", ".", "make_decorator", "(", "f", ")", "(", "knownfailer", ")", "return", "knownfail_decorator" ]
assume a test will fail if *fail_condition* is true .
train
true
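knownfailureif is used as a decorator factory in nose-based suites; the condition and test body here are illustrative:

import sys

@knownfailureif(sys.platform == 'win32', 'symlinks unsupported on this platform')
def test_symlinks():
    assert create_symlink_somehow()  # hypothetical test body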
49,886
@mock_ec2
def test_igw_detach_invalid_vpc():
    conn = boto.connect_vpc(u'the_key', u'the_secret')
    igw = conn.create_internet_gateway()
    vpc = conn.create_vpc(VPC_CIDR)
    conn.attach_internet_gateway(igw.id, vpc.id)
    with assert_raises(EC2ResponseError) as cm:
        conn.detach_internet_gateway(igw.id, BAD_VPC)
    cm.exception.code.should.equal(u'Gateway.NotAttached')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
[ "@", "mock_ec2", "def", "test_igw_detach_invalid_vpc", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "igw", "=", "conn", ".", "create_internet_gateway", "(", ")", "vpc", "=", "conn", ".", "create_vpc", "(", "VPC_CIDR", ")", "conn", ".", "attach_internet_gateway", "(", "igw", ".", "id", ",", "vpc", ".", "id", ")", "with", "assert_raises", "(", "EC2ResponseError", ")", "as", "cm", ":", "conn", ".", "detach_internet_gateway", "(", "igw", ".", "id", ",", "BAD_VPC", ")", "cm", ".", "exception", ".", "code", ".", "should", ".", "equal", "(", "u'Gateway.NotAttached'", ")", "cm", ".", "exception", ".", "status", ".", "should", ".", "equal", "(", "400", ")", "cm", ".", "exception", ".", "request_id", ".", "should_not", ".", "be", ".", "none" ]
internet gateway fails to detach with an invalid vpc .
train
false
49,887
def _api_options(name, output, kwargs): return options_list(output)
[ "def", "_api_options", "(", "name", ",", "output", ",", "kwargs", ")", ":", "return", "options_list", "(", "output", ")" ]
api: accepts output .
train
false
49,888
def formatLong(title, message, frontTab=True, spacing=16):
    lines = textwrap.wrap(textwrap.dedent(message).strip(), width=50)
    returnString = ''
    i = 1
    if len(lines) > 0:
        if frontTab:
            returnString += '\t%s%s' % (('{0: <%s}' % spacing).format(title), lines[0])
        else:
            returnString += ' %s%s' % (('{0: <%s}' % (spacing - 1)).format(title), lines[0])
    while i < len(lines):
        if frontTab:
            returnString += ('\n\t' + (' ' * spacing)) + lines[i]
        else:
            returnString += ('\n' + (' ' * spacing)) + lines[i]
        i += 1
    return returnString
[ "def", "formatLong", "(", "title", ",", "message", ",", "frontTab", "=", "True", ",", "spacing", "=", "16", ")", ":", "lines", "=", "textwrap", ".", "wrap", "(", "textwrap", ".", "dedent", "(", "message", ")", ".", "strip", "(", ")", ",", "width", "=", "50", ")", "returnString", "=", "''", "i", "=", "1", "if", "(", "len", "(", "lines", ")", ">", "0", ")", ":", "if", "frontTab", ":", "returnString", "+=", "(", "' DCTB %s%s'", "%", "(", "(", "'{0: <%s}'", "%", "spacing", ")", ".", "format", "(", "title", ")", ",", "lines", "[", "0", "]", ")", ")", "else", ":", "returnString", "+=", "(", "' %s%s'", "%", "(", "(", "'{0: <%s}'", "%", "(", "spacing", "-", "1", ")", ")", ".", "format", "(", "title", ")", ",", "lines", "[", "0", "]", ")", ")", "while", "(", "i", "<", "len", "(", "lines", ")", ")", ":", "if", "frontTab", ":", "returnString", "+=", "(", "(", "'\\n DCTB '", "+", "(", "' '", "*", "spacing", ")", ")", "+", "lines", "[", "i", "]", ")", "else", ":", "returnString", "+=", "(", "(", "'\\n'", "+", "(", "' '", "*", "spacing", ")", ")", "+", "lines", "[", "i", "]", ")", "i", "+=", "1", "return", "returnString" ]
print a long title:message with our standardized formatting .
train
false
49,889
@gen.engine
def Dispatch(client, callback):
    job = Job(client, 'dbchk')
    if options.options.require_lock:
        got_lock = yield gen.Task(job.AcquireLock)
        if got_lock == False:
            logging.warning('Failed to acquire job lock: exiting.')
            callback()
            return
    try:
        yield gen.Task(RunOnce, client, job)
    finally:
        yield gen.Task(job.ReleaseLock)
    callback()
[ "@", "gen", ".", "engine", "def", "Dispatch", "(", "client", ",", "callback", ")", ":", "job", "=", "Job", "(", "client", ",", "'dbchk'", ")", "if", "options", ".", "options", ".", "require_lock", ":", "got_lock", "=", "(", "yield", "gen", ".", "Task", "(", "job", ".", "AcquireLock", ")", ")", "if", "(", "got_lock", "==", "False", ")", ":", "logging", ".", "warning", "(", "'Failed to acquire job lock: exiting.'", ")", "callback", "(", ")", "return", "try", ":", "(", "yield", "gen", ".", "Task", "(", "RunOnce", ",", "client", ",", "job", ")", ")", "finally", ":", "(", "yield", "gen", ".", "Task", "(", "job", ".", "ReleaseLock", ")", ")", "callback", "(", ")" ]
dispatches according to command-line options .
train
false
49,890
def get_number_of_parts(xml_source):
    parts_size = {}
    parts_names = []
    root = fromstring(xml_source)
    heading_pairs = root.find(QName('http://schemas.openxmlformats.org/officeDocument/2006/extended-properties', 'HeadingPairs').text)
    vector = heading_pairs.find(QName(NAMESPACES['vt'], 'vector').text)
    children = vector.getchildren()
    for child_id in range(0, len(children), 2):
        part_name = children[child_id].find(QName(NAMESPACES['vt'], 'lpstr').text).text
        if part_name not in parts_names:
            parts_names.append(part_name)
        part_size = int(children[child_id + 1].find(QName(NAMESPACES['vt'], 'i4').text).text)
        parts_size[part_name] = part_size
    return (parts_size, parts_names)
[ "def", "get_number_of_parts", "(", "xml_source", ")", ":", "parts_size", "=", "{", "}", "parts_names", "=", "[", "]", "root", "=", "fromstring", "(", "xml_source", ")", "heading_pairs", "=", "root", ".", "find", "(", "QName", "(", "'http://schemas.openxmlformats.org/officeDocument/2006/extended-properties'", ",", "'HeadingPairs'", ")", ".", "text", ")", "vector", "=", "heading_pairs", ".", "find", "(", "QName", "(", "NAMESPACES", "[", "'vt'", "]", ",", "'vector'", ")", ".", "text", ")", "children", "=", "vector", ".", "getchildren", "(", ")", "for", "child_id", "in", "range", "(", "0", ",", "len", "(", "children", ")", ",", "2", ")", ":", "part_name", "=", "children", "[", "child_id", "]", ".", "find", "(", "QName", "(", "NAMESPACES", "[", "'vt'", "]", ",", "'lpstr'", ")", ".", "text", ")", ".", "text", "if", "(", "not", "(", "part_name", "in", "parts_names", ")", ")", ":", "parts_names", ".", "append", "(", "part_name", ")", "part_size", "=", "int", "(", "children", "[", "(", "child_id", "+", "1", ")", "]", ".", "find", "(", "QName", "(", "NAMESPACES", "[", "'vt'", "]", ",", "'i4'", ")", ".", "text", ")", ".", "text", ")", "parts_size", "[", "part_name", "]", "=", "part_size", "return", "(", "parts_size", ",", "parts_names", ")" ]
get a list of contents of the workbook .
train
false
49,891
def getRMSBins(data, chunk=64):
    bins = []
    i = chunk
    while i <= len(data):
        r = getRMS(data[i - chunk:i])
        bins.append(r)
        i += chunk
    return np.array(bins)
[ "def", "getRMSBins", "(", "data", ",", "chunk", "=", "64", ")", ":", "bins", "=", "[", "]", "i", "=", "chunk", "while", "(", "i", "<=", "len", "(", "data", ")", ")", ":", "r", "=", "getRMS", "(", "data", "[", "(", "i", "-", "chunk", ")", ":", "i", "]", ")", "bins", ".", "append", "(", "r", ")", "i", "+=", "chunk", "return", "np", ".", "array", "(", "bins", ")" ]
return rms in bins of chunk samples .
train
false
49,892
def l2_inner_product(a, b, lim): return integrate((conjugate(a) * b), lim)
[ "def", "l2_inner_product", "(", "a", ",", "b", ",", "lim", ")", ":", "return", "integrate", "(", "(", "conjugate", "(", "a", ")", "*", "b", ")", ",", "lim", ")" ]
calculates the l2 inner product over the domain lim .
train
false
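a worked sympy example for l2_inner_product: with real x, the squared norm of sin over [0, 2*pi] is pi:

from sympy import symbols, sin, pi

x = symbols('x', real=True)
# conjugate(sin(x)) == sin(x) for real x, so this is integrate(sin(x)**2, (x, 0, 2*pi))
print(l2_inner_product(sin(x), sin(x), (x, 0, 2*pi)))  # pi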
49,893
def test_caplog_bug_workaround_2():
    caplog_handler = None
    for h in logging.getLogger().handlers:
        if isinstance(h, pytest_catchlog.LogCaptureHandler):
            assert caplog_handler is None
            caplog_handler = h
[ "def", "test_caplog_bug_workaround_2", "(", ")", ":", "caplog_handler", "=", "None", "for", "h", "in", "logging", ".", "getLogger", "(", ")", ".", "handlers", ":", "if", "isinstance", "(", "h", ",", "pytest_catchlog", ".", "LogCaptureHandler", ")", ":", "assert", "(", "caplog_handler", "is", "None", ")", "caplog_handler", "=", "h" ]
make sure caplog_bug_workaround works correctly after a skipped test .
train
false
49,894
def get_loss(name):
    try:
        return globals()[name]
    except:
        raise ValueError('Invalid metric function.')
[ "def", "get_loss", "(", "name", ")", ":", "try", ":", "return", "globals", "(", ")", "[", "name", "]", "except", ":", "raise", "ValueError", "(", "'Invalid metric function.'", ")" ]
returns the loss function with the given name .
train
false
49,895
def path_joiner(target, search_paths): return (os.path.join(path, target) for path in search_paths)
[ "def", "path_joiner", "(", "target", ",", "search_paths", ")", ":", "return", "(", "os", ".", "path", ".", "join", "(", "path", ",", "target", ")", "for", "path", "in", "search_paths", ")" ]
create a generator that joins target to each search path .
train
false
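because path_joiner returns a generator, it pairs naturally with next() to find the first existing candidate:

import os

candidates = path_joiner('settings.py', ['/etc/myapp', os.path.expanduser('~/.myapp')])
found = next((p for p in candidates if os.path.exists(p)), None)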
49,896
def later_than(after, before):
    if isinstance(after, basestring):
        after = str_to_time(after)
    elif isinstance(after, int):
        after = time.gmtime(after)
    if isinstance(before, basestring):
        before = str_to_time(before)
    elif isinstance(before, int):
        before = time.gmtime(before)
    return after >= before
[ "def", "later_than", "(", "after", ",", "before", ")", ":", "if", "isinstance", "(", "after", ",", "basestring", ")", ":", "after", "=", "str_to_time", "(", "after", ")", "elif", "isinstance", "(", "after", ",", "int", ")", ":", "after", "=", "time", ".", "gmtime", "(", "after", ")", "if", "isinstance", "(", "before", ",", "basestring", ")", ":", "before", "=", "str_to_time", "(", "before", ")", "elif", "isinstance", "(", "before", ",", "int", ")", ":", "before", "=", "time", ".", "gmtime", "(", "before", ")", "return", "(", "after", ">=", "before", ")" ]
true if after is later than or equal to before .
train
true
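usage sketch for later_than, assuming str_to_time accepts the usual saml utc timestamp format:

print(later_than('2016-01-02T00:00:00Z', '2016-01-01T00:00:00Z'))  # True
# integer epoch seconds are converted with time.gmtime first
print(later_than(1451692800, 1451606400))  # True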
49,897
def has_author_view(descriptor): return getattr(descriptor, 'has_author_view', False)
[ "def", "has_author_view", "(", "descriptor", ")", ":", "return", "getattr", "(", "descriptor", ",", "'has_author_view'", ",", "False", ")" ]
returns true if the xmodule linked to the descriptor supports "author_view" .
train
false
49,898
def _smartos_computenode_data():
    if salt.utils.is_proxy():
        return {}
    grains = {}
    grains['computenode_vms_total'] = len(__salt__['cmd.run']('vmadm list -p').split('\n'))
    grains['computenode_vms_running'] = len(__salt__['cmd.run']('vmadm list -p state=running').split('\n'))
    grains['computenode_vms_stopped'] = len(__salt__['cmd.run']('vmadm list -p state=stopped').split('\n'))
    sysinfo = json.loads(__salt__['cmd.run']('sysinfo'))
    grains['computenode_sdc_version'] = sysinfo['SDC Version']
    grains['computenode_vm_capable'] = sysinfo['VM Capable']
    if sysinfo['VM Capable']:
        grains['computenode_vm_hw_virt'] = sysinfo['CPU Virtualization']
    grains['manufacturer'] = sysinfo['Manufacturer']
    grains['productname'] = sysinfo['Product']
    grains['uuid'] = sysinfo['UUID']
    return grains
[ "def", "_smartos_computenode_data", "(", ")", ":", "if", "salt", ".", "utils", ".", "is_proxy", "(", ")", ":", "return", "{", "}", "grains", "=", "{", "}", "grains", "[", "'computenode_vms_total'", "]", "=", "len", "(", "__salt__", "[", "'cmd.run'", "]", "(", "'vmadm list -p'", ")", ".", "split", "(", "'\\n'", ")", ")", "grains", "[", "'computenode_vms_running'", "]", "=", "len", "(", "__salt__", "[", "'cmd.run'", "]", "(", "'vmadm list -p state=running'", ")", ".", "split", "(", "'\\n'", ")", ")", "grains", "[", "'computenode_vms_stopped'", "]", "=", "len", "(", "__salt__", "[", "'cmd.run'", "]", "(", "'vmadm list -p state=stopped'", ")", ".", "split", "(", "'\\n'", ")", ")", "sysinfo", "=", "json", ".", "loads", "(", "__salt__", "[", "'cmd.run'", "]", "(", "'sysinfo'", ")", ")", "grains", "[", "'computenode_sdc_version'", "]", "=", "sysinfo", "[", "'SDC Version'", "]", "grains", "[", "'computenode_vm_capable'", "]", "=", "sysinfo", "[", "'VM Capable'", "]", "if", "sysinfo", "[", "'VM Capable'", "]", ":", "grains", "[", "'computenode_vm_hw_virt'", "]", "=", "sysinfo", "[", "'CPU Virtualization'", "]", "grains", "[", "'manufacturer'", "]", "=", "sysinfo", "[", "'Manufacturer'", "]", "grains", "[", "'productname'", "]", "=", "sysinfo", "[", "'Product'", "]", "grains", "[", "'uuid'", "]", "=", "sysinfo", "[", "'UUID'", "]", "return", "grains" ]
return useful information from a smartos compute node .
train
false
49,899
def get_uploader(upload_to, old_filename=None):
    upload = None
    for plugin in plugins.PluginImplementations(plugins.IUploader):
        upload = plugin.get_uploader(upload_to, old_filename)
    if upload is None:
        upload = Upload(upload_to, old_filename)
    return upload
[ "def", "get_uploader", "(", "upload_to", ",", "old_filename", "=", "None", ")", ":", "upload", "=", "None", "for", "plugin", "in", "plugins", ".", "PluginImplementations", "(", "plugins", ".", "IUploader", ")", ":", "upload", "=", "plugin", ".", "get_uploader", "(", "upload_to", ",", "old_filename", ")", "if", "(", "upload", "is", "None", ")", ":", "upload", "=", "Upload", "(", "upload_to", ",", "old_filename", ")", "return", "upload" ]
query iuploader plugins and return an uploader instance for general files .
train
false
49,900
def spaceless(parser, token):
    nodelist = parser.parse(('endspaceless',))
    parser.delete_first_token()
    return SpacelessNode(nodelist)
[ "def", "spaceless", "(", "parser", ",", "token", ")", ":", "nodelist", "=", "parser", ".", "parse", "(", "(", "'endspaceless'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "return", "SpacelessNode", "(", "nodelist", ")" ]
normalize whitespace between html tags to a single space .
train
false
49,901
def promoted_build(registry, xml_parent, data):
    promoted = XML.SubElement(xml_parent, 'hudson.plugins.promoted__builds.JobPropertyImpl')
    names = data.get('names', [])
    if names:
        active_processes = XML.SubElement(promoted, 'activeProcessNames')
        for n in names:
            XML.SubElement(active_processes, 'string').text = str(n)
[ "def", "promoted_build", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "promoted", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.promoted__builds.JobPropertyImpl'", ")", "names", "=", "data", ".", "get", "(", "'names'", ",", "[", "]", ")", "if", "names", ":", "active_processes", "=", "XML", ".", "SubElement", "(", "promoted", ",", "'activeProcessNames'", ")", "for", "n", "in", "names", ":", "XML", ".", "SubElement", "(", "active_processes", ",", "'string'", ")", ".", "text", "=", "str", "(", "n", ")" ]
yaml: promoted-build marks a build for promotion .
train
false
49,904
def get_insert_dict(tree, names):
    d = {}
    for n in tree.tips():
        if n.Name and (n.Name in names):
            if n.Name not in d:
                d[n.Name] = []
            d[n.Name].append(n)
    return d
[ "def", "get_insert_dict", "(", "tree", ",", "names", ")", ":", "d", "=", "{", "}", "for", "n", "in", "tree", ".", "tips", "(", ")", ":", "if", "(", "n", ".", "Name", "and", "(", "n", ".", "Name", "in", "names", ")", ")", ":", "if", "(", "n", ".", "Name", "not", "in", "d", ")", ":", "d", "[", "n", ".", "Name", "]", "=", "[", "]", "d", "[", "n", ".", "Name", "]", ".", "append", "(", "n", ")", "return", "d" ]
this function returns the nodes labeled as inserted .
train
false
49,908
def init_parser():
    parser = argparse.ArgumentParser(description='Checks if any upgrade is required and runs the script for the process.')
    parser.add_argument('--keyname', help='The deployment keyname')
    parser.add_argument('--log-postfix', help='An identifier for the status log')
    parser.add_argument('--db-master', required=True, help='The IP address of the DB master')
    parser.add_argument('--zookeeper', nargs='+', help='A list of ZooKeeper IP addresses')
    parser.add_argument('--database', nargs='+', help='A list of DB IP addresses')
    parser.add_argument('--replication', type=int, help='The keyspace replication factor')
    return parser
[ "def", "init_parser", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Checks if any upgrade is required and runs the script for the process.'", ")", "parser", ".", "add_argument", "(", "'--keyname'", ",", "help", "=", "'The deployment keyname'", ")", "parser", ".", "add_argument", "(", "'--log-postfix'", ",", "help", "=", "'An identifier for the status log'", ")", "parser", ".", "add_argument", "(", "'--db-master'", ",", "required", "=", "True", ",", "help", "=", "'The IP address of the DB master'", ")", "parser", ".", "add_argument", "(", "'--zookeeper'", ",", "nargs", "=", "'+'", ",", "help", "=", "'A list of ZooKeeper IP addresses'", ")", "parser", ".", "add_argument", "(", "'--database'", ",", "nargs", "=", "'+'", ",", "help", "=", "'A list of DB IP addresses'", ")", "parser", ".", "add_argument", "(", "'--replication'", ",", "type", "=", "int", ",", "help", "=", "'The keyspace replication factor'", ")", "return", "parser" ]
initializes the command line argument parser .
train
false
49,909
def error_list(train_sents, test_sents):
    hdr = ('%25s | %s | %s\n' + ('-' * 26) + '+' + ('-' * 24) + '+' + ('-' * 26)) % ('left context', 'word/test->gold'.center(22), 'right context')
    errors = [hdr]
    for (train_sent, test_sent) in zip(train_sents, test_sents):
        for (wordnum, (word, train_pos)) in enumerate(train_sent):
            test_pos = test_sent[wordnum][1]
            if train_pos != test_pos:
                left = ' '.join('%s/%s' % w for w in train_sent[:wordnum])
                right = ' '.join('%s/%s' % w for w in train_sent[wordnum + 1:])
                mid = '%s/%s->%s' % (word, test_pos, train_pos)
                errors.append('%25s | %s | %s' % (left[-25:], mid.center(22), right[:25]))
    return errors
[ "def", "error_list", "(", "train_sents", ",", "test_sents", ")", ":", "hdr", "=", "(", "(", "(", "(", "(", "(", "'%25s | %s | %s\\n'", "+", "(", "'-'", "*", "26", ")", ")", "+", "'+'", ")", "+", "(", "'-'", "*", "24", ")", ")", "+", "'+'", ")", "+", "(", "'-'", "*", "26", ")", ")", "%", "(", "'left context'", ",", "'word/test->gold'", ".", "center", "(", "22", ")", ",", "'right context'", ")", ")", "errors", "=", "[", "hdr", "]", "for", "(", "train_sent", ",", "test_sent", ")", "in", "zip", "(", "train_sents", ",", "test_sents", ")", ":", "for", "(", "wordnum", ",", "(", "word", ",", "train_pos", ")", ")", "in", "enumerate", "(", "train_sent", ")", ":", "test_pos", "=", "test_sent", "[", "wordnum", "]", "[", "1", "]", "if", "(", "train_pos", "!=", "test_pos", ")", ":", "left", "=", "' '", ".", "join", "(", "(", "(", "'%s/%s'", "%", "w", ")", "for", "w", "in", "train_sent", "[", ":", "wordnum", "]", ")", ")", "right", "=", "' '", ".", "join", "(", "(", "(", "'%s/%s'", "%", "w", ")", "for", "w", "in", "train_sent", "[", "(", "wordnum", "+", "1", ")", ":", "]", ")", ")", "mid", "=", "(", "'%s/%s->%s'", "%", "(", "word", ",", "test_pos", ",", "train_pos", ")", ")", "errors", ".", "append", "(", "(", "'%25s | %s | %s'", "%", "(", "left", "[", "(", "-", "25", ")", ":", "]", ",", "mid", ".", "center", "(", "22", ")", ",", "right", "[", ":", "25", "]", ")", ")", ")", "return", "errors" ]
returns a list of human-readable strings indicating the errors in the given tagging of the corpus .
train
false
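error_list takes gold-standard and machine-tagged sentences as lists of (word, tag) pairs; mismatches are rendered as word/test->gold with context:

gold = [[('the', 'DT'), ('dog', 'NN'), ('barks', 'VBZ')]]
tagged = [[('the', 'DT'), ('dog', 'NN'), ('barks', 'NNS')]]
for line in error_list(gold, tagged):
    print(line)  # the mismatch row shows barks/NNS->VBZ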
49,910
def nopackages(pkg_list):
    pkg_list = [pkg for pkg in pkg_list if is_installed(pkg)]
    if pkg_list:
        uninstall(pkg_list)
[ "def", "nopackages", "(", "pkg_list", ")", ":", "pkg_list", "=", "[", "pkg", "for", "pkg", "in", "pkg_list", "if", "is_installed", "(", "pkg", ")", "]", "if", "pkg_list", ":", "uninstall", "(", "pkg_list", ")" ]
require several deb packages to be uninstalled .
train
false
49,913
def _find_display():
    display = 1
    while os.path.exists('/tmp/.X{0}-lock'.format(display)):
        display += 1
    return display
[ "def", "_find_display", "(", ")", ":", "display", "=", "1", "while", "os", ".", "path", ".", "exists", "(", "'/tmp/.X{0}-lock'", ".", "format", "(", "display", ")", ")", ":", "display", "+=", "1", "return", "display" ]
returns the next available display .
train
false
49,914
def run_process(command, *args, **kwargs):
    kwargs['stdout'] = PIPE
    kwargs['stderr'] = STDOUT
    action = start_action(action_type='run_process', command=command, args=args, kwargs=kwargs)
    with action:
        process = Popen(command, *args, **kwargs)
        output = process.stdout.read()
        status = process.wait()
        result = _ProcessResult(command=command, output=output, status=status)
        Message.new(command=result.command, output=result.output, status=result.status).write()
        if result.status:
            raise _CalledProcessError(returncode=status, cmd=command, output=output)
        return result
[ "def", "run_process", "(", "command", ",", "*", "args", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'stdout'", "]", "=", "PIPE", "kwargs", "[", "'stderr'", "]", "=", "STDOUT", "action", "=", "start_action", "(", "action_type", "=", "'run_process'", ",", "command", "=", "command", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", "with", "action", ":", "process", "=", "Popen", "(", "command", ",", "*", "args", ",", "**", "kwargs", ")", "output", "=", "process", ".", "stdout", ".", "read", "(", ")", "status", "=", "process", ".", "wait", "(", ")", "result", "=", "_ProcessResult", "(", "command", "=", "command", ",", "output", "=", "output", ",", "status", "=", "status", ")", "Message", ".", "new", "(", "command", "=", "result", ".", "command", ",", "output", "=", "result", ".", "output", ",", "status", "=", "result", ".", "status", ")", ".", "write", "(", ")", "if", "result", ".", "status", ":", "raise", "_CalledProcessError", "(", "returncode", "=", "status", ",", "cmd", "=", "command", ",", "output", "=", "output", ")", "return", "result" ]
run a child process .
train
false
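caller-side sketch for run_process; stderr is merged into stdout and a non-zero exit raises instead of returning:

result = run_process(['echo', 'hello'])
print(result.status)  # 0
print(result.output)  # 'hello\n'
# run_process(['false']) would raise _CalledProcessError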
49,915
def process_mapping_file(map_f, barcode_len, barcode_type, BARCODE_COLUMN, REVERSE_PRIMER_COLUMN):
    (_, _, bc_to_sid, _, _, bc_to_fwd_primers, _) = check_map(map_f, False)
    map_f.seek(0)
    metadata_map = parse_mapping_file_to_dict(map_f)[0]
    bc_to_rev_primers = {}
    for (sid, md) in metadata_map.items():
        if REVERSE_PRIMER_COLUMN in md:
            bc_to_rev_primers[md[BARCODE_COLUMN]] = expand_degeneracies(md[REVERSE_PRIMER_COLUMN].upper().split(','))
        else:
            raise Exception('The %s column does not exist in the mapping file. %s is required.' % (REVERSE_PRIMER_COLUMN, REVERSE_PRIMER_COLUMN))
    check_barcodes(bc_to_sid, barcode_len, barcode_type)
    return (bc_to_sid, bc_to_fwd_primers, bc_to_rev_primers)
[ "def", "process_mapping_file", "(", "map_f", ",", "barcode_len", ",", "barcode_type", ",", "BARCODE_COLUMN", ",", "REVERSE_PRIMER_COLUMN", ")", ":", "(", "_", ",", "_", ",", "bc_to_sid", ",", "_", ",", "_", ",", "bc_to_fwd_primers", ",", "_", ")", "=", "check_map", "(", "map_f", ",", "False", ")", "map_f", ".", "seek", "(", "0", ")", "metadata_map", "=", "parse_mapping_file_to_dict", "(", "map_f", ")", "[", "0", "]", "bc_to_rev_primers", "=", "{", "}", "for", "(", "sid", ",", "md", ")", "in", "metadata_map", ".", "items", "(", ")", ":", "if", "(", "REVERSE_PRIMER_COLUMN", "in", "md", ")", ":", "bc_to_rev_primers", "[", "md", "[", "BARCODE_COLUMN", "]", "]", "=", "expand_degeneracies", "(", "md", "[", "REVERSE_PRIMER_COLUMN", "]", ".", "upper", "(", ")", ".", "split", "(", "','", ")", ")", "else", ":", "raise", "Exception", "(", "(", "'The %s column does not exist in the mapping file. %s is required.'", "%", "(", "REVERSE_PRIMER_COLUMN", ",", "REVERSE_PRIMER_COLUMN", ")", ")", ")", "check_barcodes", "(", "bc_to_sid", ",", "barcode_len", ",", "barcode_type", ")", "return", "(", "bc_to_sid", ",", "bc_to_fwd_primers", ",", "bc_to_rev_primers", ")" ]
ensures that sample ids and barcodes are unique .
train
false
49,916
def _UsecToSec(t): return (t / 1000000.0)
[ "def", "_UsecToSec", "(", "t", ")", ":", "return", "(", "t", "/", "1000000.0", ")" ]
converts a time in usec since the epoch to seconds since the epoch .
train
false
49,917
def get_redirect_flv_stream_url(url):
    (host, path, params, query) = urlparse.urlparse(url)[1:5]
    try:
        conn = httplib.HTTPConnection(host)
        conn.request('HEAD', path + '?' + query)
        return conn.getresponse().getheader('location')
    except StandardError:
        return None
[ "def", "get_redirect_flv_stream_url", "(", "url", ")", ":", "(", "host", ",", "path", ",", "params", ",", "query", ")", "=", "urlparse", ".", "urlparse", "(", "url", ")", "[", "1", ":", "5", "]", "try", ":", "conn", "=", "httplib", ".", "HTTPConnection", "(", "host", ")", "conn", ".", "request", "(", "'HEAD'", ",", "(", "(", "path", "+", "'?'", ")", "+", "query", ")", ")", "return", "conn", ".", "getresponse", "(", ")", ".", "getheader", "(", "'location'", ")", "except", "StandardError", ":", "return", "None" ]
al qahera al youm's server redirects a video url to a flv file location most of the time .
train
false
49,918
def gen_downloads(app_dir, md5):
    try:
        print '[INFO] Generating Downloads'
        directory = os.path.join(app_dir, 'java_source/')
        dwd_dir = os.path.join(settings.DWD_DIR, md5 + '-java.zip')
        zipf = zipfile.ZipFile(dwd_dir, 'w')
        zipdir(directory, zipf)
        zipf.close()
        directory = os.path.join(app_dir, 'smali_source/')
        dwd_dir = os.path.join(settings.DWD_DIR, md5 + '-smali.zip')
        zipf = zipfile.ZipFile(dwd_dir, 'w')
        zipdir(directory, zipf)
        zipf.close()
    except:
        PrintException('[ERROR] Generating Downloads')
[ "def", "gen_downloads", "(", "app_dir", ",", "md5", ")", ":", "try", ":", "print", "'[INFO] Generating Downloads'", "directory", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'java_source/'", ")", "dwd_dir", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "DWD_DIR", ",", "(", "md5", "+", "'-java.zip'", ")", ")", "zipf", "=", "zipfile", ".", "ZipFile", "(", "dwd_dir", ",", "'w'", ")", "zipdir", "(", "directory", ",", "zipf", ")", "zipf", ".", "close", "(", ")", "directory", "=", "os", ".", "path", ".", "join", "(", "app_dir", ",", "'smali_source/'", ")", "dwd_dir", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "DWD_DIR", ",", "(", "md5", "+", "'-smali.zip'", ")", ")", "zipf", "=", "zipfile", ".", "ZipFile", "(", "dwd_dir", ",", "'w'", ")", "zipdir", "(", "directory", ",", "zipf", ")", "zipf", ".", "close", "(", ")", "except", ":", "PrintException", "(", "'[ERROR] Generating Downloads'", ")" ]
generate downloads for java and smali .
train
false
49,919
def is_rdn_equal(rdn1, rdn2):
    if len(rdn1) != len(rdn2):
        return False
    for (attr_type_1, val1, dummy) in rdn1:
        found = False
        for (attr_type_2, val2, dummy) in rdn2:
            if attr_type_1.lower() != attr_type_2.lower():
                continue
            found = True
            if not is_ava_value_equal(attr_type_1, val1, val2):
                return False
            break
        if not found:
            return False
    return True
[ "def", "is_rdn_equal", "(", "rdn1", ",", "rdn2", ")", ":", "if", "(", "len", "(", "rdn1", ")", "!=", "len", "(", "rdn2", ")", ")", ":", "return", "False", "for", "(", "attr_type_1", ",", "val1", ",", "dummy", ")", "in", "rdn1", ":", "found", "=", "False", "for", "(", "attr_type_2", ",", "val2", ",", "dummy", ")", "in", "rdn2", ":", "if", "(", "attr_type_1", ".", "lower", "(", ")", "!=", "attr_type_2", ".", "lower", "(", ")", ")", ":", "continue", "found", "=", "True", "if", "(", "not", "is_ava_value_equal", "(", "attr_type_1", ",", "val1", ",", "val2", ")", ")", ":", "return", "False", "break", "if", "(", "not", "found", ")", ":", "return", "False", "return", "True" ]
return true if and only if the rdns are equal .
train
false
49,920
def add_volume_type_access(context, volume_type_id, project_id):
    if volume_type_id is None:
        msg = _('volume_type_id cannot be None')
        raise exception.InvalidVolumeType(reason=msg)
    elevated = context if context.is_admin else context.elevated()
    if is_public_volume_type(elevated, volume_type_id):
        msg = _('Type access modification is not applicable to public volume type.')
        raise exception.InvalidVolumeType(reason=msg)
    db.volume_type_access_add(elevated, volume_type_id, project_id)
    notify_about_volume_type_access_usage(context, volume_type_id, project_id, 'access.add')
[ "def", "add_volume_type_access", "(", "context", ",", "volume_type_id", ",", "project_id", ")", ":", "if", "(", "volume_type_id", "is", "None", ")", ":", "msg", "=", "_", "(", "'volume_type_id cannot be None'", ")", "raise", "exception", ".", "InvalidVolumeType", "(", "reason", "=", "msg", ")", "elevated", "=", "(", "context", "if", "context", ".", "is_admin", "else", "context", ".", "elevated", "(", ")", ")", "if", "is_public_volume_type", "(", "elevated", ",", "volume_type_id", ")", ":", "msg", "=", "_", "(", "'Type access modification is not applicable to public volume type.'", ")", "raise", "exception", ".", "InvalidVolumeType", "(", "reason", "=", "msg", ")", "db", ".", "volume_type_access_add", "(", "elevated", ",", "volume_type_id", ",", "project_id", ")", "notify_about_volume_type_access_usage", "(", "context", ",", "volume_type_id", ",", "project_id", ",", "'access.add'", ")" ]
add access to volume type for project_id .
train
false
49,922
def new_promotion(is_self, title, content, author, ip):
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(is_self=is_self, title=title, content=content, author=author, sr=sr, ip=ip)
    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')
    update_promote_status(l, PROMOTE_STATUS.unpaid)
    if author.pref_show_promote is not False:
        author.pref_show_promote = True
        author._commit()
    emailer.new_promo(l)
    return l
[ "def", "new_promotion", "(", "is_self", ",", "title", ",", "content", ",", "author", ",", "ip", ")", ":", "sr", "=", "Subreddit", ".", "_byID", "(", "Subreddit", ".", "get_promote_srid", "(", ")", ")", "l", "=", "Link", ".", "_submit", "(", "is_self", "=", "is_self", ",", "title", "=", "title", ",", "content", "=", "content", ",", "author", "=", "author", ",", "sr", "=", "sr", ",", "ip", "=", "ip", ")", "l", ".", "promoted", "=", "True", "l", ".", "disable_comments", "=", "False", "l", ".", "sendreplies", "=", "True", "PromotionLog", ".", "add", "(", "l", ",", "'promotion created'", ")", "update_promote_status", "(", "l", ",", "PROMOTE_STATUS", ".", "unpaid", ")", "if", "(", "author", ".", "pref_show_promote", "is", "not", "False", ")", ":", "author", ".", "pref_show_promote", "=", "True", "author", ".", "_commit", "(", ")", "emailer", ".", "new_promo", "(", "l", ")", "return", "l" ]
creates a new promotion with the provided title .
train
false
49,924
def prepare_model(ninputs=9600, nclass=5):
    lrmodel = Sequential()
    lrmodel.add(Dense(ninputs, nclass))
    lrmodel.add(Activation('softmax'))
    lrmodel.compile(loss='categorical_crossentropy', optimizer='adam')
    return lrmodel
[ "def", "prepare_model", "(", "ninputs", "=", "9600", ",", "nclass", "=", "5", ")", ":", "lrmodel", "=", "Sequential", "(", ")", "lrmodel", ".", "add", "(", "Dense", "(", "ninputs", ",", "nclass", ")", ")", "lrmodel", ".", "add", "(", "Activation", "(", "'softmax'", ")", ")", "lrmodel", ".", "compile", "(", "loss", "=", "'categorical_crossentropy'", ",", "optimizer", "=", "'adam'", ")", "return", "lrmodel" ]
set up and compile the model architecture .
train
false
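note that Dense(ninputs, nclass) is the old keras 0.x signature (input dim, output dim); a usage sketch under that assumption, with hypothetical skip-thought features:

lrmodel = prepare_model(ninputs=9600, nclass=5)
# features: (n_samples, 9600) array, labels: one-hot (n_samples, 5)
lrmodel.fit(features, labels, nb_epoch=10)  # nb_epoch is the keras 0.x spelling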