id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
19,292
def test_deriv2():
    """Check link function second derivatives against numeric differentiation."""
    np.random.seed(24235)
    for link in Links:
        # probit is excluded from this check.
        if type(link) == type(probit):
            continue
        for _ in range(10):
            prob = np.random.uniform(0, 1)
            prob = np.clip(prob, 0.01, 0.99)
            if type(link) == type(cauchy):
                # keep cauchy further from the boundaries, where it is
                # numerically less stable
                prob = np.clip(prob, 0.03, 0.97)
            analytic = link.deriv2(prob)
            numeric = nd.approx_fprime(np.r_[prob], link.deriv)
            assert_allclose(analytic, numeric, rtol=1e-06, atol=1e-06,
                            err_msg=str(link))
[ "def", "test_deriv2", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "24235", ")", "for", "link", "in", "Links", ":", "if", "(", "type", "(", "link", ")", "==", "type", "(", "probit", ")", ")", ":", "continue", "for", "k", "in", "range", "(", "10", ")", ":", "p", "=", "np", ".", "random", ".", "uniform", "(", "0", ",", "1", ")", "p", "=", "np", ".", "clip", "(", "p", ",", "0.01", ",", "0.99", ")", "if", "(", "type", "(", "link", ")", "==", "type", "(", "cauchy", ")", ")", ":", "p", "=", "np", ".", "clip", "(", "p", ",", "0.03", ",", "0.97", ")", "d", "=", "link", ".", "deriv2", "(", "p", ")", "da", "=", "nd", ".", "approx_fprime", "(", "np", ".", "r_", "[", "p", "]", ",", "link", ".", "deriv", ")", "assert_allclose", "(", "d", ",", "da", ",", "rtol", "=", "1e-06", ",", "atol", "=", "1e-06", ",", "err_msg", "=", "str", "(", "link", ")", ")" ]
check link function second derivatives using numeric differentiation .
train
false
19,293
def get_current_version(projdir=PROJDIR, pattern=PATTERN, logger=None):
    """Return the version parsed from the most recent tag.

    Falls back to the raw tag text when *pattern* does not yield a
    capture group, and to ``None`` when no tags exist at all.
    """
    tags = get_recent_tags(projdir)
    try:
        tag = tags[0][0]
    except IndexError:
        # no tags in the repository
        return None
    match = re.match(pattern, tag)
    try:
        return match.group(1)
    except (IndexError, AttributeError) as err:
        # pattern did not match (match is None) or has no group 1;
        # log if possible and fall back to the raw tag string
        if logger:
            logger.exception(err)
        return tag
[ "def", "get_current_version", "(", "projdir", "=", "PROJDIR", ",", "pattern", "=", "PATTERN", ",", "logger", "=", "None", ")", ":", "tags", "=", "get_recent_tags", "(", "projdir", ")", "try", ":", "tag", "=", "tags", "[", "0", "]", "[", "0", "]", "except", "IndexError", ":", "return", "matches", "=", "re", ".", "match", "(", "pattern", ",", "tag", ")", "try", ":", "current_version", "=", "matches", ".", "group", "(", "1", ")", "except", "(", "IndexError", ",", "AttributeError", ")", "as", "err", ":", "if", "logger", ":", "logger", ".", "exception", "(", "err", ")", "return", "tag", "return", "current_version" ]
return the most recent tag .
train
false
19,294
def to_gpuarray(x, copyif=False):
    """Take a CudaNdarray and return a pycuda.gpuarray.GPUArray viewing the same device memory.

    :param x: a cuda.CudaNdarray.
    :param copyif: when True, copy ``x`` into a contiguous array if it is not
        c-contiguous; when False, raise instead of copying.
    :raises ValueError: if ``x`` is not a CudaNdarray, or is not c-contiguous
        while ``copyif`` is False.
    """
    if (not isinstance(x, cuda.CudaNdarray)):
        raise ValueError('We can transfer only CudaNdarray to pycuda.gpuarray.GPUArray')
    else:
        # Check c-contiguity by walking dimensions from innermost to outermost,
        # skipping size-1 dims whose stride is irrelevant.
        # NOTE(review): assumes x._strides is expressed in elements, matching
        # the accumulated `size` — confirm against CudaNdarray internals.
        size = 1
        c_contiguous = True
        for i in range((x.ndim - 1), (-1), (-1)):
            if (x.shape[i] == 1):
                continue
            if (x._strides[i] != size):
                c_contiguous = False
                break
            size *= x.shape[i]
        if (not c_contiguous):
            if copyif:
                x = x.copy()
            else:
                raise ValueError('We were asked to not copy memory, but the memory is not c contiguous.')
        # Share x's device memory; base=x keeps x alive as long as px exists.
        px = pycuda.gpuarray.GPUArray(x.shape, x.dtype, base=x, gpudata=x.gpudata)
        return px
[ "def", "to_gpuarray", "(", "x", ",", "copyif", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "x", ",", "cuda", ".", "CudaNdarray", ")", ")", ":", "raise", "ValueError", "(", "'We can transfer only CudaNdarray to pycuda.gpuarray.GPUArray'", ")", "else", ":", "size", "=", "1", "c_contiguous", "=", "True", "for", "i", "in", "range", "(", "(", "x", ".", "ndim", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", ":", "if", "(", "x", ".", "shape", "[", "i", "]", "==", "1", ")", ":", "continue", "if", "(", "x", ".", "_strides", "[", "i", "]", "!=", "size", ")", ":", "c_contiguous", "=", "False", "break", "size", "*=", "x", ".", "shape", "[", "i", "]", "if", "(", "not", "c_contiguous", ")", ":", "if", "copyif", ":", "x", "=", "x", ".", "copy", "(", ")", "else", ":", "raise", "ValueError", "(", "'We were asked to not copy memory, but the memory is not c contiguous.'", ")", "px", "=", "pycuda", ".", "gpuarray", ".", "GPUArray", "(", "x", ".", "shape", ",", "x", ".", "dtype", ",", "base", "=", "x", ",", "gpudata", "=", "x", ".", "gpudata", ")", "return", "px" ]
take a CudaNdarray and return a pycuda.gpuarray.GPUArray sharing the same memory .
train
false
19,295
def instance_group_delete(context, group_uuid):
    """Delete an instance group, delegating to the configured backend IMPL."""
    return IMPL.instance_group_delete(context, group_uuid)
[ "def", "instance_group_delete", "(", "context", ",", "group_uuid", ")", ":", "return", "IMPL", ".", "instance_group_delete", "(", "context", ",", "group_uuid", ")" ]
delete an instance group .
train
false
19,297
def renderer(path=None, string=None, default_renderer='jinja|yaml', **kwargs):
    """Parse a file or string through Salt's renderer system.

    Exactly one of *path* or *string* must be provided; raises
    SaltInvocationError when neither is given.
    """
    if not (path or string):
        raise salt.exceptions.SaltInvocationError('Must pass either path or string')
    renderers = salt.loader.render(__opts__, __salt__)
    if path:
        target = path
    else:
        # string is truthy here (the guard above already ran)
        target = ':string:'
        kwargs['input_data'] = string
    return salt.template.compile_template(
        target,
        renderers,
        default_renderer,
        __opts__['renderer_blacklist'],
        __opts__['renderer_whitelist'],
        **kwargs)
[ "def", "renderer", "(", "path", "=", "None", ",", "string", "=", "None", ",", "default_renderer", "=", "'jinja|yaml'", ",", "**", "kwargs", ")", ":", "if", "(", "(", "not", "path", ")", "and", "(", "not", "string", ")", ")", ":", "raise", "salt", ".", "exceptions", ".", "SaltInvocationError", "(", "'Must pass either path or string'", ")", "renderers", "=", "salt", ".", "loader", ".", "render", "(", "__opts__", ",", "__salt__", ")", "if", "path", ":", "path_or_string", "=", "path", "elif", "string", ":", "path_or_string", "=", "':string:'", "kwargs", "[", "'input_data'", "]", "=", "string", "return", "salt", ".", "template", ".", "compile_template", "(", "path_or_string", ",", "renderers", ",", "default_renderer", ",", "__opts__", "[", "'renderer_blacklist'", "]", ",", "__opts__", "[", "'renderer_whitelist'", "]", ",", "**", "kwargs", ")" ]
parse a string or file through salts renderer system this is an open-ended function and can be used for a variety of tasks .
train
false
19,299
def case_flag():
    """Case flags: RESTful CRUD controller."""
    return s3_rest_controller()
[ "def", "case_flag", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
case flags: restful crud controller .
train
false
19,300
def move_to_trash(prefix, f, tempdir=None):
    """Move *prefix* (or *prefix*/*f* when *f* is truthy) to the trash.

    ``tempdir`` is a deprecated parameter and is ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
[ "def", "move_to_trash", "(", "prefix", ",", "f", ",", "tempdir", "=", "None", ")", ":", "return", "move_path_to_trash", "(", "(", "join", "(", "prefix", ",", "f", ")", "if", "f", "else", "prefix", ")", ")" ]
move a file or folder f from prefix to the trash tempdir is a deprecated parameter .
train
false
19,301
def dataprovider_factory(name, settings=None):
    """Mark a class method as a dataprovider factory named *name*.

    Attaches the provider name, the *settings* dict, and a
    ``parse_query_string_settings`` helper (which parses query-string
    kwargs against *settings*) onto the wrapped method.
    """
    def parse_query_string_settings(query_kwargs):
        # Defer to the module-level parser, binding this factory's settings.
        return _parse_query_string_settings(query_kwargs, settings)

    def named_dataprovider_factory(func):
        # The name key is dynamic, so setattr is required here.
        setattr(func, _DATAPROVIDER_METHOD_NAME_KEY, name)
        func.parse_query_string_settings = parse_query_string_settings
        func.settings = settings

        @wraps(func)
        def wrapped_dataprovider_factory(self, *args, **kwargs):
            return func(self, *args, **kwargs)

        return wrapped_dataprovider_factory

    return named_dataprovider_factory
[ "def", "dataprovider_factory", "(", "name", ",", "settings", "=", "None", ")", ":", "def", "parse_query_string_settings", "(", "query_kwargs", ")", ":", "return", "_parse_query_string_settings", "(", "query_kwargs", ",", "settings", ")", "def", "named_dataprovider_factory", "(", "func", ")", ":", "setattr", "(", "func", ",", "_DATAPROVIDER_METHOD_NAME_KEY", ",", "name", ")", "setattr", "(", "func", ",", "'parse_query_string_settings'", ",", "parse_query_string_settings", ")", "setattr", "(", "func", ",", "'settings'", ",", "settings", ")", "@", "wraps", "(", "func", ")", "def", "wrapped_dataprovider_factory", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapped_dataprovider_factory", "return", "named_dataprovider_factory" ]
wraps a class method and marks it as a dataprovider factory and creates a function to parse query strings to __init__ arguments as the parse_query_string_settings attribute of the factory function .
train
false
19,302
def test_install_with_pax_header(script, data):
    """Test installing from a tarball that carries a pax header."""
    script.pip('install', 'paxpkg.tar.bz2', cwd=data.packages)
[ "def", "test_install_with_pax_header", "(", "script", ",", "data", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'paxpkg.tar.bz2'", ",", "cwd", "=", "data", ".", "packages", ")" ]
test installing from a tarball with pax header for python<2 .
train
false
19,303
def test_to_string():
    """Basic testing of converting SkyCoord to strings."""
    coord = u'1h2m3s 1d2m3s'
    # Exercise both a scalar coordinate and a length-1 list of coordinates.
    for wrap in ((lambda x: x), (lambda x: [x])):
        sc = SkyCoord(wrap(coord))
        assert sc.to_string() == wrap(u'15.5125 1.03417')
        assert sc.to_string(u'dms') == wrap(u'15d30m45s 1d02m03s')
        assert sc.to_string(u'hmsdms') == wrap(u'01h02m03s +01d02m03s')
        got = sc.to_string(u'hmsdms', precision=3, pad=True, alwayssign=True)
        assert got == wrap(u'+01h02m03.000s +01d02m03.000s')
[ "def", "test_to_string", "(", ")", ":", "coord", "=", "u'1h2m3s 1d2m3s'", "for", "wrap", "in", "(", "(", "lambda", "x", ":", "x", ")", ",", "(", "lambda", "x", ":", "[", "x", "]", ")", ")", ":", "sc", "=", "SkyCoord", "(", "wrap", "(", "coord", ")", ")", "assert", "(", "sc", ".", "to_string", "(", ")", "==", "wrap", "(", "u'15.5125 1.03417'", ")", ")", "assert", "(", "sc", ".", "to_string", "(", "u'dms'", ")", "==", "wrap", "(", "u'15d30m45s 1d02m03s'", ")", ")", "assert", "(", "sc", ".", "to_string", "(", "u'hmsdms'", ")", "==", "wrap", "(", "u'01h02m03s +01d02m03s'", ")", ")", "with_kwargs", "=", "sc", ".", "to_string", "(", "u'hmsdms'", ",", "precision", "=", "3", ",", "pad", "=", "True", ",", "alwayssign", "=", "True", ")", "assert", "(", "with_kwargs", "==", "wrap", "(", "u'+01h02m03.000s +01d02m03.000s'", ")", ")" ]
basic testing of converting skycoord to strings .
train
false
19,304
def callable_reference(object, callback=None):
    """Return an annotated weak reference to *object*.

    Bound methods (detected via ``im_self`` on Python 2 or ``__self__``
    on Python 3) get a BoundMethodWeakref; everything else gets a plain
    annotatable weakref.
    """
    for self_attr in ('im_self', '__self__'):
        if getattr(object, self_attr, None) is not None:
            return BoundMethodWeakref(target=object, on_delete=callback)
    return annotatable_weakref(object, callback)
[ "def", "callable_reference", "(", "object", ",", "callback", "=", "None", ")", ":", "if", "(", "hasattr", "(", "object", ",", "'im_self'", ")", "and", "(", "object", ".", "im_self", "is", "not", "None", ")", ")", ":", "return", "BoundMethodWeakref", "(", "target", "=", "object", ",", "on_delete", "=", "callback", ")", "elif", "(", "hasattr", "(", "object", ",", "'__self__'", ")", "and", "(", "object", ".", "__self__", "is", "not", "None", ")", ")", ":", "return", "BoundMethodWeakref", "(", "target", "=", "object", ",", "on_delete", "=", "callback", ")", "return", "annotatable_weakref", "(", "object", ",", "callback", ")" ]
return an annotated weak ref .
train
false
19,305
def get_comma_sep_string_from_list(items):
    """Turn a list of items into a comma-separated string ending in 'and'.

    Returns '' for an empty list and the single item for a one-element list.
    """
    if not items:
        return ''
    if len(items) == 1:
        return items[0]
    head = ', '.join(items[:-1])
    return '%s and %s' % (head, items[-1])
[ "def", "get_comma_sep_string_from_list", "(", "items", ")", ":", "if", "(", "not", "items", ")", ":", "return", "''", "if", "(", "len", "(", "items", ")", "==", "1", ")", ":", "return", "items", "[", "0", "]", "return", "(", "'%s and %s'", "%", "(", "', '", ".", "join", "(", "items", "[", ":", "(", "-", "1", ")", "]", ")", ",", "items", "[", "(", "-", "1", ")", "]", ")", ")" ]
turns a list of items into a comma-separated string .
train
false
19,306
def send_instant_feedback_message_email(recipient_id, sender_id, message, email_subject, exploration_title, exploration_id, thread_title):
    """Send an email when a new message is posted to a feedback thread.

    Args:
        recipient_id: id of the user receiving the notification.
        sender_id: id of the user who posted the message.
        message: text of the new feedback message.
        email_subject: subject line for the email.
        exploration_title: title of the exploration the thread belongs to.
        exploration_id: id of that exploration (used to build the link).
        thread_title: title of the feedback thread.

    No-op when email sending is disabled globally or for feedback
    messages, or when the recipient has opted out of such emails.
    """
    email_body_template = 'Hi %s,<br><br>New update to thread "%s" on <a href="https://www.oppia.org/create/%s#/feedback">%s</a>:<br><ul><li>%s: %s<br></li></ul>(You received this message because you are a participant in this thread.)<br><br>Best wishes,<br>The Oppia team<br><br>%s'
    if (not feconf.CAN_SEND_EMAILS):
        log_new_error('This app cannot send emails to users.')
        return
    if (not feconf.CAN_SEND_FEEDBACK_MESSAGE_EMAILS):
        log_new_error('This app cannot send feedback message emails to users.')
        return
    sender_settings = user_services.get_user_settings(sender_id)
    recipient_settings = user_services.get_user_settings(recipient_id)
    recipient_preferences = user_services.get_email_preferences(recipient_id)
    # Honor the recipient's opt-in preference before sending anything.
    if recipient_preferences.can_receive_feedback_message_email:
        email_body = (email_body_template % (recipient_settings.username, thread_title, exploration_id, exploration_title, sender_settings.username, message, EMAIL_FOOTER.value))
        _send_email(recipient_id, feconf.SYSTEM_COMMITTER_ID, feconf.EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION, email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)
[ "def", "send_instant_feedback_message_email", "(", "recipient_id", ",", "sender_id", ",", "message", ",", "email_subject", ",", "exploration_title", ",", "exploration_id", ",", "thread_title", ")", ":", "email_body_template", "=", "'Hi %s,<br><br>New update to thread \"%s\" on <a href=\"https://www.oppia.org/create/%s#/feedback\">%s</a>:<br><ul><li>%s: %s<br></li></ul>(You received this message because you are a participant in this thread.)<br><br>Best wishes,<br>The Oppia team<br><br>%s'", "if", "(", "not", "feconf", ".", "CAN_SEND_EMAILS", ")", ":", "log_new_error", "(", "'This app cannot send emails to users.'", ")", "return", "if", "(", "not", "feconf", ".", "CAN_SEND_FEEDBACK_MESSAGE_EMAILS", ")", ":", "log_new_error", "(", "'This app cannot send feedback message emails to users.'", ")", "return", "sender_settings", "=", "user_services", ".", "get_user_settings", "(", "sender_id", ")", "recipient_settings", "=", "user_services", ".", "get_user_settings", "(", "recipient_id", ")", "recipient_preferences", "=", "user_services", ".", "get_email_preferences", "(", "recipient_id", ")", "if", "recipient_preferences", ".", "can_receive_feedback_message_email", ":", "email_body", "=", "(", "email_body_template", "%", "(", "recipient_settings", ".", "username", ",", "thread_title", ",", "exploration_id", ",", "exploration_title", ",", "sender_settings", ".", "username", ",", "message", ",", "EMAIL_FOOTER", ".", "value", ")", ")", "_send_email", "(", "recipient_id", ",", "feconf", ".", "SYSTEM_COMMITTER_ID", ",", "feconf", ".", "EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION", ",", "email_subject", ",", "email_body", ",", "feconf", ".", "NOREPLY_EMAIL_ADDRESS", ")" ]
send an email when a new message is posted to a feedback thread .
train
false
19,307
def setup_paste_factories(conf):
    """Set up the module-level generic paste app and filter factories."""
    global app_factory, filter_factory
    filter_factory = FilterFactory(conf)
    app_factory = AppFactory(conf)
[ "def", "setup_paste_factories", "(", "conf", ")", ":", "global", "app_factory", ",", "filter_factory", "app_factory", "=", "AppFactory", "(", "conf", ")", "filter_factory", "=", "FilterFactory", "(", "conf", ")" ]
set up the generic paste app and filter factories .
train
false
19,308
def process_image(img, scale, isotropic, crop, mean):
    """Resize, center-crop, and mean-subtract an image tensor.

    :param img: image tensor; indexing below uses (height, width, channels).
    :param scale: target size for the resize step.
    :param isotropic: when True, scale so the shorter side equals *scale*,
        preserving aspect ratio; otherwise resize to scale x scale.
    :param crop: side length of the centered square crop.
    :param mean: value subtracted from the cropped image.
        NOTE(review): assumes *mean* broadcasts against (crop, crop, C) —
        confirm its shape with callers.
    """
    if isotropic:
        img_shape = tf.to_float(tf.shape(img)[:2])
        min_length = tf.minimum(img_shape[0], img_shape[1])
        new_shape = tf.to_int32(((scale / min_length) * img_shape))
    else:
        new_shape = tf.pack([scale, scale])
    # NOTE(review): tf.pack / positional resize_images are TF 0.x APIs.
    img = tf.image.resize_images(img, new_shape[0], new_shape[1])
    # Center the crop window within the resized image.
    offset = ((new_shape - crop) / 2)
    img = tf.slice(img, begin=tf.pack([offset[0], offset[1], 0]), size=tf.pack([crop, crop, (-1)]))
    return (tf.to_float(img) - mean)
[ "def", "process_image", "(", "img", ",", "scale", ",", "isotropic", ",", "crop", ",", "mean", ")", ":", "if", "isotropic", ":", "img_shape", "=", "tf", ".", "to_float", "(", "tf", ".", "shape", "(", "img", ")", "[", ":", "2", "]", ")", "min_length", "=", "tf", ".", "minimum", "(", "img_shape", "[", "0", "]", ",", "img_shape", "[", "1", "]", ")", "new_shape", "=", "tf", ".", "to_int32", "(", "(", "(", "scale", "/", "min_length", ")", "*", "img_shape", ")", ")", "else", ":", "new_shape", "=", "tf", ".", "pack", "(", "[", "scale", ",", "scale", "]", ")", "img", "=", "tf", ".", "image", ".", "resize_images", "(", "img", ",", "new_shape", "[", "0", "]", ",", "new_shape", "[", "1", "]", ")", "offset", "=", "(", "(", "new_shape", "-", "crop", ")", "/", "2", ")", "img", "=", "tf", ".", "slice", "(", "img", ",", "begin", "=", "tf", ".", "pack", "(", "[", "offset", "[", "0", "]", ",", "offset", "[", "1", "]", ",", "0", "]", ")", ",", "size", "=", "tf", ".", "pack", "(", "[", "crop", ",", "crop", ",", "(", "-", "1", ")", "]", ")", ")", "return", "(", "tf", ".", "to_float", "(", "img", ")", "-", "mean", ")" ]
process a source pil image through a series of image processors .
train
false
19,309
def reverse_field_path(model, path):
    """Create a reversed field path.

    Given *model* and a LOOKUP_SEP-joined *path* of field lookups starting
    from it, return ``(target_model, reversed_path)`` where
    ``reversed_path`` walks from the target model back toward *model*.
    """
    reversed_path = []
    parent = model
    pieces = path.split(LOOKUP_SEP)
    for piece in pieces:
        field = parent._meta.get_field(piece)
        # Direct fields are declared on the model itself (not auto-created
        # reverse accessors), or are concrete columns.
        direct = ((not field.auto_created) or field.concrete)
        if (len(reversed_path) == (len(pieces) - 1)):
            # The last lookup may be a plain (non-relational) field; in that
            # case stop without adding it to the reversed path.
            try:
                get_model_from_relation(field)
            except NotRelationField:
                break
        if direct:
            related_name = field.related_query_name()
            parent = field.rel.to
        else:
            related_name = field.field.name
            parent = field.model
        # Prepend so the accumulated path reads target -> ... -> model.
        reversed_path.insert(0, related_name)
    return (parent, LOOKUP_SEP.join(reversed_path))
[ "def", "reverse_field_path", "(", "model", ",", "path", ")", ":", "reversed_path", "=", "[", "]", "parent", "=", "model", "pieces", "=", "path", ".", "split", "(", "LOOKUP_SEP", ")", "for", "piece", "in", "pieces", ":", "field", "=", "parent", ".", "_meta", ".", "get_field", "(", "piece", ")", "direct", "=", "(", "(", "not", "field", ".", "auto_created", ")", "or", "field", ".", "concrete", ")", "if", "(", "len", "(", "reversed_path", ")", "==", "(", "len", "(", "pieces", ")", "-", "1", ")", ")", ":", "try", ":", "get_model_from_relation", "(", "field", ")", "except", "NotRelationField", ":", "break", "if", "direct", ":", "related_name", "=", "field", ".", "related_query_name", "(", ")", "parent", "=", "field", ".", "rel", ".", "to", "else", ":", "related_name", "=", "field", ".", "field", ".", "name", "parent", "=", "field", ".", "model", "reversed_path", ".", "insert", "(", "0", ",", "related_name", ")", "return", "(", "parent", ",", "LOOKUP_SEP", ".", "join", "(", "reversed_path", ")", ")" ]
create a reversed field path .
train
false
19,310
def getstatus(file):
    """Return output of "ls -ld <file>" in a string.  Deprecated."""
    import warnings
    warnings.warn('commands.getstatus() is deprecated', DeprecationWarning)
    cmd = 'ls -ld' + mkarg(file)
    return getoutput(cmd)
[ "def", "getstatus", "(", "file", ")", ":", "import", "warnings", "warnings", ".", "warn", "(", "'commands.getstatus() is deprecated'", ",", "DeprecationWarning", ")", "return", "getoutput", "(", "(", "'ls -ld'", "+", "mkarg", "(", "file", ")", ")", ")" ]
return output of "ls -ld <file>" in a string .
train
false
19,311
@click.command('set-mariadb-host')
@click.argument('host')
def set_mariadb_host(host):
    """Set the MariaDB host for the bench."""
    # Import locally under an alias to avoid shadowing this command.
    from bench.utils import set_mariadb_host as _set_host
    _set_host(host)
[ "@", "click", ".", "command", "(", "'set-mariadb-host'", ")", "@", "click", ".", "argument", "(", "'host'", ")", "def", "set_mariadb_host", "(", "host", ")", ":", "from", "bench", ".", "utils", "import", "set_mariadb_host", "set_mariadb_host", "(", "host", ")" ]
set mariadb host for bench .
train
false
19,312
def PEM_cert_to_DER_cert(pem_cert_string):
    """Convert an ASCII PEM-format certificate to its DER-encoded bytes.

    :raises ValueError: if the input lacks the PEM header or footer.
    """
    if not pem_cert_string.startswith(PEM_HEADER):
        raise ValueError('Invalid PEM encoding; must start with %s' % PEM_HEADER)
    stripped = pem_cert_string.strip()
    if not stripped.endswith(PEM_FOOTER):
        raise ValueError('Invalid PEM encoding; must end with %s' % PEM_FOOTER)
    # Drop the header/footer markers and decode the base64 body.
    body = stripped[len(PEM_HEADER):-len(PEM_FOOTER)]
    return base64.decodestring(body)
[ "def", "PEM_cert_to_DER_cert", "(", "pem_cert_string", ")", ":", "if", "(", "not", "pem_cert_string", ".", "startswith", "(", "PEM_HEADER", ")", ")", ":", "raise", "ValueError", "(", "(", "'Invalid PEM encoding; must start with %s'", "%", "PEM_HEADER", ")", ")", "if", "(", "not", "pem_cert_string", ".", "strip", "(", ")", ".", "endswith", "(", "PEM_FOOTER", ")", ")", ":", "raise", "ValueError", "(", "(", "'Invalid PEM encoding; must end with %s'", "%", "PEM_FOOTER", ")", ")", "d", "=", "pem_cert_string", ".", "strip", "(", ")", "[", "len", "(", "PEM_HEADER", ")", ":", "(", "-", "len", "(", "PEM_FOOTER", ")", ")", "]", "return", "base64", ".", "decodestring", "(", "d", ")" ]
takes a certificate in ascii pem format and returns the der-encoded version of it as a byte sequence .
train
false
19,314
def _failing_getaddrinfo(*args):
    """Dummy getaddrinfo replacement for mocks: always fails with an I/O error."""
    raise socket.gaierror(errno.EIO, 'mock: lookup failed')
[ "def", "_failing_getaddrinfo", "(", "*", "args", ")", ":", "raise", "socket", ".", "gaierror", "(", "errno", ".", "EIO", ",", "'mock: lookup failed'", ")" ]
dummy implementation of getaddrinfo for use in mocks .
train
false
19,315
@treeio_login_required
@handle_response_format
def liability_delete(request, liability_id, response_format='html'):
    """Liability delete view.

    On POST either trashes or permanently deletes the Liability (then
    redirects to the index), or cancels back to the detail view; on GET
    renders the delete confirmation page.
    """
    liability = get_object_or_404(Liability, pk=liability_id)
    # Deleting requires write permission on this Liability.
    if (not request.user.profile.has_permission(liability, mode='w')):
        return user_denied(request, "You don't have access to this Liability", response_format)
    if request.POST:
        if ('delete' in request.POST):
            if ('trash' in request.POST):
                # Soft delete: flag as trashed instead of removing the row.
                liability.trash = True
                liability.save()
            else:
                liability.delete()
            return HttpResponseRedirect(reverse('finance_index_liabilities'))
        elif ('cancel' in request.POST):
            return HttpResponseRedirect(reverse('finance_liability_view', args=[liability.id]))
    return render_to_response('finance/liability_delete', {'liability': liability}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "liability_delete", "(", "request", ",", "liability_id", ",", "response_format", "=", "'html'", ")", ":", "liability", "=", "get_object_or_404", "(", "Liability", ",", "pk", "=", "liability_id", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "liability", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "\"You don't have access to this Liability\"", ",", "response_format", ")", "if", "request", ".", "POST", ":", "if", "(", "'delete'", "in", "request", ".", "POST", ")", ":", "if", "(", "'trash'", "in", "request", ".", "POST", ")", ":", "liability", ".", "trash", "=", "True", "liability", ".", "save", "(", ")", "else", ":", "liability", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_index_liabilities'", ")", ")", "elif", "(", "'cancel'", "in", "request", ".", "POST", ")", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'finance_liability_view'", ",", "args", "=", "[", "liability", ".", "id", "]", ")", ")", "return", "render_to_response", "(", "'finance/liability_delete'", ",", "{", "'liability'", ":", "liability", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
liability delete .
train
false
19,316
def _get_si():
    """Authenticate with the vCenter server and return a service instance object.

    Reads connection parameters (url, user, password, protocol, port) from
    this provider's cloud configuration.
    """
    def _opt(name, **kw):
        # One provider-scoped config lookup; keeps the five parameter reads
        # from repeating the same boilerplate.
        return config.get_cloud_config_value(
            name, get_configured_provider(), __opts__, search_global=False, **kw)

    return salt.utils.vmware.get_service_instance(
        _opt('url'),
        _opt('user'),
        _opt('password'),
        protocol=_opt('protocol', default='https'),
        port=_opt('port', default=443))
[ "def", "_get_si", "(", ")", ":", "url", "=", "config", ".", "get_cloud_config_value", "(", "'url'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "username", "=", "config", ".", "get_cloud_config_value", "(", "'user'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "password", "=", "config", ".", "get_cloud_config_value", "(", "'password'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ")", "protocol", "=", "config", ".", "get_cloud_config_value", "(", "'protocol'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "'https'", ")", "port", "=", "config", ".", "get_cloud_config_value", "(", "'port'", ",", "get_configured_provider", "(", ")", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "443", ")", "return", "salt", ".", "utils", ".", "vmware", ".", "get_service_instance", "(", "url", ",", "username", ",", "password", ",", "protocol", "=", "protocol", ",", "port", "=", "port", ")" ]
authenticate with vcenter server and return service instance object .
train
true
19,317
def interlink_translations(content):
    """Point each translation's URL at the subsite for its language.

    Sorts the translations by language, then rewrites each one's
    ``override_url`` as the relative subsite path joined with the
    translation's native URL.
    """
    source_lang = content.lang
    content.translations.sort(key=attrgetter('lang'))
    for trans in content.translations:
        site_prefix = relpath_to_site(source_lang, trans.lang)
        native_url = _NATIVE_CONTENT_URL_DB[trans.source_path]
        trans.override_url = posixpath.join(site_prefix, native_url)
[ "def", "interlink_translations", "(", "content", ")", ":", "lang", "=", "content", ".", "lang", "content", ".", "translations", ".", "sort", "(", "key", "=", "attrgetter", "(", "'lang'", ")", ")", "for", "translation", "in", "content", ".", "translations", ":", "relpath", "=", "relpath_to_site", "(", "lang", ",", "translation", ".", "lang", ")", "url", "=", "_NATIVE_CONTENT_URL_DB", "[", "translation", ".", "source_path", "]", "translation", ".", "override_url", "=", "posixpath", ".", "join", "(", "relpath", ",", "url", ")" ]
link content to translations in their main language so the url of the different subsites will be honored .
train
true
19,318
def encode_instance(instance, local=True):
    """Encode a locally created instance for return via RPC.

    Local instances are reduced to their id plus a precooked marker of
    False; non-local ones are copied whole and flagged as precooked.
    """
    if local:
        return {'id': instance['id'], '_is_precooked': False}
    encoded = dict(instance)
    encoded['_is_precooked'] = True
    return encoded
[ "def", "encode_instance", "(", "instance", ",", "local", "=", "True", ")", ":", "if", "local", ":", "return", "dict", "(", "id", "=", "instance", "[", "'id'", "]", ",", "_is_precooked", "=", "False", ")", "else", ":", "inst", "=", "dict", "(", "instance", ")", "inst", "[", "'_is_precooked'", "]", "=", "True", "return", "inst" ]
encode locally created instance for return via rpc .
train
false
19,320
def _write_batch_to_lmdb(db, batch):
    """Write a batch of (key, datum) pairs to an LMDB database.

    On MapFullError, doubles the map size and retries recursively.

    :param db: an open LMDB environment.
    :param batch: iterable of (key, datum) pairs where datum supports
        ``SerializeToString()`` (e.g. a protobuf Datum).
    :raises ImportError: if py-lmdb is older than 0.87 (no set_mapsize).
    """
    try:
        with db.begin(write=True) as lmdb_txn:
            for (key, datum) in batch:
                lmdb_txn.put(key, datum.SerializeToString())
    except lmdb.MapFullError:
        # The memory map is full; double its size and retry the whole batch.
        curr_limit = db.info()['map_size']
        new_limit = (curr_limit * 2)
        try:
            db.set_mapsize(new_limit)
        except AttributeError as e:
            # set_mapsize only exists in py-lmdb >= 0.87; distinguish a
            # too-old library from an unrelated AttributeError.
            version = tuple((int(x) for x in lmdb.__version__.split('.')))
            if (version < (0, 87)):
                raise ImportError(('py-lmdb is out of date (%s vs 0.87)' % lmdb.__version__))
            else:
                raise e
        _write_batch_to_lmdb(db, batch)
[ "def", "_write_batch_to_lmdb", "(", "db", ",", "batch", ")", ":", "try", ":", "with", "db", ".", "begin", "(", "write", "=", "True", ")", "as", "lmdb_txn", ":", "for", "(", "key", ",", "datum", ")", "in", "batch", ":", "lmdb_txn", ".", "put", "(", "key", ",", "datum", ".", "SerializeToString", "(", ")", ")", "except", "lmdb", ".", "MapFullError", ":", "curr_limit", "=", "db", ".", "info", "(", ")", "[", "'map_size'", "]", "new_limit", "=", "(", "curr_limit", "*", "2", ")", "try", ":", "db", ".", "set_mapsize", "(", "new_limit", ")", "except", "AttributeError", "as", "e", ":", "version", "=", "tuple", "(", "(", "int", "(", "x", ")", "for", "x", "in", "lmdb", ".", "__version__", ".", "split", "(", "'.'", ")", ")", ")", "if", "(", "version", "<", "(", "0", ",", "87", ")", ")", ":", "raise", "ImportError", "(", "(", "'py-lmdb is out of date (%s vs 0.87)'", "%", "lmdb", ".", "__version__", ")", ")", "else", ":", "raise", "e", "_write_batch_to_lmdb", "(", "db", ",", "batch", ")" ]
write a batch of to db .
train
false
19,321
def _ignore_request_headers_rewriter(environ):
    """Strip the ignored request headers from a WSGI environ, in place."""
    for header in constants.IGNORED_REQUEST_HEADERS:
        key = 'HTTP_' + header.replace('-', '_').upper()
        # pop with a default is equivalent to del guarded by KeyError
        environ.pop(key, None)
[ "def", "_ignore_request_headers_rewriter", "(", "environ", ")", ":", "for", "h", "in", "constants", ".", "IGNORED_REQUEST_HEADERS", ":", "h", "=", "(", "'HTTP_'", "+", "h", ".", "replace", "(", "'-'", ",", "'_'", ")", ".", "upper", "(", ")", ")", "try", ":", "del", "environ", "[", "h", "]", "except", "KeyError", ":", "pass" ]
ignore specific request headers .
train
false
19,322
def get_remote_url(git_path, module, dest, remote):
    """Return the URL of *remote* for the repo at *dest*, or None on failure."""
    cmd = [git_path, 'ls-remote', '--get-url', remote]
    rc, out, _err = module.run_command(cmd, cwd=dest)
    if rc != 0:
        return None
    return to_native(out).rstrip('\n')
[ "def", "get_remote_url", "(", "git_path", ",", "module", ",", "dest", ",", "remote", ")", ":", "command", "=", "[", "git_path", ",", "'ls-remote'", ",", "'--get-url'", ",", "remote", "]", "(", "rc", ",", "out", ",", "err", ")", "=", "module", ".", "run_command", "(", "command", ",", "cwd", "=", "dest", ")", "if", "(", "rc", "!=", "0", ")", ":", "return", "None", "return", "to_native", "(", "out", ")", ".", "rstrip", "(", "'\\n'", ")" ]
return url of remote source for repo .
train
false
19,323
def show_run():
    """Shortcut to run `show run` on the switch."""
    try:
        return sendline('show run')
    except TerminalException as err:
        log.error(err)
        return 'Failed to "show run"'
[ "def", "show_run", "(", ")", ":", "try", ":", "ret", "=", "sendline", "(", "'show run'", ")", "except", "TerminalException", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "'Failed to \"show run\"'", "return", "ret" ]
shortcut to run show run on switch .
train
false
19,325
def format_attachments(attachments=None):
    """Return formatted attachment dicts for a volume config.

    :param attachments: iterable of raw attachment dicts (EC2-style keys
        such as 'AttachTime', 'InstanceId'); None is treated as empty.
    :return: list of dicts with snake_case keys; 'attach_time' is
        stringified, other missing keys become None.
    """
    # A None sentinel replaces the original mutable default ``[]``
    # (shared across calls); behavior for callers is unchanged.
    if attachments is None:
        attachments = []
    return [
        {
            'attach_time': str(attachment.get('AttachTime')),
            'instance_id': attachment.get('InstanceId'),
            'volume_id': attachment.get('VolumeId'),
            'state': attachment.get('State'),
            'delete_on_termination': attachment.get('DeleteOnTermination'),
            'device': attachment.get('Device'),
        }
        for attachment in attachments
    ]
[ "def", "format_attachments", "(", "attachments", "=", "[", "]", ")", ":", "formatted_attachments", "=", "[", "]", "for", "attachment", "in", "attachments", ":", "formatted_attachment", "=", "{", "'attach_time'", ":", "str", "(", "attachment", ".", "get", "(", "'AttachTime'", ")", ")", ",", "'instance_id'", ":", "attachment", ".", "get", "(", "'InstanceId'", ")", ",", "'volume_id'", ":", "attachment", ".", "get", "(", "'VolumeId'", ")", ",", "'state'", ":", "attachment", ".", "get", "(", "'State'", ")", ",", "'delete_on_termination'", ":", "attachment", ".", "get", "(", "'DeleteOnTermination'", ")", ",", "'device'", ":", "attachment", ".", "get", "(", "'Device'", ")", "}", "formatted_attachments", ".", "append", "(", "formatted_attachment", ")", "return", "formatted_attachments" ]
return formatted_attachments for volume config .
train
false
19,326
def read_in_chunks(iterator, chunk_size=None, fill_size=False, yield_empty=False): chunk_size = (chunk_size or CHUNK_SIZE) if isinstance(iterator, (file, httplib.HTTPResponse)): get_data = iterator.read args = (chunk_size,) else: get_data = next args = (iterator,) data = b('') empty = False while ((not empty) or (len(data) > 0)): if (not empty): try: chunk = b(get_data(*args)) if (len(chunk) > 0): data += chunk else: empty = True except StopIteration: empty = True if (len(data) == 0): if (empty and yield_empty): (yield b('')) raise StopIteration if fill_size: if (empty or (len(data) >= chunk_size)): (yield data[:chunk_size]) data = data[chunk_size:] else: (yield data) data = b('')
[ "def", "read_in_chunks", "(", "iterator", ",", "chunk_size", "=", "None", ",", "fill_size", "=", "False", ",", "yield_empty", "=", "False", ")", ":", "chunk_size", "=", "(", "chunk_size", "or", "CHUNK_SIZE", ")", "if", "isinstance", "(", "iterator", ",", "(", "file", ",", "httplib", ".", "HTTPResponse", ")", ")", ":", "get_data", "=", "iterator", ".", "read", "args", "=", "(", "chunk_size", ",", ")", "else", ":", "get_data", "=", "next", "args", "=", "(", "iterator", ",", ")", "data", "=", "b", "(", "''", ")", "empty", "=", "False", "while", "(", "(", "not", "empty", ")", "or", "(", "len", "(", "data", ")", ">", "0", ")", ")", ":", "if", "(", "not", "empty", ")", ":", "try", ":", "chunk", "=", "b", "(", "get_data", "(", "*", "args", ")", ")", "if", "(", "len", "(", "chunk", ")", ">", "0", ")", ":", "data", "+=", "chunk", "else", ":", "empty", "=", "True", "except", "StopIteration", ":", "empty", "=", "True", "if", "(", "len", "(", "data", ")", "==", "0", ")", ":", "if", "(", "empty", "and", "yield_empty", ")", ":", "(", "yield", "b", "(", "''", ")", ")", "raise", "StopIteration", "if", "fill_size", ":", "if", "(", "empty", "or", "(", "len", "(", "data", ")", ">=", "chunk_size", ")", ")", ":", "(", "yield", "data", "[", ":", "chunk_size", "]", ")", "data", "=", "data", "[", "chunk_size", ":", "]", "else", ":", "(", "yield", "data", ")", "data", "=", "b", "(", "''", ")" ]
return a generator which yields data in chunks .
train
false
19,327
def get_catalogue(backend=None, skip_caps=True): default_backend_config = (backend or default_catalogue_backend()) backend_name = default_backend_config['ENGINE'] catalog_module = load_backend(backend_name) assert hasattr(catalog_module, 'CatalogueBackend'), '%s must define a CatalogueBackend class' catalog_class = catalog_module.CatalogueBackend cat = catalog_class(skip_caps=skip_caps, **default_backend_config) return cat
[ "def", "get_catalogue", "(", "backend", "=", "None", ",", "skip_caps", "=", "True", ")", ":", "default_backend_config", "=", "(", "backend", "or", "default_catalogue_backend", "(", ")", ")", "backend_name", "=", "default_backend_config", "[", "'ENGINE'", "]", "catalog_module", "=", "load_backend", "(", "backend_name", ")", "assert", "hasattr", "(", "catalog_module", ",", "'CatalogueBackend'", ")", ",", "'%s must define a CatalogueBackend class'", "catalog_class", "=", "catalog_module", ".", "CatalogueBackend", "cat", "=", "catalog_class", "(", "skip_caps", "=", "skip_caps", ",", "**", "default_backend_config", ")", "return", "cat" ]
returns a catalogue object .
train
false
19,328
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
19,329
def splitdrive(p): return ('', p)
[ "def", "splitdrive", "(", "p", ")", ":", "return", "(", "''", ",", "p", ")" ]
split a pathname into a drive specification and the rest of the path .
train
false
19,330
def get_or_create_badge(badge_template, year=None): if (year is not None): badge_template = dict(((key, value.format(year=year)) for (key, value) in badge_template.items())) slug = badge_template.pop('slug') try: return Badge.objects.get(slug=slug) except Badge.DoesNotExist: return Badge.objects.create(slug=slug, **badge_template)
[ "def", "get_or_create_badge", "(", "badge_template", ",", "year", "=", "None", ")", ":", "if", "(", "year", "is", "not", "None", ")", ":", "badge_template", "=", "dict", "(", "(", "(", "key", ",", "value", ".", "format", "(", "year", "=", "year", ")", ")", "for", "(", "key", ",", "value", ")", "in", "badge_template", ".", "items", "(", ")", ")", ")", "slug", "=", "badge_template", ".", "pop", "(", "'slug'", ")", "try", ":", "return", "Badge", ".", "objects", ".", "get", "(", "slug", "=", "slug", ")", "except", "Badge", ".", "DoesNotExist", ":", "return", "Badge", ".", "objects", ".", "create", "(", "slug", "=", "slug", ",", "**", "badge_template", ")" ]
get or create a badge .
train
false
19,332
def get_discussion_id_map(course, user): return dict(map(get_discussion_id_map_entry, get_accessible_discussion_xblocks(course, user)))
[ "def", "get_discussion_id_map", "(", "course", ",", "user", ")", ":", "return", "dict", "(", "map", "(", "get_discussion_id_map_entry", ",", "get_accessible_discussion_xblocks", "(", "course", ",", "user", ")", ")", ")" ]
transform the list of this courses discussion xblocks into a dictionary of metadata keyed by discussion_id .
train
false
19,333
def skip_if_no_multiple_domains_support(f): @functools.wraps(f) def wrapper(*args, **kwargs): test_obj = args[0] if (not test_obj.identity_api.multiple_domains_supported): raise testcase.TestSkipped('No multiple domains support') return f(*args, **kwargs) return wrapper
[ "def", "skip_if_no_multiple_domains_support", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "test_obj", "=", "args", "[", "0", "]", "if", "(", "not", "test_obj", ".", "identity_api", ".", "multiple_domains_supported", ")", ":", "raise", "testcase", ".", "TestSkipped", "(", "'No multiple domains support'", ")", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
decorator to skip tests for identity drivers limited to one domain .
train
false
19,335
def fix_import_export_id_paths(fieldname): fixed_db_id = re.sub('([^/])\\.id', '\\1/.id', fieldname) fixed_external_id = re.sub('([^/]):id', '\\1/id', fixed_db_id) return fixed_external_id.split('/')
[ "def", "fix_import_export_id_paths", "(", "fieldname", ")", ":", "fixed_db_id", "=", "re", ".", "sub", "(", "'([^/])\\\\.id'", ",", "'\\\\1/.id'", ",", "fieldname", ")", "fixed_external_id", "=", "re", ".", "sub", "(", "'([^/]):id'", ",", "'\\\\1/id'", ",", "fixed_db_id", ")", "return", "fixed_external_id", ".", "split", "(", "'/'", ")" ]
fixes the id fields in import and exports .
train
false
19,337
def get_help_fs(app_name): app = appmanager.get_desktop_module(app_name) if (app is not None): if (app.help_dir is None): raise PopupException(("No help available for app '%s'." % app_name)) return LocalSubFileSystem(app.help_dir) else: raise PopupException(("App '%s' is not loaded, so no help is available for it!" % app_name))
[ "def", "get_help_fs", "(", "app_name", ")", ":", "app", "=", "appmanager", ".", "get_desktop_module", "(", "app_name", ")", "if", "(", "app", "is", "not", "None", ")", ":", "if", "(", "app", ".", "help_dir", "is", "None", ")", ":", "raise", "PopupException", "(", "(", "\"No help available for app '%s'.\"", "%", "app_name", ")", ")", "return", "LocalSubFileSystem", "(", "app", ".", "help_dir", ")", "else", ":", "raise", "PopupException", "(", "(", "\"App '%s' is not loaded, so no help is available for it!\"", "%", "app_name", ")", ")" ]
creates a local file system for a given apps help directory .
train
false
19,339
def get_rrmgr_cmd(src, dst, compression=None, tcp_buf_size=None, connections=None): cmd = ['rrmgr', '-s', 'zfs'] if compression: cmd.extend(['-c', ('%s' % compression)]) cmd.append('-q') cmd.append('-e') if tcp_buf_size: cmd.extend(['-w', six.text_type(tcp_buf_size)]) if connections: cmd.extend(['-n', six.text_type(connections)]) cmd.extend([src, dst]) return ' '.join(cmd)
[ "def", "get_rrmgr_cmd", "(", "src", ",", "dst", ",", "compression", "=", "None", ",", "tcp_buf_size", "=", "None", ",", "connections", "=", "None", ")", ":", "cmd", "=", "[", "'rrmgr'", ",", "'-s'", ",", "'zfs'", "]", "if", "compression", ":", "cmd", ".", "extend", "(", "[", "'-c'", ",", "(", "'%s'", "%", "compression", ")", "]", ")", "cmd", ".", "append", "(", "'-q'", ")", "cmd", ".", "append", "(", "'-e'", ")", "if", "tcp_buf_size", ":", "cmd", ".", "extend", "(", "[", "'-w'", ",", "six", ".", "text_type", "(", "tcp_buf_size", ")", "]", ")", "if", "connections", ":", "cmd", ".", "extend", "(", "[", "'-n'", ",", "six", ".", "text_type", "(", "connections", ")", "]", ")", "cmd", ".", "extend", "(", "[", "src", ",", "dst", "]", ")", "return", "' '", ".", "join", "(", "cmd", ")" ]
returns rrmgr command for source and destination .
train
false
19,341
def create_flat_names(varname, shape): if (not shape): return [varname] labels = (np.ravel(xs).tolist() for xs in np.indices(shape)) labels = (map(str, xs) for xs in labels) return ['{}__{}'.format(varname, '_'.join(idxs)) for idxs in zip(*labels)]
[ "def", "create_flat_names", "(", "varname", ",", "shape", ")", ":", "if", "(", "not", "shape", ")", ":", "return", "[", "varname", "]", "labels", "=", "(", "np", ".", "ravel", "(", "xs", ")", ".", "tolist", "(", ")", "for", "xs", "in", "np", ".", "indices", "(", "shape", ")", ")", "labels", "=", "(", "map", "(", "str", ",", "xs", ")", "for", "xs", "in", "labels", ")", "return", "[", "'{}__{}'", ".", "format", "(", "varname", ",", "'_'", ".", "join", "(", "idxs", ")", ")", "for", "idxs", "in", "zip", "(", "*", "labels", ")", "]" ]
return flat variable names for varname of shape .
train
false
19,342
def create_keyspace(keyspace, replication_strategy='SimpleStrategy', replication_factor=1, replication_datacenters=None, contact_points=None, port=None, cql_user=None, cql_pass=None): existing_keyspace = keyspace_exists(keyspace, contact_points, port) if (not existing_keyspace): replication_map = {'class': replication_strategy} if replication_datacenters: if isinstance(replication_datacenters, six.string_types): try: replication_datacenter_map = json.loads(replication_datacenters) replication_map.update(**replication_datacenter_map) except BaseException: log.error('Could not load json replication_datacenters.') return False else: replication_map.update(**replication_datacenters) else: replication_map['replication_factor'] = replication_factor query = 'create keyspace {0}\n with replication = {1}\n and durable_writes = true;'.format(keyspace, replication_map) try: cql_query(query, contact_points, port, cql_user, cql_pass) except CommandExecutionError: log.critical('Could not create keyspace.') raise except BaseException as e: log.critical('Unexpected error while creating keyspace: {0}'.format(str(e))) raise
[ "def", "create_keyspace", "(", "keyspace", ",", "replication_strategy", "=", "'SimpleStrategy'", ",", "replication_factor", "=", "1", ",", "replication_datacenters", "=", "None", ",", "contact_points", "=", "None", ",", "port", "=", "None", ",", "cql_user", "=", "None", ",", "cql_pass", "=", "None", ")", ":", "existing_keyspace", "=", "keyspace_exists", "(", "keyspace", ",", "contact_points", ",", "port", ")", "if", "(", "not", "existing_keyspace", ")", ":", "replication_map", "=", "{", "'class'", ":", "replication_strategy", "}", "if", "replication_datacenters", ":", "if", "isinstance", "(", "replication_datacenters", ",", "six", ".", "string_types", ")", ":", "try", ":", "replication_datacenter_map", "=", "json", ".", "loads", "(", "replication_datacenters", ")", "replication_map", ".", "update", "(", "**", "replication_datacenter_map", ")", "except", "BaseException", ":", "log", ".", "error", "(", "'Could not load json replication_datacenters.'", ")", "return", "False", "else", ":", "replication_map", ".", "update", "(", "**", "replication_datacenters", ")", "else", ":", "replication_map", "[", "'replication_factor'", "]", "=", "replication_factor", "query", "=", "'create keyspace {0}\\n with replication = {1}\\n and durable_writes = true;'", ".", "format", "(", "keyspace", ",", "replication_map", ")", "try", ":", "cql_query", "(", "query", ",", "contact_points", ",", "port", ",", "cql_user", ",", "cql_pass", ")", "except", "CommandExecutionError", ":", "log", ".", "critical", "(", "'Could not create keyspace.'", ")", "raise", "except", "BaseException", "as", "e", ":", "log", ".", "critical", "(", "'Unexpected error while creating keyspace: {0}'", ".", "format", "(", "str", "(", "e", ")", ")", ")", "raise" ]
create a new keyspace in cassandra .
train
true
19,343
def what_to_add(qtype, origword, newword, terminate): if (not newword.startswith(origword)): return '' else: qold = quotify(qtype, origword, terminate=False) return quotify(qtype, newword, terminate=terminate)[len(qold):]
[ "def", "what_to_add", "(", "qtype", ",", "origword", ",", "newword", ",", "terminate", ")", ":", "if", "(", "not", "newword", ".", "startswith", "(", "origword", ")", ")", ":", "return", "''", "else", ":", "qold", "=", "quotify", "(", "qtype", ",", "origword", ",", "terminate", "=", "False", ")", "return", "quotify", "(", "qtype", ",", "newword", ",", "terminate", "=", "terminate", ")", "[", "len", "(", "qold", ")", ":", "]" ]
return a qtype that is needed to finish a partial word .
train
false
19,344
@pytest.yield_fixture() def manager(request, config, caplog, monkeypatch, filecopy): if (u'tmpdir' in request.fixturenames): config = config.replace(u'__tmp__', request.getfuncargvalue(u'tmpdir').strpath) try: mockmanager = MockManager(config, request.cls.__name__) except Exception: print caplog.text() raise (yield mockmanager) mockmanager.shutdown()
[ "@", "pytest", ".", "yield_fixture", "(", ")", "def", "manager", "(", "request", ",", "config", ",", "caplog", ",", "monkeypatch", ",", "filecopy", ")", ":", "if", "(", "u'tmpdir'", "in", "request", ".", "fixturenames", ")", ":", "config", "=", "config", ".", "replace", "(", "u'__tmp__'", ",", "request", ".", "getfuncargvalue", "(", "u'tmpdir'", ")", ".", "strpath", ")", "try", ":", "mockmanager", "=", "MockManager", "(", "config", ",", "request", ".", "cls", ".", "__name__", ")", "except", "Exception", ":", "print", "caplog", ".", "text", "(", ")", "raise", "(", "yield", "mockmanager", ")", "mockmanager", ".", "shutdown", "(", ")" ]
create a :class:mockmanager for this test based on config argument .
train
false
19,345
def pull_request(owner, repository, number): return gh.pull_request(owner, repository, number)
[ "def", "pull_request", "(", "owner", ",", "repository", ",", "number", ")", ":", "return", "gh", ".", "pull_request", "(", "owner", ",", "repository", ",", "number", ")" ]
anonymously retrieve pull request :number on :owner/:repository .
train
false
19,346
def currency_represent(v): if (v == 'USD'): return '$' elif (v == 'EUR'): return '\xe2\x82\xac' elif (v == 'GBP'): return '\xc2\xa3' else: return v
[ "def", "currency_represent", "(", "v", ")", ":", "if", "(", "v", "==", "'USD'", ")", ":", "return", "'$'", "elif", "(", "v", "==", "'EUR'", ")", ":", "return", "'\\xe2\\x82\\xac'", "elif", "(", "v", "==", "'GBP'", ")", ":", "return", "'\\xc2\\xa3'", "else", ":", "return", "v" ]
custom representation of currencies .
train
false
19,348
def setup_domain(domain): data = get_domain(domain) setup_roles(data) setup_desktop_icons(data) setup_properties(data) set_values(data) setup_sidebar_items(data) if data.get(u'default_portal_role'): frappe.db.set_value(u'Portal Settings', None, u'default_role', data.get(u'default_portal_role')) frappe.clear_cache()
[ "def", "setup_domain", "(", "domain", ")", ":", "data", "=", "get_domain", "(", "domain", ")", "setup_roles", "(", "data", ")", "setup_desktop_icons", "(", "data", ")", "setup_properties", "(", "data", ")", "set_values", "(", "data", ")", "setup_sidebar_items", "(", "data", ")", "if", "data", ".", "get", "(", "u'default_portal_role'", ")", ":", "frappe", ".", "db", ".", "set_value", "(", "u'Portal Settings'", ",", "None", ",", "u'default_role'", ",", "data", ".", "get", "(", "u'default_portal_role'", ")", ")", "frappe", ".", "clear_cache", "(", ")" ]
setup roles .
train
false
19,349
def softmax_categorical_crossentropy(y_pred, y_true): with tf.name_scope('SoftmaxCrossentropy'): return tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(y_pred, y_true))
[ "def", "softmax_categorical_crossentropy", "(", "y_pred", ",", "y_true", ")", ":", "with", "tf", ".", "name_scope", "(", "'SoftmaxCrossentropy'", ")", ":", "return", "tf", ".", "reduce_mean", "(", "tf", ".", "nn", ".", "softmax_cross_entropy_with_logits", "(", "y_pred", ",", "y_true", ")", ")" ]
softmax categorical crossentropy .
train
false
19,350
def unpack_context(conf, msg): context_dict = {} for key in list(msg.keys()): key = str(key) if key.startswith('_context_'): value = msg.pop(key) context_dict[key[9:]] = value context_dict['msg_id'] = msg.pop('_msg_id', None) context_dict['reply_q'] = msg.pop('_reply_q', None) context_dict['conf'] = conf ctx = RpcContext.from_dict(context_dict) rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict()) return ctx
[ "def", "unpack_context", "(", "conf", ",", "msg", ")", ":", "context_dict", "=", "{", "}", "for", "key", "in", "list", "(", "msg", ".", "keys", "(", ")", ")", ":", "key", "=", "str", "(", "key", ")", "if", "key", ".", "startswith", "(", "'_context_'", ")", ":", "value", "=", "msg", ".", "pop", "(", "key", ")", "context_dict", "[", "key", "[", "9", ":", "]", "]", "=", "value", "context_dict", "[", "'msg_id'", "]", "=", "msg", ".", "pop", "(", "'_msg_id'", ",", "None", ")", "context_dict", "[", "'reply_q'", "]", "=", "msg", ".", "pop", "(", "'_reply_q'", ",", "None", ")", "context_dict", "[", "'conf'", "]", "=", "conf", "ctx", "=", "RpcContext", ".", "from_dict", "(", "context_dict", ")", "rpc_common", ".", "_safe_log", "(", "LOG", ".", "debug", ",", "_", "(", "'unpacked context: %s'", ")", ",", "ctx", ".", "to_dict", "(", ")", ")", "return", "ctx" ]
unpack context from msg .
train
false
19,351
def fowlkes_mallows_score(labels_true, labels_pred, sparse=False): (labels_true, labels_pred) = check_clusterings(labels_true, labels_pred) (n_samples,) = labels_true.shape c = contingency_matrix(labels_true, labels_pred, sparse=True) tk = (np.dot(c.data, c.data) - n_samples) pk = (np.sum((np.asarray(c.sum(axis=0)).ravel() ** 2)) - n_samples) qk = (np.sum((np.asarray(c.sum(axis=1)).ravel() ** 2)) - n_samples) return ((tk / np.sqrt((pk * qk))) if (tk != 0.0) else 0.0)
[ "def", "fowlkes_mallows_score", "(", "labels_true", ",", "labels_pred", ",", "sparse", "=", "False", ")", ":", "(", "labels_true", ",", "labels_pred", ")", "=", "check_clusterings", "(", "labels_true", ",", "labels_pred", ")", "(", "n_samples", ",", ")", "=", "labels_true", ".", "shape", "c", "=", "contingency_matrix", "(", "labels_true", ",", "labels_pred", ",", "sparse", "=", "True", ")", "tk", "=", "(", "np", ".", "dot", "(", "c", ".", "data", ",", "c", ".", "data", ")", "-", "n_samples", ")", "pk", "=", "(", "np", ".", "sum", "(", "(", "np", ".", "asarray", "(", "c", ".", "sum", "(", "axis", "=", "0", ")", ")", ".", "ravel", "(", ")", "**", "2", ")", ")", "-", "n_samples", ")", "qk", "=", "(", "np", ".", "sum", "(", "(", "np", ".", "asarray", "(", "c", ".", "sum", "(", "axis", "=", "1", ")", ")", ".", "ravel", "(", ")", "**", "2", ")", ")", "-", "n_samples", ")", "return", "(", "(", "tk", "/", "np", ".", "sqrt", "(", "(", "pk", "*", "qk", ")", ")", ")", "if", "(", "tk", "!=", "0.0", ")", "else", "0.0", ")" ]
measure the similarity of two clusterings of a set of points .
train
false
19,354
def merge_filters(filters1, filters2): result = list(filters1[:]) if filters2: for f in filters2: if (not (f in result)): result.append(f) return result
[ "def", "merge_filters", "(", "filters1", ",", "filters2", ")", ":", "result", "=", "list", "(", "filters1", "[", ":", "]", ")", "if", "filters2", ":", "for", "f", "in", "filters2", ":", "if", "(", "not", "(", "f", "in", "result", ")", ")", ":", "result", ".", "append", "(", "f", ")", "return", "result" ]
merge two filter lists into one .
train
false
19,356
@pytest.mark.integration def test_simple(caret_tester): caret_tester.js.load('position_caret/simple.html') caret_tester.check()
[ "@", "pytest", ".", "mark", ".", "integration", "def", "test_simple", "(", "caret_tester", ")", ":", "caret_tester", ".", "js", ".", "load", "(", "'position_caret/simple.html'", ")", "caret_tester", ".", "check", "(", ")" ]
simple case of uploading one partition .
train
false
19,357
def getNestedVectorTestExample(x=0.0, y=0.0, z=0.0): return NestedVectorTestExample(Vector3(x, y, z))
[ "def", "getNestedVectorTestExample", "(", "x", "=", "0.0", ",", "y", "=", "0.0", ",", "z", "=", "0.0", ")", ":", "return", "NestedVectorTestExample", "(", "Vector3", "(", "x", ",", "y", ",", "z", ")", ")" ]
get the nestedvectortestexample .
train
false
19,359
def Text(x, y, s, **options): options = _Underride(options, fontsize=16, verticalalignment='top', horizontalalignment='left') pyplot.text(x, y, s, **options)
[ "def", "Text", "(", "x", ",", "y", ",", "s", ",", "**", "options", ")", ":", "options", "=", "_Underride", "(", "options", ",", "fontsize", "=", "16", ",", "verticalalignment", "=", "'top'", ",", "horizontalalignment", "=", "'left'", ")", "pyplot", ".", "text", "(", "x", ",", "y", ",", "s", ",", "**", "options", ")" ]
puts text in a figure .
train
false
19,361
def getMaximumByPaths(elementNode): return euclidean.getMaximumByVector3Paths(elementNode.xmlObject.getTransformedPaths())
[ "def", "getMaximumByPaths", "(", "elementNode", ")", ":", "return", "euclidean", ".", "getMaximumByVector3Paths", "(", "elementNode", ".", "xmlObject", ".", "getTransformedPaths", "(", ")", ")" ]
get maximum of the transformed paths of the xmlobject of the elementnode .
train
false
19,363
def ipaddr(value, query='', version=False, alias='ipaddr'): query_func_extra_args = {'': ('vtype',), '6to4': ('vtype', 'value'), 'cidr_lookup': ('iplist', 'value'), 'int': ('vtype',), 'ipv4': ('value',), 'ipv6': ('value',), 'link-local': ('value',), 'loopback': ('value',), 'lo': ('value',), 'multicast': ('value',), 'private': ('value',), 'public': ('value',), 'unicast': ('value',), 'wrap': ('vtype', 'value')} query_func_map = {'': _empty_ipaddr_query, '6to4': _6to4_query, 'address': _ip_query, 'address/prefix': _gateway_query, 'bool': _bool_ipaddr_query, 'broadcast': _broadcast_query, 'cidr': _cidr_query, 'cidr_lookup': _cidr_lookup_query, 'gateway': _gateway_query, 'gw': _gateway_query, 'host': _host_query, 'host/prefix': _gateway_query, 'hostmask': _hostmask_query, 'hostnet': _gateway_query, 'int': _int_query, 'ip': _ip_query, 'ipv4': _ipv4_query, 'ipv6': _ipv6_query, 'link-local': _link_local_query, 'lo': _loopback_query, 'loopback': _loopback_query, 'multicast': _multicast_query, 'net': _net_query, 'netmask': _netmask_query, 'network': _network_query, 'prefix': _prefix_query, 'private': _private_query, 'public': _public_query, 'revdns': _revdns_query, 'router': _gateway_query, 'size': _size_query, 'subnet': _subnet_query, 'type': _type_query, 'unicast': _unicast_query, 'v4': _ipv4_query, 'v6': _ipv6_query, 'version': _version_query, 'wrap': _wrap_query} vtype = None if (not value): return False elif (value == True): return False elif isinstance(value, (list, tuple, types.GeneratorType)): _ret = [] for element in value: if ipaddr(element, str(query), version): _ret.append(ipaddr(element, str(query), version)) if _ret: return _ret else: return list() elif str(value).isdigit(): try: if ((not version) or (version and (version == 4))): v = netaddr.IPNetwork('0.0.0.0/0') v.value = int(value) v.prefixlen = 32 elif (version and (version == 6)): v = netaddr.IPNetwork('::/0') v.value = int(value) v.prefixlen = 128 except: try: v = netaddr.IPNetwork('::/0') v.value = 
int(value) v.prefixlen = 128 except: return False value = str(v) vtype = 'address' else: try: v = netaddr.IPNetwork(value) try: (address, prefix) = value.split('/') vtype = 'network' except: vtype = 'address' except: try: (address, prefix) = value.split('/') address.isdigit() address = int(address) prefix.isdigit() prefix = int(prefix) except: return False try: v = netaddr.IPNetwork('0.0.0.0/0') v.value = address v.prefixlen = prefix except: try: v = netaddr.IPNetwork('::/0') v.value = address v.prefixlen = prefix except: return False value = str(v) vtype = 'network' try: if (query and ((query not in query_func_map) or (query == 'cidr_lookup')) and ipaddr(query, 'network')): iplist = netaddr.IPSet([netaddr.IPNetwork(query)]) query = 'cidr_lookup' except: pass if (version and (v.version != version)): return False extras = [] for arg in query_func_extra_args.get(query, tuple()): extras.append(locals()[arg]) try: return query_func_map[query](v, *extras) except KeyError: try: float(query) if (v.size == 1): if (vtype == 'address'): return str(v.ip) elif (vtype == 'network'): return str(v) elif (v.size > 1): try: return ((str(v[query]) + '/') + str(v.prefixlen)) except: return False else: return value except: raise errors.AnsibleFilterError((alias + (': unknown filter type: %s' % query))) return False
[ "def", "ipaddr", "(", "value", ",", "query", "=", "''", ",", "version", "=", "False", ",", "alias", "=", "'ipaddr'", ")", ":", "query_func_extra_args", "=", "{", "''", ":", "(", "'vtype'", ",", ")", ",", "'6to4'", ":", "(", "'vtype'", ",", "'value'", ")", ",", "'cidr_lookup'", ":", "(", "'iplist'", ",", "'value'", ")", ",", "'int'", ":", "(", "'vtype'", ",", ")", ",", "'ipv4'", ":", "(", "'value'", ",", ")", ",", "'ipv6'", ":", "(", "'value'", ",", ")", ",", "'link-local'", ":", "(", "'value'", ",", ")", ",", "'loopback'", ":", "(", "'value'", ",", ")", ",", "'lo'", ":", "(", "'value'", ",", ")", ",", "'multicast'", ":", "(", "'value'", ",", ")", ",", "'private'", ":", "(", "'value'", ",", ")", ",", "'public'", ":", "(", "'value'", ",", ")", ",", "'unicast'", ":", "(", "'value'", ",", ")", ",", "'wrap'", ":", "(", "'vtype'", ",", "'value'", ")", "}", "query_func_map", "=", "{", "''", ":", "_empty_ipaddr_query", ",", "'6to4'", ":", "_6to4_query", ",", "'address'", ":", "_ip_query", ",", "'address/prefix'", ":", "_gateway_query", ",", "'bool'", ":", "_bool_ipaddr_query", ",", "'broadcast'", ":", "_broadcast_query", ",", "'cidr'", ":", "_cidr_query", ",", "'cidr_lookup'", ":", "_cidr_lookup_query", ",", "'gateway'", ":", "_gateway_query", ",", "'gw'", ":", "_gateway_query", ",", "'host'", ":", "_host_query", ",", "'host/prefix'", ":", "_gateway_query", ",", "'hostmask'", ":", "_hostmask_query", ",", "'hostnet'", ":", "_gateway_query", ",", "'int'", ":", "_int_query", ",", "'ip'", ":", "_ip_query", ",", "'ipv4'", ":", "_ipv4_query", ",", "'ipv6'", ":", "_ipv6_query", ",", "'link-local'", ":", "_link_local_query", ",", "'lo'", ":", "_loopback_query", ",", "'loopback'", ":", "_loopback_query", ",", "'multicast'", ":", "_multicast_query", ",", "'net'", ":", "_net_query", ",", "'netmask'", ":", "_netmask_query", ",", "'network'", ":", "_network_query", ",", "'prefix'", ":", "_prefix_query", ",", "'private'", ":", "_private_query", ",", "'public'", ":", "_public_query", 
",", "'revdns'", ":", "_revdns_query", ",", "'router'", ":", "_gateway_query", ",", "'size'", ":", "_size_query", ",", "'subnet'", ":", "_subnet_query", ",", "'type'", ":", "_type_query", ",", "'unicast'", ":", "_unicast_query", ",", "'v4'", ":", "_ipv4_query", ",", "'v6'", ":", "_ipv6_query", ",", "'version'", ":", "_version_query", ",", "'wrap'", ":", "_wrap_query", "}", "vtype", "=", "None", "if", "(", "not", "value", ")", ":", "return", "False", "elif", "(", "value", "==", "True", ")", ":", "return", "False", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ",", "types", ".", "GeneratorType", ")", ")", ":", "_ret", "=", "[", "]", "for", "element", "in", "value", ":", "if", "ipaddr", "(", "element", ",", "str", "(", "query", ")", ",", "version", ")", ":", "_ret", ".", "append", "(", "ipaddr", "(", "element", ",", "str", "(", "query", ")", ",", "version", ")", ")", "if", "_ret", ":", "return", "_ret", "else", ":", "return", "list", "(", ")", "elif", "str", "(", "value", ")", ".", "isdigit", "(", ")", ":", "try", ":", "if", "(", "(", "not", "version", ")", "or", "(", "version", "and", "(", "version", "==", "4", ")", ")", ")", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "'0.0.0.0/0'", ")", "v", ".", "value", "=", "int", "(", "value", ")", "v", ".", "prefixlen", "=", "32", "elif", "(", "version", "and", "(", "version", "==", "6", ")", ")", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "'::/0'", ")", "v", ".", "value", "=", "int", "(", "value", ")", "v", ".", "prefixlen", "=", "128", "except", ":", "try", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "'::/0'", ")", "v", ".", "value", "=", "int", "(", "value", ")", "v", ".", "prefixlen", "=", "128", "except", ":", "return", "False", "value", "=", "str", "(", "v", ")", "vtype", "=", "'address'", "else", ":", "try", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "value", ")", "try", ":", "(", "address", ",", "prefix", ")", "=", "value", ".", "split", "(", "'/'", ")", "vtype", "=", 
"'network'", "except", ":", "vtype", "=", "'address'", "except", ":", "try", ":", "(", "address", ",", "prefix", ")", "=", "value", ".", "split", "(", "'/'", ")", "address", ".", "isdigit", "(", ")", "address", "=", "int", "(", "address", ")", "prefix", ".", "isdigit", "(", ")", "prefix", "=", "int", "(", "prefix", ")", "except", ":", "return", "False", "try", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "'0.0.0.0/0'", ")", "v", ".", "value", "=", "address", "v", ".", "prefixlen", "=", "prefix", "except", ":", "try", ":", "v", "=", "netaddr", ".", "IPNetwork", "(", "'::/0'", ")", "v", ".", "value", "=", "address", "v", ".", "prefixlen", "=", "prefix", "except", ":", "return", "False", "value", "=", "str", "(", "v", ")", "vtype", "=", "'network'", "try", ":", "if", "(", "query", "and", "(", "(", "query", "not", "in", "query_func_map", ")", "or", "(", "query", "==", "'cidr_lookup'", ")", ")", "and", "ipaddr", "(", "query", ",", "'network'", ")", ")", ":", "iplist", "=", "netaddr", ".", "IPSet", "(", "[", "netaddr", ".", "IPNetwork", "(", "query", ")", "]", ")", "query", "=", "'cidr_lookup'", "except", ":", "pass", "if", "(", "version", "and", "(", "v", ".", "version", "!=", "version", ")", ")", ":", "return", "False", "extras", "=", "[", "]", "for", "arg", "in", "query_func_extra_args", ".", "get", "(", "query", ",", "tuple", "(", ")", ")", ":", "extras", ".", "append", "(", "locals", "(", ")", "[", "arg", "]", ")", "try", ":", "return", "query_func_map", "[", "query", "]", "(", "v", ",", "*", "extras", ")", "except", "KeyError", ":", "try", ":", "float", "(", "query", ")", "if", "(", "v", ".", "size", "==", "1", ")", ":", "if", "(", "vtype", "==", "'address'", ")", ":", "return", "str", "(", "v", ".", "ip", ")", "elif", "(", "vtype", "==", "'network'", ")", ":", "return", "str", "(", "v", ")", "elif", "(", "v", ".", "size", ">", "1", ")", ":", "try", ":", "return", "(", "(", "str", "(", "v", "[", "query", "]", ")", "+", "'/'", ")", "+", "str", "(", "v", ".", 
"prefixlen", ")", ")", "except", ":", "return", "False", "else", ":", "return", "value", "except", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "(", "alias", "+", "(", "': unknown filter type: %s'", "%", "query", ")", ")", ")", "return", "False" ]
filters and returns only valid ip objects .
train
false
19,364
def cmd_list_running(args, opts): for x in json_get(opts, 'crawler/engine/open_spiders'): print x
[ "def", "cmd_list_running", "(", "args", ",", "opts", ")", ":", "for", "x", "in", "json_get", "(", "opts", ",", "'crawler/engine/open_spiders'", ")", ":", "print", "x" ]
list-running - list running spiders .
train
false
19,365
def session_check_requirements(session): session.install(GCP_REPO_TOOLS_REQ) if ('update' in session.posargs): command = 'update-requirements' else: command = 'check-requirements' reqfiles = list(_list_files('.', 'requirements*.txt')) for reqfile in reqfiles: session.run('gcprepotools', command, reqfile)
[ "def", "session_check_requirements", "(", "session", ")", ":", "session", ".", "install", "(", "GCP_REPO_TOOLS_REQ", ")", "if", "(", "'update'", "in", "session", ".", "posargs", ")", ":", "command", "=", "'update-requirements'", "else", ":", "command", "=", "'check-requirements'", "reqfiles", "=", "list", "(", "_list_files", "(", "'.'", ",", "'requirements*.txt'", ")", ")", "for", "reqfile", "in", "reqfiles", ":", "session", ".", "run", "(", "'gcprepotools'", ",", "command", ",", "reqfile", ")" ]
checks for out of date requirements and optionally updates them .
train
false
19,366
def stop_process(name): run_as_root(('supervisorctl stop %(name)s' % locals()))
[ "def", "stop_process", "(", "name", ")", ":", "run_as_root", "(", "(", "'supervisorctl stop %(name)s'", "%", "locals", "(", ")", ")", ")" ]
stop a supervisor process .
train
false
19,367
def maybe_name_or_idx(idx, model): if (idx is None): idx = lrange(model.exog.shape[1]) if isinstance(idx, (int, long)): exog_name = model.exog_names[idx] exog_idx = idx elif isinstance(idx, (tuple, list)): exog_name = [] exog_idx = [] for item in idx: (exog_name_item, exog_idx_item) = maybe_name_or_idx(item, model) exog_name.append(exog_name_item) exog_idx.append(exog_idx_item) else: exog_name = idx exog_idx = model.exog_names.index(idx) return (exog_name, exog_idx)
[ "def", "maybe_name_or_idx", "(", "idx", ",", "model", ")", ":", "if", "(", "idx", "is", "None", ")", ":", "idx", "=", "lrange", "(", "model", ".", "exog", ".", "shape", "[", "1", "]", ")", "if", "isinstance", "(", "idx", ",", "(", "int", ",", "long", ")", ")", ":", "exog_name", "=", "model", ".", "exog_names", "[", "idx", "]", "exog_idx", "=", "idx", "elif", "isinstance", "(", "idx", ",", "(", "tuple", ",", "list", ")", ")", ":", "exog_name", "=", "[", "]", "exog_idx", "=", "[", "]", "for", "item", "in", "idx", ":", "(", "exog_name_item", ",", "exog_idx_item", ")", "=", "maybe_name_or_idx", "(", "item", ",", "model", ")", "exog_name", ".", "append", "(", "exog_name_item", ")", "exog_idx", ".", "append", "(", "exog_idx_item", ")", "else", ":", "exog_name", "=", "idx", "exog_idx", "=", "model", ".", "exog_names", ".", "index", "(", "idx", ")", "return", "(", "exog_name", ",", "exog_idx", ")" ]
give a name or an integer and return the name and integer location of the column in a design matrix .
train
false
19,368
def _tan_repl_func(expr): if isinstance(expr, tan): return (sin(*expr.args) / cos(*expr.args)) elif ((not expr.args) or expr.is_Derivative): return expr
[ "def", "_tan_repl_func", "(", "expr", ")", ":", "if", "isinstance", "(", "expr", ",", "tan", ")", ":", "return", "(", "sin", "(", "*", "expr", ".", "args", ")", "/", "cos", "(", "*", "expr", ".", "args", ")", ")", "elif", "(", "(", "not", "expr", ".", "args", ")", "or", "expr", ".", "is_Derivative", ")", ":", "return", "expr" ]
replace tan with sin/cos .
train
false
19,369
def get_jenkins_info(jenkins_session, status): if status['context'].startswith('jenkins-'): jenkins_url = status['target_url'] project = jenkins_session.get((jenkins_url + JENKINS_BUILD_INFO_PATH)).json() return jenkins_info_from_response(project) return (None, None)
[ "def", "get_jenkins_info", "(", "jenkins_session", ",", "status", ")", ":", "if", "status", "[", "'context'", "]", ".", "startswith", "(", "'jenkins-'", ")", ":", "jenkins_url", "=", "status", "[", "'target_url'", "]", "project", "=", "jenkins_session", ".", "get", "(", "(", "jenkins_url", "+", "JENKINS_BUILD_INFO_PATH", ")", ")", ".", "json", "(", ")", "return", "jenkins_info_from_response", "(", "project", ")", "return", "(", "None", ",", "None", ")" ]
get the jenkins job info for a github status .
train
false
19,370
def getarray(a, b, lock=None): if (isinstance(b, tuple) and any(((x is None) for x in b))): b2 = tuple((x for x in b if (x is not None))) b3 = tuple(((None if (x is None) else slice(None, None)) for x in b if (not isinstance(x, (int, long))))) return getarray(a, b2, lock)[b3] if lock: lock.acquire() try: c = a[b] if (type(c) != np.ndarray): c = np.asarray(c) finally: if lock: lock.release() return c
[ "def", "getarray", "(", "a", ",", "b", ",", "lock", "=", "None", ")", ":", "if", "(", "isinstance", "(", "b", ",", "tuple", ")", "and", "any", "(", "(", "(", "x", "is", "None", ")", "for", "x", "in", "b", ")", ")", ")", ":", "b2", "=", "tuple", "(", "(", "x", "for", "x", "in", "b", "if", "(", "x", "is", "not", "None", ")", ")", ")", "b3", "=", "tuple", "(", "(", "(", "None", "if", "(", "x", "is", "None", ")", "else", "slice", "(", "None", ",", "None", ")", ")", "for", "x", "in", "b", "if", "(", "not", "isinstance", "(", "x", ",", "(", "int", ",", "long", ")", ")", ")", ")", ")", "return", "getarray", "(", "a", ",", "b2", ",", "lock", ")", "[", "b3", "]", "if", "lock", ":", "lock", ".", "acquire", "(", ")", "try", ":", "c", "=", "a", "[", "b", "]", "if", "(", "type", "(", "c", ")", "!=", "np", ".", "ndarray", ")", ":", "c", "=", "np", ".", "asarray", "(", "c", ")", "finally", ":", "if", "lock", ":", "lock", ".", "release", "(", ")", "return", "c" ]
mimics getitem but includes call to np .
train
false
19,372
def getEvaluatedExpressionValue(elementNode, value): try: return getEvaluatedExpressionValueBySplitLine(elementNode, getEvaluatorSplitWords(value)) except: print 'Warning, in getEvaluatedExpressionValue in evaluate could not get a value for:' print value traceback.print_exc(file=sys.stdout) return None
[ "def", "getEvaluatedExpressionValue", "(", "elementNode", ",", "value", ")", ":", "try", ":", "return", "getEvaluatedExpressionValueBySplitLine", "(", "elementNode", ",", "getEvaluatorSplitWords", "(", "value", ")", ")", "except", ":", "print", "'Warning, in getEvaluatedExpressionValue in evaluate could not get a value for:'", "print", "value", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stdout", ")", "return", "None" ]
evaluate the expression value .
train
false
19,373
def _get_T1T2_mag_inds(info): picks = pick_types(info, meg='mag') old_mag_inds = [] for ii in picks: ch = info['chs'][ii] if (ch['coil_type'] in (FIFF.FIFFV_COIL_VV_MAG_T1, FIFF.FIFFV_COIL_VV_MAG_T2)): old_mag_inds.append(ii) return old_mag_inds
[ "def", "_get_T1T2_mag_inds", "(", "info", ")", ":", "picks", "=", "pick_types", "(", "info", ",", "meg", "=", "'mag'", ")", "old_mag_inds", "=", "[", "]", "for", "ii", "in", "picks", ":", "ch", "=", "info", "[", "'chs'", "]", "[", "ii", "]", "if", "(", "ch", "[", "'coil_type'", "]", "in", "(", "FIFF", ".", "FIFFV_COIL_VV_MAG_T1", ",", "FIFF", ".", "FIFFV_COIL_VV_MAG_T2", ")", ")", ":", "old_mag_inds", ".", "append", "(", "ii", ")", "return", "old_mag_inds" ]
helper to find t1/t2 magnetometer coil types .
train
false
19,374
def id_number_checksum(gd): n = s = 0 for c in (((gd['year'] + gd['month']) + gd['day']) + gd['serial']): tmp = ((((n % 2) and 1) or 2) * int(c)) if (tmp > 9): tmp = sum([int(i) for i in str(tmp)]) s += tmp n += 1 if ((s % 10) == 0): return 0 return ((((s // 10) + 1) * 10) - s)
[ "def", "id_number_checksum", "(", "gd", ")", ":", "n", "=", "s", "=", "0", "for", "c", "in", "(", "(", "(", "gd", "[", "'year'", "]", "+", "gd", "[", "'month'", "]", ")", "+", "gd", "[", "'day'", "]", ")", "+", "gd", "[", "'serial'", "]", ")", ":", "tmp", "=", "(", "(", "(", "(", "n", "%", "2", ")", "and", "1", ")", "or", "2", ")", "*", "int", "(", "c", ")", ")", "if", "(", "tmp", ">", "9", ")", ":", "tmp", "=", "sum", "(", "[", "int", "(", "i", ")", "for", "i", "in", "str", "(", "tmp", ")", "]", ")", "s", "+=", "tmp", "n", "+=", "1", "if", "(", "(", "s", "%", "10", ")", "==", "0", ")", ":", "return", "0", "return", "(", "(", "(", "(", "s", "//", "10", ")", "+", "1", ")", "*", "10", ")", "-", "s", ")" ]
calculates a swedish id number checksum .
train
false
19,375
def resetProfile(): global settingsList for set in settingsList: if (not set.isProfile()): continue set.setValue(set.getDefault()) if (getMachineSetting('machine_type') == 'ultimaker'): putProfileSetting('nozzle_size', '0.4') if (getMachineSetting('ultimaker_extruder_upgrade') == 'True'): putProfileSetting('retraction_enable', 'True') elif (getMachineSetting('machine_type') == 'ultimaker_plus'): putProfileSetting('nozzle_size', '0.4') putProfileSetting('retraction_enable', 'True') elif getMachineSetting('machine_type').startswith('ultimaker2'): putProfileSetting('nozzle_size', '0.4') putProfileSetting('retraction_enable', 'True') else: putProfileSetting('nozzle_size', '0.5') putProfileSetting('retraction_enable', 'True')
[ "def", "resetProfile", "(", ")", ":", "global", "settingsList", "for", "set", "in", "settingsList", ":", "if", "(", "not", "set", ".", "isProfile", "(", ")", ")", ":", "continue", "set", ".", "setValue", "(", "set", ".", "getDefault", "(", ")", ")", "if", "(", "getMachineSetting", "(", "'machine_type'", ")", "==", "'ultimaker'", ")", ":", "putProfileSetting", "(", "'nozzle_size'", ",", "'0.4'", ")", "if", "(", "getMachineSetting", "(", "'ultimaker_extruder_upgrade'", ")", "==", "'True'", ")", ":", "putProfileSetting", "(", "'retraction_enable'", ",", "'True'", ")", "elif", "(", "getMachineSetting", "(", "'machine_type'", ")", "==", "'ultimaker_plus'", ")", ":", "putProfileSetting", "(", "'nozzle_size'", ",", "'0.4'", ")", "putProfileSetting", "(", "'retraction_enable'", ",", "'True'", ")", "elif", "getMachineSetting", "(", "'machine_type'", ")", ".", "startswith", "(", "'ultimaker2'", ")", ":", "putProfileSetting", "(", "'nozzle_size'", ",", "'0.4'", ")", "putProfileSetting", "(", "'retraction_enable'", ",", "'True'", ")", "else", ":", "putProfileSetting", "(", "'nozzle_size'", ",", "'0.5'", ")", "putProfileSetting", "(", "'retraction_enable'", ",", "'True'", ")" ]
reset the profile for the current machine to default .
train
false
19,376
def get_django_registration_version(): try: from registration.backends import get_backend version = 'new' except ImportError: version = 'old' try: import registration except ImportError as e: version = None return version
[ "def", "get_django_registration_version", "(", ")", ":", "try", ":", "from", "registration", ".", "backends", "import", "get_backend", "version", "=", "'new'", "except", "ImportError", ":", "version", "=", "'old'", "try", ":", "import", "registration", "except", "ImportError", "as", "e", ":", "version", "=", "None", "return", "version" ]
returns new .
train
false
19,379
def _build_option_string(options): if options: return 'with the following build options: {0}'.format(_get_option_list(options)) else: return 'with the default build options'
[ "def", "_build_option_string", "(", "options", ")", ":", "if", "options", ":", "return", "'with the following build options: {0}'", ".", "format", "(", "_get_option_list", "(", "options", ")", ")", "else", ":", "return", "'with the default build options'" ]
common function to get a string to append to the end of the state comment .
train
false
19,380
def assert_check(check, source_string, target_string, should_skip, **kwargs): try: assert (should_skip == check(source_string, target_string, **kwargs)) except FilterFailure: assert (not should_skip)
[ "def", "assert_check", "(", "check", ",", "source_string", ",", "target_string", ",", "should_skip", ",", "**", "kwargs", ")", ":", "try", ":", "assert", "(", "should_skip", "==", "check", "(", "source_string", ",", "target_string", ",", "**", "kwargs", ")", ")", "except", "FilterFailure", ":", "assert", "(", "not", "should_skip", ")" ]
runs check and asserts whether it should be skipped or not for the given source_string and target_string .
train
false
19,381
def flowtuple_from_raw(raw, linktype=1): ip = iplayer_from_raw(raw, linktype) if isinstance(ip, dpkt.ip.IP): (sip, dip) = (socket.inet_ntoa(ip.src), socket.inet_ntoa(ip.dst)) proto = ip.p if ((proto == dpkt.ip.IP_PROTO_TCP) or (proto == dpkt.ip.IP_PROTO_UDP)): l3 = ip.data (sport, dport) = (l3.sport, l3.dport) else: (sport, dport) = (0, 0) else: (sip, dip, proto) = (0, 0, (-1)) (sport, dport) = (0, 0) flowtuple = (sip, dip, sport, dport, proto) return flowtuple
[ "def", "flowtuple_from_raw", "(", "raw", ",", "linktype", "=", "1", ")", ":", "ip", "=", "iplayer_from_raw", "(", "raw", ",", "linktype", ")", "if", "isinstance", "(", "ip", ",", "dpkt", ".", "ip", ".", "IP", ")", ":", "(", "sip", ",", "dip", ")", "=", "(", "socket", ".", "inet_ntoa", "(", "ip", ".", "src", ")", ",", "socket", ".", "inet_ntoa", "(", "ip", ".", "dst", ")", ")", "proto", "=", "ip", ".", "p", "if", "(", "(", "proto", "==", "dpkt", ".", "ip", ".", "IP_PROTO_TCP", ")", "or", "(", "proto", "==", "dpkt", ".", "ip", ".", "IP_PROTO_UDP", ")", ")", ":", "l3", "=", "ip", ".", "data", "(", "sport", ",", "dport", ")", "=", "(", "l3", ".", "sport", ",", "l3", ".", "dport", ")", "else", ":", "(", "sport", ",", "dport", ")", "=", "(", "0", ",", "0", ")", "else", ":", "(", "sip", ",", "dip", ",", "proto", ")", "=", "(", "0", ",", "0", ",", "(", "-", "1", ")", ")", "(", "sport", ",", "dport", ")", "=", "(", "0", ",", "0", ")", "flowtuple", "=", "(", "sip", ",", "dip", ",", "sport", ",", "dport", ",", "proto", ")", "return", "flowtuple" ]
parse a packet from a pcap just enough to gain a flow description tuple .
train
false
19,382
def deprecated(func, name='Unknown'): def newFunc(*args, **kwargs): warnings.warn(('Call to deprecated function %s.' % name), category=DeprecationWarning) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc
[ "def", "deprecated", "(", "func", ",", "name", "=", "'Unknown'", ")", ":", "def", "newFunc", "(", "*", "args", ",", "**", "kwargs", ")", ":", "warnings", ".", "warn", "(", "(", "'Call to deprecated function %s.'", "%", "name", ")", ",", "category", "=", "DeprecationWarning", ")", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "newFunc", ".", "__name__", "=", "func", ".", "__name__", "newFunc", ".", "__doc__", "=", "func", ".", "__doc__", "newFunc", ".", "__dict__", ".", "update", "(", "func", ".", "__dict__", ")", "return", "newFunc" ]
this is a decorator used to mark functions as deprecated .
train
true
19,383
@register.function @jinja2.contextfunction def report_menu(context, request, report, obj=None): if obj: if isinstance(obj, Addon): has_privs = False if (request.user.is_authenticated() and (acl.action_allowed(request, 'Stats', 'View') or obj.has_author(request.user))): has_privs = True t = get_env().get_template('stats/addon_report_menu.html') c = {'addon': obj, 'has_privs': has_privs} return jinja2.Markup(t.render(c)) if isinstance(obj, Collection): t = get_env().get_template('stats/collection_report_menu.html') c = {'collection': obj} return jinja2.Markup(t.render(c)) t = get_env().get_template('stats/global_report_menu.html') return jinja2.Markup(t.render())
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "report_menu", "(", "context", ",", "request", ",", "report", ",", "obj", "=", "None", ")", ":", "if", "obj", ":", "if", "isinstance", "(", "obj", ",", "Addon", ")", ":", "has_privs", "=", "False", "if", "(", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "(", "acl", ".", "action_allowed", "(", "request", ",", "'Stats'", ",", "'View'", ")", "or", "obj", ".", "has_author", "(", "request", ".", "user", ")", ")", ")", ":", "has_privs", "=", "True", "t", "=", "get_env", "(", ")", ".", "get_template", "(", "'stats/addon_report_menu.html'", ")", "c", "=", "{", "'addon'", ":", "obj", ",", "'has_privs'", ":", "has_privs", "}", "return", "jinja2", ".", "Markup", "(", "t", ".", "render", "(", "c", ")", ")", "if", "isinstance", "(", "obj", ",", "Collection", ")", ":", "t", "=", "get_env", "(", ")", ".", "get_template", "(", "'stats/collection_report_menu.html'", ")", "c", "=", "{", "'collection'", ":", "obj", "}", "return", "jinja2", ".", "Markup", "(", "t", ".", "render", "(", "c", ")", ")", "t", "=", "get_env", "(", ")", ".", "get_template", "(", "'stats/global_report_menu.html'", ")", "return", "jinja2", ".", "Markup", "(", "t", ".", "render", "(", ")", ")" ]
reports menu .
train
false
19,384
def _PasswordName(user): return '{0}_pwd'.format(user)
[ "def", "_PasswordName", "(", "user", ")", ":", "return", "'{0}_pwd'", ".", "format", "(", "user", ")" ]
returns the name of the password file for the specified user .
train
false
19,385
def get_colours(n): base = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) if (n <= 3): return base[0:n] needed = ((((n - 3) + 1) / 2), ((n - 3) / 2)) colours = [] for start in (0, 1): for x in np.linspace(0, 1, (needed[start] + 2)): colours.append(((base[start] * (1.0 - x)) + (base[(start + 1)] * x))) return [pastel(c) for c in colours[0:n]]
[ "def", "get_colours", "(", "n", ")", ":", "base", "=", "np", ".", "asarray", "(", "[", "[", "1", ",", "0", ",", "0", "]", ",", "[", "0", ",", "1", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", "]", "]", ")", "if", "(", "n", "<=", "3", ")", ":", "return", "base", "[", "0", ":", "n", "]", "needed", "=", "(", "(", "(", "(", "n", "-", "3", ")", "+", "1", ")", "/", "2", ")", ",", "(", "(", "n", "-", "3", ")", "/", "2", ")", ")", "colours", "=", "[", "]", "for", "start", "in", "(", "0", ",", "1", ")", ":", "for", "x", "in", "np", ".", "linspace", "(", "0", ",", "1", ",", "(", "needed", "[", "start", "]", "+", "2", ")", ")", ":", "colours", ".", "append", "(", "(", "(", "base", "[", "start", "]", "*", "(", "1.0", "-", "x", ")", ")", "+", "(", "base", "[", "(", "start", "+", "1", ")", "]", "*", "x", ")", ")", ")", "return", "[", "pastel", "(", "c", ")", "for", "c", "in", "colours", "[", "0", ":", "n", "]", "]" ]
return n pastel colours .
train
false
19,386
def downcast_intp_index(arr): if (arr.dtype.itemsize > np.dtype(np.intp).itemsize): if (arr.size == 0): return arr.astype(np.intp) maxval = arr.max() minval = arr.min() if ((maxval > np.iinfo(np.intp).max) or (minval < np.iinfo(np.intp).min)): raise ValueError('Cannot deal with arrays with indices larger than the machine maximum address size (e.g. 64-bit indices on 32-bit machine).') return arr.astype(np.intp) return arr
[ "def", "downcast_intp_index", "(", "arr", ")", ":", "if", "(", "arr", ".", "dtype", ".", "itemsize", ">", "np", ".", "dtype", "(", "np", ".", "intp", ")", ".", "itemsize", ")", ":", "if", "(", "arr", ".", "size", "==", "0", ")", ":", "return", "arr", ".", "astype", "(", "np", ".", "intp", ")", "maxval", "=", "arr", ".", "max", "(", ")", "minval", "=", "arr", ".", "min", "(", ")", "if", "(", "(", "maxval", ">", "np", ".", "iinfo", "(", "np", ".", "intp", ")", ".", "max", ")", "or", "(", "minval", "<", "np", ".", "iinfo", "(", "np", ".", "intp", ")", ".", "min", ")", ")", ":", "raise", "ValueError", "(", "'Cannot deal with arrays with indices larger than the machine maximum address size (e.g. 64-bit indices on 32-bit machine).'", ")", "return", "arr", ".", "astype", "(", "np", ".", "intp", ")", "return", "arr" ]
down-cast index array to np .
train
false
19,388
def host_remove_labels(id, labels): labels = models.Label.smart_get_bulk(labels) models.Host.smart_get(id).labels.remove(*labels)
[ "def", "host_remove_labels", "(", "id", ",", "labels", ")", ":", "labels", "=", "models", ".", "Label", ".", "smart_get_bulk", "(", "labels", ")", "models", ".", "Host", ".", "smart_get", "(", "id", ")", ".", "labels", ".", "remove", "(", "*", "labels", ")" ]
remove labels from host .
train
false
19,389
def _validate_vdi_chain(vdi_path): def get_parent_path(path): query_cmd = ('vhd-util query -n %(path)s -p' % locals()) query_proc = make_subprocess(query_cmd, stdout=True, stderr=True) (out, err) = finish_subprocess(query_proc, query_cmd, ok_exit_codes=[0, 22]) first_line = out.splitlines()[0].strip() if first_line.endswith('.vhd'): return first_line elif ('has no parent' in first_line): return None elif ('query failed' in first_line): raise Exception(("VDI '%(path)s' not present which breaks the VDI chain, bailing out" % locals())) else: raise Exception(("Unexpected output '%(out)s' from vhd-util" % locals())) cur_path = vdi_path while cur_path: _validate_footer_timestamp(cur_path) cur_path = get_parent_path(cur_path)
[ "def", "_validate_vdi_chain", "(", "vdi_path", ")", ":", "def", "get_parent_path", "(", "path", ")", ":", "query_cmd", "=", "(", "'vhd-util query -n %(path)s -p'", "%", "locals", "(", ")", ")", "query_proc", "=", "make_subprocess", "(", "query_cmd", ",", "stdout", "=", "True", ",", "stderr", "=", "True", ")", "(", "out", ",", "err", ")", "=", "finish_subprocess", "(", "query_proc", ",", "query_cmd", ",", "ok_exit_codes", "=", "[", "0", ",", "22", "]", ")", "first_line", "=", "out", ".", "splitlines", "(", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "first_line", ".", "endswith", "(", "'.vhd'", ")", ":", "return", "first_line", "elif", "(", "'has no parent'", "in", "first_line", ")", ":", "return", "None", "elif", "(", "'query failed'", "in", "first_line", ")", ":", "raise", "Exception", "(", "(", "\"VDI '%(path)s' not present which breaks the VDI chain, bailing out\"", "%", "locals", "(", ")", ")", ")", "else", ":", "raise", "Exception", "(", "(", "\"Unexpected output '%(out)s' from vhd-util\"", "%", "locals", "(", ")", ")", ")", "cur_path", "=", "vdi_path", "while", "cur_path", ":", "_validate_footer_timestamp", "(", "cur_path", ")", "cur_path", "=", "get_parent_path", "(", "cur_path", ")" ]
this check ensures that the parent pointers on the vhds are valid before we move the vdi chain to the sr .
train
false
19,390
def csc_median_axis_0(X): if (not isinstance(X, sp.csc_matrix)): raise TypeError(('Expected matrix of CSC format, got %s' % X.format)) indptr = X.indptr (n_samples, n_features) = X.shape median = np.zeros(n_features) for (f_ind, (start, end)) in enumerate(zip(indptr[:(-1)], indptr[1:])): data = np.copy(X.data[start:end]) nz = (n_samples - data.size) median[f_ind] = _get_median(data, nz) return median
[ "def", "csc_median_axis_0", "(", "X", ")", ":", "if", "(", "not", "isinstance", "(", "X", ",", "sp", ".", "csc_matrix", ")", ")", ":", "raise", "TypeError", "(", "(", "'Expected matrix of CSC format, got %s'", "%", "X", ".", "format", ")", ")", "indptr", "=", "X", ".", "indptr", "(", "n_samples", ",", "n_features", ")", "=", "X", ".", "shape", "median", "=", "np", ".", "zeros", "(", "n_features", ")", "for", "(", "f_ind", ",", "(", "start", ",", "end", ")", ")", "in", "enumerate", "(", "zip", "(", "indptr", "[", ":", "(", "-", "1", ")", "]", ",", "indptr", "[", "1", ":", "]", ")", ")", ":", "data", "=", "np", ".", "copy", "(", "X", ".", "data", "[", "start", ":", "end", "]", ")", "nz", "=", "(", "n_samples", "-", "data", ".", "size", ")", "median", "[", "f_ind", "]", "=", "_get_median", "(", "data", ",", "nz", ")", "return", "median" ]
find the median across axis 0 of a csc matrix .
train
false
19,391
def _scrape_strip_cruft(html, plain_text_out=False): html = unescape(html) html = html.replace('\r', '\n') html = re.sub(' +', ' ', html) html = BREAK_RE.sub('\n', html) html = re.sub('<(script).*?</\\1>(?s)', '', html) if plain_text_out: html = COMMENT_RE.sub('', html) html = TAG_RE.sub('', html) html = '\n'.join([x.strip() for x in html.strip().split('\n')]) html = re.sub('\\n{3,}', '\\n\\n', html) return html
[ "def", "_scrape_strip_cruft", "(", "html", ",", "plain_text_out", "=", "False", ")", ":", "html", "=", "unescape", "(", "html", ")", "html", "=", "html", ".", "replace", "(", "'\\r'", ",", "'\\n'", ")", "html", "=", "re", ".", "sub", "(", "' +'", ",", "' '", ",", "html", ")", "html", "=", "BREAK_RE", ".", "sub", "(", "'\\n'", ",", "html", ")", "html", "=", "re", ".", "sub", "(", "'<(script).*?</\\\\1>(?s)'", ",", "''", ",", "html", ")", "if", "plain_text_out", ":", "html", "=", "COMMENT_RE", ".", "sub", "(", "''", ",", "html", ")", "html", "=", "TAG_RE", ".", "sub", "(", "''", ",", "html", ")", "html", "=", "'\\n'", ".", "join", "(", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "html", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", "]", ")", "html", "=", "re", ".", "sub", "(", "'\\\\n{3,}'", ",", "'\\\\n\\\\n'", ",", "html", ")", "return", "html" ]
clean up html .
train
false
19,392
def gci(): return gcf()._gci()
[ "def", "gci", "(", ")", ":", "return", "gcf", "(", ")", ".", "_gci", "(", ")" ]
get the current :class:~matplotlib .
train
false
19,393
@pytest.mark.parametrize((u'expr', u'result'), [((lambda x, y: (x + y)), 5.0), ((lambda x, y: (x - y)), (-1.0)), ((lambda x, y: (x * y)), 6.0), ((lambda x, y: (x / y)), (2.0 / 3.0)), ((lambda x, y: (x ** y)), 8.0)]) def test_two_model_instance_arithmetic_1d(expr, result): s = expr(Const1D(2), Const1D(3)) assert isinstance(s, Model) assert (s.n_inputs == 1) assert (s.n_outputs == 1) out = s(0) assert (out == result) assert isinstance(out, float)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "(", "u'expr'", ",", "u'result'", ")", ",", "[", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "+", "y", ")", ")", ",", "5.0", ")", ",", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "-", "y", ")", ")", ",", "(", "-", "1.0", ")", ")", ",", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "*", "y", ")", ")", ",", "6.0", ")", ",", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "/", "y", ")", ")", ",", "(", "2.0", "/", "3.0", ")", ")", ",", "(", "(", "lambda", "x", ",", "y", ":", "(", "x", "**", "y", ")", ")", ",", "8.0", ")", "]", ")", "def", "test_two_model_instance_arithmetic_1d", "(", "expr", ",", "result", ")", ":", "s", "=", "expr", "(", "Const1D", "(", "2", ")", ",", "Const1D", "(", "3", ")", ")", "assert", "isinstance", "(", "s", ",", "Model", ")", "assert", "(", "s", ".", "n_inputs", "==", "1", ")", "assert", "(", "s", ".", "n_outputs", "==", "1", ")", "out", "=", "s", "(", "0", ")", "assert", "(", "out", "==", "result", ")", "assert", "isinstance", "(", "out", ",", "float", ")" ]
like test_two_model_class_arithmetic_1d .
train
false
19,394
def has_anonymous_link(node, auth): if auth.private_link: return auth.private_link.anonymous return False
[ "def", "has_anonymous_link", "(", "node", ",", "auth", ")", ":", "if", "auth", ".", "private_link", ":", "return", "auth", ".", "private_link", ".", "anonymous", "return", "False" ]
check if the node is anonymous to the user .
train
false
19,396
def test_patch_returns_error_on_invalid_dir(): from gooey.gui import image_repository with pytest.raises(IOError) as kaboom: image_repository.patch_images('foo/bar/not/a/path') assert (' user supplied' in str(kaboom.value)) assert ('foo/bar/not/a/path' in str(kaboom.value))
[ "def", "test_patch_returns_error_on_invalid_dir", "(", ")", ":", "from", "gooey", ".", "gui", "import", "image_repository", "with", "pytest", ".", "raises", "(", "IOError", ")", "as", "kaboom", ":", "image_repository", ".", "patch_images", "(", "'foo/bar/not/a/path'", ")", "assert", "(", "' user supplied'", "in", "str", "(", "kaboom", ".", "value", ")", ")", "assert", "(", "'foo/bar/not/a/path'", "in", "str", "(", "kaboom", ".", "value", ")", ")" ]
patch should explode with a helpful message if it cannot find the supplied directory .
train
false
19,397
def A(host, nameserver=None): if _has_dig(): return __salt__['dig.A'](host, nameserver) elif (nameserver is None): try: addresses = [sock[4][0] for sock in socket.getaddrinfo(host, None, socket.AF_INET, 0, socket.SOCK_RAW)] return addresses except socket.gaierror: return 'Unable to resolve {0}'.format(host) return 'This function requires dig, which is not currently available'
[ "def", "A", "(", "host", ",", "nameserver", "=", "None", ")", ":", "if", "_has_dig", "(", ")", ":", "return", "__salt__", "[", "'dig.A'", "]", "(", "host", ",", "nameserver", ")", "elif", "(", "nameserver", "is", "None", ")", ":", "try", ":", "addresses", "=", "[", "sock", "[", "4", "]", "[", "0", "]", "for", "sock", "in", "socket", ".", "getaddrinfo", "(", "host", ",", "None", ",", "socket", ".", "AF_INET", ",", "0", ",", "socket", ".", "SOCK_RAW", ")", "]", "return", "addresses", "except", "socket", ".", "gaierror", ":", "return", "'Unable to resolve {0}'", ".", "format", "(", "host", ")", "return", "'This function requires dig, which is not currently available'" ]
return the a record(s) for host .
train
true
19,398
@handle_response_format @treeio_login_required def item_edit(request, knowledgeItem_id, response_format='html'): item = get_object_or_404(KnowledgeItem, pk=knowledgeItem_id) items = Object.filter_permitted(manager=KnowledgeItem.objects, user=request.user.profile, mode='r') if (not request.user.profile.has_permission(item, mode='w')): return user_denied(request, message="You don't have access to this Knowledge Item") if request.POST: if ('cancel' not in request.POST): form = KnowledgeItemForm(request.user.profile, None, request.POST, instance=item) if form.is_valid(): item = form.save() return HttpResponseRedirect(reverse('knowledge_item_view', args=[item.folder.treepath, item.treepath])) else: return HttpResponseRedirect(reverse('knowledge_item_view', args=[item.folder.treepath, item.treepath])) else: form = KnowledgeItemForm(request.user.profile, None, instance=item) context = _get_default_context(request) context.update({'form': form, 'item': item, 'items': items}) return render_to_response('knowledge/item_edit', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "item_edit", "(", "request", ",", "knowledgeItem_id", ",", "response_format", "=", "'html'", ")", ":", "item", "=", "get_object_or_404", "(", "KnowledgeItem", ",", "pk", "=", "knowledgeItem_id", ")", "items", "=", "Object", ".", "filter_permitted", "(", "manager", "=", "KnowledgeItem", ".", "objects", ",", "user", "=", "request", ".", "user", ".", "profile", ",", "mode", "=", "'r'", ")", "if", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "item", ",", "mode", "=", "'w'", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Knowledge Item\"", ")", "if", "request", ".", "POST", ":", "if", "(", "'cancel'", "not", "in", "request", ".", "POST", ")", ":", "form", "=", "KnowledgeItemForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "request", ".", "POST", ",", "instance", "=", "item", ")", "if", "form", ".", "is_valid", "(", ")", ":", "item", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'knowledge_item_view'", ",", "args", "=", "[", "item", ".", "folder", ".", "treepath", ",", "item", ".", "treepath", "]", ")", ")", "else", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'knowledge_item_view'", ",", "args", "=", "[", "item", ".", "folder", ".", "treepath", ",", "item", ".", "treepath", "]", ")", ")", "else", ":", "form", "=", "KnowledgeItemForm", "(", "request", ".", "user", ".", "profile", ",", "None", ",", "instance", "=", "item", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'form'", ":", "form", ",", "'item'", ":", "item", ",", "'items'", ":", "items", "}", ")", "return", "render_to_response", "(", "'knowledge/item_edit'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
item edit page .
train
false
19,399
def isPositive(phrase): return bool(re.search('\\b(sure|yes|yeah|go)\\b', phrase, re.IGNORECASE))
[ "def", "isPositive", "(", "phrase", ")", ":", "return", "bool", "(", "re", ".", "search", "(", "'\\\\b(sure|yes|yeah|go)\\\\b'", ",", "phrase", ",", "re", ".", "IGNORECASE", ")", ")" ]
returns true if the input phrase has a positive sentiment .
train
false
19,401
def create_cmats_to_csv_pipeline(name='cmats_to_csv', extra_column_heading='subject'): inputnode = pe.Node(interface=util.IdentityInterface(fields=['extra_field', 'matlab_matrix_files']), name='inputnode') pipeline = pe.Workflow(name=name) Matlab2CSV = pe.MapNode(interface=misc.Matlab2CSV(), name='Matlab2CSV', iterfield=['in_file']) MergeCSVFiles = pe.Node(interface=misc.MergeCSVFiles(), name='MergeCSVFiles') MergeCSVFiles.inputs.extra_column_heading = extra_column_heading pipeline.connect([(inputnode, Matlab2CSV, [('matlab_matrix_files', 'in_file')])]) pipeline.connect([(Matlab2CSV, MergeCSVFiles, [('csv_files', 'in_files')])]) pipeline.connect([(inputnode, MergeCSVFiles, [('extra_field', 'extra_field')])]) outputnode = pe.Node(interface=util.IdentityInterface(fields=['csv_file']), name='outputnode') pipeline.connect([(MergeCSVFiles, outputnode, [('csv_file', 'csv_file')])]) return pipeline
[ "def", "create_cmats_to_csv_pipeline", "(", "name", "=", "'cmats_to_csv'", ",", "extra_column_heading", "=", "'subject'", ")", ":", "inputnode", "=", "pe", ".", "Node", "(", "interface", "=", "util", ".", "IdentityInterface", "(", "fields", "=", "[", "'extra_field'", ",", "'matlab_matrix_files'", "]", ")", ",", "name", "=", "'inputnode'", ")", "pipeline", "=", "pe", ".", "Workflow", "(", "name", "=", "name", ")", "Matlab2CSV", "=", "pe", ".", "MapNode", "(", "interface", "=", "misc", ".", "Matlab2CSV", "(", ")", ",", "name", "=", "'Matlab2CSV'", ",", "iterfield", "=", "[", "'in_file'", "]", ")", "MergeCSVFiles", "=", "pe", ".", "Node", "(", "interface", "=", "misc", ".", "MergeCSVFiles", "(", ")", ",", "name", "=", "'MergeCSVFiles'", ")", "MergeCSVFiles", ".", "inputs", ".", "extra_column_heading", "=", "extra_column_heading", "pipeline", ".", "connect", "(", "[", "(", "inputnode", ",", "Matlab2CSV", ",", "[", "(", "'matlab_matrix_files'", ",", "'in_file'", ")", "]", ")", "]", ")", "pipeline", ".", "connect", "(", "[", "(", "Matlab2CSV", ",", "MergeCSVFiles", ",", "[", "(", "'csv_files'", ",", "'in_files'", ")", "]", ")", "]", ")", "pipeline", ".", "connect", "(", "[", "(", "inputnode", ",", "MergeCSVFiles", ",", "[", "(", "'extra_field'", ",", "'extra_field'", ")", "]", ")", "]", ")", "outputnode", "=", "pe", ".", "Node", "(", "interface", "=", "util", ".", "IdentityInterface", "(", "fields", "=", "[", "'csv_file'", "]", ")", ",", "name", "=", "'outputnode'", ")", "pipeline", ".", "connect", "(", "[", "(", "MergeCSVFiles", ",", "outputnode", ",", "[", "(", "'csv_file'", ",", "'csv_file'", ")", "]", ")", "]", ")", "return", "pipeline" ]
creates a workflow to convert the outputs from creatematrix into a single comma-separated value text file .
train
false
19,402
def iter_stack_frames(frames=None): if (not frames): frames = inspect.stack()[1:] for (frame, lineno) in ((f[0], f[2]) for f in frames): f_locals = getattr(frame, 'f_locals', {}) if _getitem_from_frame(f_locals, '__traceback_hide__'): continue (yield (frame, lineno))
[ "def", "iter_stack_frames", "(", "frames", "=", "None", ")", ":", "if", "(", "not", "frames", ")", ":", "frames", "=", "inspect", ".", "stack", "(", ")", "[", "1", ":", "]", "for", "(", "frame", ",", "lineno", ")", "in", "(", "(", "f", "[", "0", "]", ",", "f", "[", "2", "]", ")", "for", "f", "in", "frames", ")", ":", "f_locals", "=", "getattr", "(", "frame", ",", "'f_locals'", ",", "{", "}", ")", "if", "_getitem_from_frame", "(", "f_locals", ",", "'__traceback_hide__'", ")", ":", "continue", "(", "yield", "(", "frame", ",", "lineno", ")", ")" ]
given an optional list of frames .
train
true
19,404
def is_win(): with settings(hide('everything'), warn_only=True): return ('"' in run('echo "Will you echo quotation marks"'))
[ "def", "is_win", "(", ")", ":", "with", "settings", "(", "hide", "(", "'everything'", ")", ",", "warn_only", "=", "True", ")", ":", "return", "(", "'\"'", "in", "run", "(", "'echo \"Will you echo quotation marks\"'", ")", ")" ]
return true if remote ssh server is running windows .
train
false
19,405
def test_search_missing_argument(script): result = script.pip('search', expect_error=True) assert ('ERROR: Missing required argument (search query).' in result.stderr)
[ "def", "test_search_missing_argument", "(", "script", ")", ":", "result", "=", "script", ".", "pip", "(", "'search'", ",", "expect_error", "=", "True", ")", "assert", "(", "'ERROR: Missing required argument (search query).'", "in", "result", ".", "stderr", ")" ]
test missing required argument for search .
train
false
19,406
def _list_entries(logger): inner = RetryResult(_has_entries)(_consume_entries) outer = RetryErrors(GaxError, _retry_on_unavailable)(inner) return outer(logger)
[ "def", "_list_entries", "(", "logger", ")", ":", "inner", "=", "RetryResult", "(", "_has_entries", ")", "(", "_consume_entries", ")", "outer", "=", "RetryErrors", "(", "GaxError", ",", "_retry_on_unavailable", ")", "(", "inner", ")", "return", "outer", "(", "logger", ")" ]
retry-ing list entries in a logger .
train
false
19,407
def check_subscription_permissions(topic_name, subscription_name): pubsub_client = pubsub.Client() topic = pubsub_client.topic(topic_name) subscription = topic.subscription(subscription_name) permissions_to_check = ['pubsub.subscriptions.consume', 'pubsub.subscriptions.update'] allowed_permissions = subscription.check_iam_permissions(permissions_to_check) print 'Allowed permissions for subscription {} on topic {}: {}'.format(subscription.name, topic.name, allowed_permissions)
[ "def", "check_subscription_permissions", "(", "topic_name", ",", "subscription_name", ")", ":", "pubsub_client", "=", "pubsub", ".", "Client", "(", ")", "topic", "=", "pubsub_client", ".", "topic", "(", "topic_name", ")", "subscription", "=", "topic", ".", "subscription", "(", "subscription_name", ")", "permissions_to_check", "=", "[", "'pubsub.subscriptions.consume'", ",", "'pubsub.subscriptions.update'", "]", "allowed_permissions", "=", "subscription", ".", "check_iam_permissions", "(", "permissions_to_check", ")", "print", "'Allowed permissions for subscription {} on topic {}: {}'", ".", "format", "(", "subscription", ".", "name", ",", "topic", ".", "name", ",", "allowed_permissions", ")" ]
checks to which permissions are available on the given subscription .
train
false
19,408
def process_sort_params(sort_keys, sort_dirs, default_keys=None, default_dir='asc'): if (default_keys is None): default_keys = ['created_at', 'id'] if (sort_dirs and len(sort_dirs)): default_dir_value = sort_dirs[0] else: default_dir_value = default_dir if sort_keys: result_keys = list(sort_keys) else: result_keys = [] if sort_dirs: result_dirs = [] for sort_dir in sort_dirs: if (sort_dir not in ('asc', 'desc')): msg = _("Unknown sort direction, must be 'desc' or 'asc'.") raise exception.InvalidInput(reason=msg) result_dirs.append(sort_dir) else: result_dirs = [default_dir_value for _sort_key in result_keys] while (len(result_dirs) < len(result_keys)): result_dirs.append(default_dir_value) if (len(result_dirs) > len(result_keys)): msg = _('Sort direction array size exceeds sort key array size.') raise exception.InvalidInput(reason=msg) for key in default_keys: if (key not in result_keys): result_keys.append(key) result_dirs.append(default_dir_value) return (result_keys, result_dirs)
[ "def", "process_sort_params", "(", "sort_keys", ",", "sort_dirs", ",", "default_keys", "=", "None", ",", "default_dir", "=", "'asc'", ")", ":", "if", "(", "default_keys", "is", "None", ")", ":", "default_keys", "=", "[", "'created_at'", ",", "'id'", "]", "if", "(", "sort_dirs", "and", "len", "(", "sort_dirs", ")", ")", ":", "default_dir_value", "=", "sort_dirs", "[", "0", "]", "else", ":", "default_dir_value", "=", "default_dir", "if", "sort_keys", ":", "result_keys", "=", "list", "(", "sort_keys", ")", "else", ":", "result_keys", "=", "[", "]", "if", "sort_dirs", ":", "result_dirs", "=", "[", "]", "for", "sort_dir", "in", "sort_dirs", ":", "if", "(", "sort_dir", "not", "in", "(", "'asc'", ",", "'desc'", ")", ")", ":", "msg", "=", "_", "(", "\"Unknown sort direction, must be 'desc' or 'asc'.\"", ")", "raise", "exception", ".", "InvalidInput", "(", "reason", "=", "msg", ")", "result_dirs", ".", "append", "(", "sort_dir", ")", "else", ":", "result_dirs", "=", "[", "default_dir_value", "for", "_sort_key", "in", "result_keys", "]", "while", "(", "len", "(", "result_dirs", ")", "<", "len", "(", "result_keys", ")", ")", ":", "result_dirs", ".", "append", "(", "default_dir_value", ")", "if", "(", "len", "(", "result_dirs", ")", ">", "len", "(", "result_keys", ")", ")", ":", "msg", "=", "_", "(", "'Sort direction array size exceeds sort key array size.'", ")", "raise", "exception", ".", "InvalidInput", "(", "reason", "=", "msg", ")", "for", "key", "in", "default_keys", ":", "if", "(", "key", "not", "in", "result_keys", ")", ":", "result_keys", ".", "append", "(", "key", ")", "result_dirs", ".", "append", "(", "default_dir_value", ")", "return", "(", "result_keys", ",", "result_dirs", ")" ]
process the sort parameters to include default keys .
train
false
19,411
def prefix_dict_keys(dictionary, prefix='_'): result = {} for (key, value) in six.iteritems(dictionary): result[('%s%s' % (prefix, key))] = value return result
[ "def", "prefix_dict_keys", "(", "dictionary", ",", "prefix", "=", "'_'", ")", ":", "result", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "dictionary", ")", ":", "result", "[", "(", "'%s%s'", "%", "(", "prefix", ",", "key", ")", ")", "]", "=", "value", "return", "result" ]
prefix dictionary keys with a provided prefix .
train
false
19,414
def switch_backend(newbackend): close('all') global new_figure_manager, draw_if_interactive, show matplotlib.use(newbackend, warn=False) reload(matplotlib.backends) from matplotlib.backends import pylab_setup (new_figure_manager, draw_if_interactive, show) = pylab_setup()
[ "def", "switch_backend", "(", "newbackend", ")", ":", "close", "(", "'all'", ")", "global", "new_figure_manager", ",", "draw_if_interactive", ",", "show", "matplotlib", ".", "use", "(", "newbackend", ",", "warn", "=", "False", ")", "reload", "(", "matplotlib", ".", "backends", ")", "from", "matplotlib", ".", "backends", "import", "pylab_setup", "(", "new_figure_manager", ",", "draw_if_interactive", ",", "show", ")", "=", "pylab_setup", "(", ")" ]
switch the default backend to newbackend .
train
false
19,416
@pytest.fixture def data(N=100, period=1, theta=[10, 2, 3], dy=1, rseed=0): rng = np.random.RandomState(rseed) t = ((20 * period) * rng.rand(N)) omega = ((2 * np.pi) / period) y = ((theta[0] + (theta[1] * np.sin((omega * t)))) + (theta[2] * np.cos((omega * t)))) dy = (dy * (0.5 + rng.rand(N))) y += (dy * rng.randn(N)) return (t, y, dy)
[ "@", "pytest", ".", "fixture", "def", "data", "(", "N", "=", "100", ",", "period", "=", "1", ",", "theta", "=", "[", "10", ",", "2", ",", "3", "]", ",", "dy", "=", "1", ",", "rseed", "=", "0", ")", ":", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "rseed", ")", "t", "=", "(", "(", "20", "*", "period", ")", "*", "rng", ".", "rand", "(", "N", ")", ")", "omega", "=", "(", "(", "2", "*", "np", ".", "pi", ")", "/", "period", ")", "y", "=", "(", "(", "theta", "[", "0", "]", "+", "(", "theta", "[", "1", "]", "*", "np", ".", "sin", "(", "(", "omega", "*", "t", ")", ")", ")", ")", "+", "(", "theta", "[", "2", "]", "*", "np", ".", "cos", "(", "(", "omega", "*", "t", ")", ")", ")", ")", "dy", "=", "(", "dy", "*", "(", "0.5", "+", "rng", ".", "rand", "(", "N", ")", ")", ")", "y", "+=", "(", "dy", "*", "rng", ".", "randn", "(", "N", ")", ")", "return", "(", "t", ",", "y", ",", "dy", ")" ]
get a location for data .
train
false
19,417
def educateQuotes(text, language='en'): smart = smartchars(language) punct_class = '[!"#\\$\\%\'()*+,-.\\/:;<=>?\\@\\[\\\\\\]\\^_`{|}~]' text = re.sub(("^'(?=%s\\\\B)" % (punct_class,)), smart.csquote, text) text = re.sub(('^"(?=%s\\\\B)' % (punct_class,)), smart.cpquote, text) text = re.sub('"\'(?=\\w)', (smart.opquote + smart.osquote), text) text = re.sub('\'"(?=\\w)', (smart.osquote + smart.opquote), text) text = re.sub("\\b'(?=\\d{2}s)", smart.csquote, text) close_class = '[^\\ \\t\\r\\n\\[\\{\\(\\-]' dec_dashes = '&#8211;|&#8212;' opening_single_quotes_regex = re.compile(("\n (\n \\s | # a whitespace char, or\n &nbsp; | # a non-breaking space entity, or\n -- | # dashes, or\n &[mn]dash; | # named dash entities\n %s | # or decimal entities\n &\\#x201[34]; # or hex\n )\n ' # the quote\n (?=\\w) # followed by a word character\n " % (dec_dashes,)), re.VERBOSE) text = opening_single_quotes_regex.sub(('\\1' + smart.osquote), text) closing_single_quotes_regex = re.compile(("\n (%s)\n '\n (?!\\s | s\\b | \\d)\n " % (close_class,)), re.VERBOSE) text = closing_single_quotes_regex.sub(('\\1' + smart.csquote), text) closing_single_quotes_regex = re.compile(("\n (%s)\n '\n (\\s | s\\b)\n " % (close_class,)), re.VERBOSE) text = closing_single_quotes_regex.sub(('\\1%s\\2' % smart.csquote), text) text = re.sub("'", smart.osquote, text) opening_double_quotes_regex = re.compile(('\n (\n \\s | # a whitespace char, or\n &nbsp; | # a non-breaking space entity, or\n -- | # dashes, or\n &[mn]dash; | # named dash entities\n %s | # or decimal entities\n &\\#x201[34]; # or hex\n )\n " # the quote\n (?=\\w) # followed by a word character\n ' % (dec_dashes,)), re.VERBOSE) text = opening_double_quotes_regex.sub(('\\1' + smart.opquote), text) closing_double_quotes_regex = re.compile(('\n #(%s)? 
# character that indicates the quote should be closing\n "\n (?=\\s)\n ' % (close_class,)), re.VERBOSE) text = closing_double_quotes_regex.sub(smart.cpquote, text) closing_double_quotes_regex = re.compile(('\n (%s) # character that indicates the quote should be closing\n "\n ' % (close_class,)), re.VERBOSE) text = closing_double_quotes_regex.sub(('\\1' + smart.cpquote), text) text = re.sub('"', smart.opquote, text) return text
[ "def", "educateQuotes", "(", "text", ",", "language", "=", "'en'", ")", ":", "smart", "=", "smartchars", "(", "language", ")", "punct_class", "=", "'[!\"#\\\\$\\\\%\\'()*+,-.\\\\/:;<=>?\\\\@\\\\[\\\\\\\\\\\\]\\\\^_`{|}~]'", "text", "=", "re", ".", "sub", "(", "(", "\"^'(?=%s\\\\\\\\B)\"", "%", "(", "punct_class", ",", ")", ")", ",", "smart", ".", "csquote", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "(", "'^\"(?=%s\\\\\\\\B)'", "%", "(", "punct_class", ",", ")", ")", ",", "smart", ".", "cpquote", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "'\"\\'(?=\\\\w)'", ",", "(", "smart", ".", "opquote", "+", "smart", ".", "osquote", ")", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "'\\'\"(?=\\\\w)'", ",", "(", "smart", ".", "osquote", "+", "smart", ".", "opquote", ")", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "\"\\\\b'(?=\\\\d{2}s)\"", ",", "smart", ".", "csquote", ",", "text", ")", "close_class", "=", "'[^\\\\ \\\\t\\\\r\\\\n\\\\[\\\\{\\\\(\\\\-]'", "dec_dashes", "=", "'&#8211;|&#8212;'", "opening_single_quotes_regex", "=", "re", ".", "compile", "(", "(", "\"\\n (\\n \\\\s | # a whitespace char, or\\n &nbsp; | # a non-breaking space entity, or\\n -- | # dashes, or\\n &[mn]dash; | # named dash entities\\n %s | # or decimal entities\\n &\\\\#x201[34]; # or hex\\n )\\n ' # the quote\\n (?=\\\\w) # followed by a word character\\n \"", "%", "(", "dec_dashes", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "opening_single_quotes_regex", ".", "sub", "(", "(", "'\\\\1'", "+", "smart", ".", "osquote", ")", ",", "text", ")", "closing_single_quotes_regex", "=", "re", ".", "compile", "(", "(", "\"\\n (%s)\\n '\\n (?!\\\\s | s\\\\b | \\\\d)\\n \"", "%", "(", "close_class", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "closing_single_quotes_regex", ".", "sub", "(", "(", "'\\\\1'", "+", "smart", ".", "csquote", ")", ",", "text", ")", "closing_single_quotes_regex", "=", "re", ".", "compile", "(", "(", "\"\\n (%s)\\n 
'\\n (\\\\s | s\\\\b)\\n \"", "%", "(", "close_class", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "closing_single_quotes_regex", ".", "sub", "(", "(", "'\\\\1%s\\\\2'", "%", "smart", ".", "csquote", ")", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "\"'\"", ",", "smart", ".", "osquote", ",", "text", ")", "opening_double_quotes_regex", "=", "re", ".", "compile", "(", "(", "'\\n (\\n \\\\s | # a whitespace char, or\\n &nbsp; | # a non-breaking space entity, or\\n -- | # dashes, or\\n &[mn]dash; | # named dash entities\\n %s | # or decimal entities\\n &\\\\#x201[34]; # or hex\\n )\\n \" # the quote\\n (?=\\\\w) # followed by a word character\\n '", "%", "(", "dec_dashes", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "opening_double_quotes_regex", ".", "sub", "(", "(", "'\\\\1'", "+", "smart", ".", "opquote", ")", ",", "text", ")", "closing_double_quotes_regex", "=", "re", ".", "compile", "(", "(", "'\\n #(%s)? # character that indicates the quote should be closing\\n \"\\n (?=\\\\s)\\n '", "%", "(", "close_class", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "closing_double_quotes_regex", ".", "sub", "(", "smart", ".", "cpquote", ",", "text", ")", "closing_double_quotes_regex", "=", "re", ".", "compile", "(", "(", "'\\n (%s) # character that indicates the quote should be closing\\n \"\\n '", "%", "(", "close_class", ",", ")", ")", ",", "re", ".", "VERBOSE", ")", "text", "=", "closing_double_quotes_regex", ".", "sub", "(", "(", "'\\\\1'", "+", "smart", ".", "cpquote", ")", ",", "text", ")", "text", "=", "re", ".", "sub", "(", "'\"'", ",", "smart", ".", "opquote", ",", "text", ")", "return", "text" ]
parameter: string .
train
false
19,418
def is_enum(obj): try: return issubclass(obj, enum.Enum) except TypeError: return False
[ "def", "is_enum", "(", "obj", ")", ":", "try", ":", "return", "issubclass", "(", "obj", ",", "enum", ".", "Enum", ")", "except", "TypeError", ":", "return", "False" ]
check if a given object is an enum .
train
false
19,419
def _json_play_events(data): temp = list() for (playerid, statcats) in data.iteritems(): for info in statcats: if (info['statId'] not in nflgame.statmap.idmap): continue statvals = nflgame.statmap.values(info['statId'], info['yards']) statvals['playerid'] = (None if (playerid == '0') else playerid) statvals['playername'] = (info['playerName'] or None) statvals['team'] = info['clubcode'] temp.append((int(info['sequence']), statvals)) return [t[1] for t in sorted(temp, key=(lambda t: t[0]))]
[ "def", "_json_play_events", "(", "data", ")", ":", "temp", "=", "list", "(", ")", "for", "(", "playerid", ",", "statcats", ")", "in", "data", ".", "iteritems", "(", ")", ":", "for", "info", "in", "statcats", ":", "if", "(", "info", "[", "'statId'", "]", "not", "in", "nflgame", ".", "statmap", ".", "idmap", ")", ":", "continue", "statvals", "=", "nflgame", ".", "statmap", ".", "values", "(", "info", "[", "'statId'", "]", ",", "info", "[", "'yards'", "]", ")", "statvals", "[", "'playerid'", "]", "=", "(", "None", "if", "(", "playerid", "==", "'0'", ")", "else", "playerid", ")", "statvals", "[", "'playername'", "]", "=", "(", "info", "[", "'playerName'", "]", "or", "None", ")", "statvals", "[", "'team'", "]", "=", "info", "[", "'clubcode'", "]", "temp", ".", "append", "(", "(", "int", "(", "info", "[", "'sequence'", "]", ")", ",", "statvals", ")", ")", "return", "[", "t", "[", "1", "]", "for", "t", "in", "sorted", "(", "temp", ",", "key", "=", "(", "lambda", "t", ":", "t", "[", "0", "]", ")", ")", "]" ]
takes a single json play entry and converts it to a list of events .
train
false
19,420
@none_if_empty def bytestring_from_base64(value): decoded_value = base64.b64decode(value) return datastore_types.ByteString(decoded_value)
[ "@", "none_if_empty", "def", "bytestring_from_base64", "(", "value", ")", ":", "decoded_value", "=", "base64", ".", "b64decode", "(", "value", ")", "return", "datastore_types", ".", "ByteString", "(", "decoded_value", ")" ]
return a datastore bytestring property from a base64 encoded value .
train
false