id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
19,421
def _random_string(length): return ''.join((random.choice((string.letters + string.digits)) for _ in range(length)))
[ "def", "_random_string", "(", "length", ")", ":", "return", "''", ".", "join", "(", "(", "random", ".", "choice", "(", "(", "string", ".", "letters", "+", "string", ".", "digits", ")", ")", "for", "_", "in", "range", "(", "length", ")", ")", ")" ]
generates random string .
train
false
19,422
def _find_common_type(types):
    """Find a common data type among the given dtypes.

    Raises ValueError when *types* is empty.
    """
    if len(types) == 0:
        raise ValueError('no types given')
    first = types[0]
    # If every dtype equals the first, that dtype is trivially common.
    if all(is_dtype_equal(first, t) for t in types[1:]):
        return first
    # Any extension dtype forces a fallback to plain object.
    # Fix: ``np.object`` was a deprecated alias for the builtin ``object``
    # and was removed in NumPy 1.24; returning ``object`` is identical.
    if any(isinstance(t, ExtensionDtype) for t in types):
        return object
    if all(is_datetime64_dtype(t) for t in types):
        return np.dtype('datetime64[ns]')
    if all(is_timedelta64_dtype(t) for t in types):
        return np.dtype('timedelta64[ns]')
    return np.find_common_type(types, [])
[ "def", "_find_common_type", "(", "types", ")", ":", "if", "(", "len", "(", "types", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'no types given'", ")", "first", "=", "types", "[", "0", "]", "if", "all", "(", "(", "is_dtype_equal", "(", "first", ",", "t", ")", "for", "t", "in", "types", "[", "1", ":", "]", ")", ")", ":", "return", "first", "if", "any", "(", "(", "isinstance", "(", "t", ",", "ExtensionDtype", ")", "for", "t", "in", "types", ")", ")", ":", "return", "np", ".", "object", "if", "all", "(", "(", "is_datetime64_dtype", "(", "t", ")", "for", "t", "in", "types", ")", ")", ":", "return", "np", ".", "dtype", "(", "'datetime64[ns]'", ")", "if", "all", "(", "(", "is_timedelta64_dtype", "(", "t", ")", "for", "t", "in", "types", ")", ")", ":", "return", "np", ".", "dtype", "(", "'timedelta64[ns]'", ")", "return", "np", ".", "find_common_type", "(", "types", ",", "[", "]", ")" ]
find a common data type among the given dtypes .
train
false
19,423
def rowset(results):
    """Convert the results of SQL execution into a plain set of column tuples."""
    return {tuple(row) for row in results}
[ "def", "rowset", "(", "results", ")", ":", "return", "set", "(", "[", "tuple", "(", "row", ")", "for", "row", "in", "results", "]", ")" ]
converts the results of sql execution into a plain set of column tuples .
train
false
19,425
def discardBody(response):
    """Discard the body of an HTTP response.

    Reads the full body (so the connection can be reused) and fires the
    returned Deferred with the original response object.
    """
    body_read = client.readBody(response)
    return body_read.addCallback(lambda _: response)
[ "def", "discardBody", "(", "response", ")", ":", "return", "client", ".", "readBody", "(", "response", ")", ".", "addCallback", "(", "(", "lambda", "_", ":", "response", ")", ")" ]
discard the body of a http response .
train
false
19,429
def iterload(modname, verbose=False, failfast=False):
    """Load all modules named *modname* from all installed apps.

    Returns an iterator over the modules that were found (None results
    from apps lacking the module are filtered out).
    """
    candidates = (get_module(app, modname, verbose, failfast)
                  for app in installed_apps())
    return filter(None, candidates)
[ "def", "iterload", "(", "modname", ",", "verbose", "=", "False", ",", "failfast", "=", "False", ")", ":", "return", "filter", "(", "None", ",", "(", "get_module", "(", "app", ",", "modname", ",", "verbose", ",", "failfast", ")", "for", "app", "in", "installed_apps", "(", ")", ")", ")" ]
loads all modules with name modname from all installed apps and returns an iterator of those modules .
train
false
19,430
def test_truncate():
    """Test the truncate function: widths at, above, below, and past limits."""
    cases = [
        (50, '1234567890'),
        (5, u('1234\xe2\x80\xa6')),
        (1, u('\xe2\x80\xa6')),
        (9, u('12345678\xe2\x80\xa6')),
        (10, '1234567890'),
        (0, '1234567890'),
        ((-1), '1234567890'),
    ]
    for width, expected in cases:
        assert (truncate('1234567890', width) == expected)
[ "def", "test_truncate", "(", ")", ":", "assert", "(", "truncate", "(", "'1234567890'", ",", "50", ")", "==", "'1234567890'", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "5", ")", "==", "u", "(", "'1234\\xe2\\x80\\xa6'", ")", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "1", ")", "==", "u", "(", "'\\xe2\\x80\\xa6'", ")", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "9", ")", "==", "u", "(", "'12345678\\xe2\\x80\\xa6'", ")", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "10", ")", "==", "'1234567890'", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "0", ")", "==", "'1234567890'", ")", "assert", "(", "truncate", "(", "'1234567890'", ",", "(", "-", "1", ")", ")", "==", "'1234567890'", ")" ]
test truncate function .
train
false
19,431
def sanity_check(module, host, key, sshkeygen):
    # Check that the supplied key is sensible: write the key to a temp file
    # and run `ssh-keygen -F <host> -f <file>`; if the host provided is
    # inconsistent with the (possibly hashed) host field of the key, fail
    # the Ansible module.
    # NOTE(review): appears written for Python 2 -- on Python 3, writing a
    # str `key` to NamedTemporaryFile() (binary mode) would raise TypeError;
    # confirm the intended runtime.
    if (key is None):
        return
    try:
        outf = tempfile.NamedTemporaryFile()
        outf.write(key)
        outf.flush()
    except IOError:
        e = get_exception()
        module.fail_json(msg=('Failed to write to temporary file %s: %s' % (outf.name, str(e))))
    # check_rc=True: a non-zero ssh-keygen exit aborts the module here.
    (rc, stdout, stderr) = module.run_command([sshkeygen, '-F', host, '-f', outf.name], check_rc=True)
    try:
        outf.close()
    except:
        pass
    if (stdout == ''):
        # ssh-keygen -F prints nothing when the host is not found in the file.
        module.fail_json(msg='Host parameter does not match hashed host field in supplied key')
[ "def", "sanity_check", "(", "module", ",", "host", ",", "key", ",", "sshkeygen", ")", ":", "if", "(", "key", "is", "None", ")", ":", "return", "try", ":", "outf", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "outf", ".", "write", "(", "key", ")", "outf", ".", "flush", "(", ")", "except", "IOError", ":", "e", "=", "get_exception", "(", ")", "module", ".", "fail_json", "(", "msg", "=", "(", "'Failed to write to temporary file %s: %s'", "%", "(", "outf", ".", "name", ",", "str", "(", "e", ")", ")", ")", ")", "(", "rc", ",", "stdout", ",", "stderr", ")", "=", "module", ".", "run_command", "(", "[", "sshkeygen", ",", "'-F'", ",", "host", ",", "'-f'", ",", "outf", ".", "name", "]", ",", "check_rc", "=", "True", ")", "try", ":", "outf", ".", "close", "(", ")", "except", ":", "pass", "if", "(", "stdout", "==", "''", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "'Host parameter does not match hashed host field in supplied key'", ")" ]
check supplied key is sensible host and key are parameters provided by the user; if the host provided is inconsistent with the key supplied .
train
false
19,432
def get_figlabels():
    """Return a list of existing figure labels, ordered by figure number."""
    managers = sorted(_pylab_helpers.Gcf.get_all_fig_managers(),
                      key=lambda manager: manager.num)
    return [manager.canvas.figure.get_label() for manager in managers]
[ "def", "get_figlabels", "(", ")", ":", "figManagers", "=", "_pylab_helpers", ".", "Gcf", ".", "get_all_fig_managers", "(", ")", "figManagers", ".", "sort", "(", "key", "=", "(", "lambda", "m", ":", "m", ".", "num", ")", ")", "return", "[", "m", ".", "canvas", ".", "figure", ".", "get_label", "(", ")", "for", "m", "in", "figManagers", "]" ]
return a list of existing figure labels .
train
false
19,434
def tiny_escape(val):
    """Escape *val*, mapping None to an empty string.

    Helper used in building a test HTML table.
    """
    if val is None:
        return u''
    return escape(val)
[ "def", "tiny_escape", "(", "val", ")", ":", "return", "(", "u''", "if", "(", "val", "is", "None", ")", "else", "escape", "(", "val", ")", ")" ]
helper function used in building a test html table .
train
false
19,435
def update_employee(emp_id, key=None, value=None, items=None):
    # Update one or more items for this employee.
    #
    # Either a single key/value pair or an `items` mapping must be given;
    # `items` may also be a YAML-encoded string (e.g. passed from a CLI).
    # The fields are serialized into the <employee><field .../></employee>
    # XML payload the API expects and POSTed, then the updated fields are
    # read back via show_employee().
    if (items is None):
        if ((key is None) or (value is None)):
            return {'Error': 'At least one key/value pair is required'}
        items = {key: value}
    elif isinstance(items, six.string_types):
        # accept a YAML string describing the mapping
        items = yaml.safe_load(items)
    xml_items = ''
    for pair in items.keys():
        xml_items += '<field id="{0}">{1}</field>'.format(pair, items[pair])
    xml_items = '<employee>{0}</employee>'.format(xml_items)
    # NOTE(review): status/result are ignored; errors from _query are not
    # surfaced here -- confirm that is intentional.
    (status, result) = _query(action='employees', command=emp_id, data=xml_items, method='POST')
    return show_employee(emp_id, ','.join(items.keys()))
[ "def", "update_employee", "(", "emp_id", ",", "key", "=", "None", ",", "value", "=", "None", ",", "items", "=", "None", ")", ":", "if", "(", "items", "is", "None", ")", ":", "if", "(", "(", "key", "is", "None", ")", "or", "(", "value", "is", "None", ")", ")", ":", "return", "{", "'Error'", ":", "'At least one key/value pair is required'", "}", "items", "=", "{", "key", ":", "value", "}", "elif", "isinstance", "(", "items", ",", "six", ".", "string_types", ")", ":", "items", "=", "yaml", ".", "safe_load", "(", "items", ")", "xml_items", "=", "''", "for", "pair", "in", "items", ".", "keys", "(", ")", ":", "xml_items", "+=", "'<field id=\"{0}\">{1}</field>'", ".", "format", "(", "pair", ",", "items", "[", "pair", "]", ")", "xml_items", "=", "'<employee>{0}</employee>'", ".", "format", "(", "xml_items", ")", "(", "status", ",", "result", ")", "=", "_query", "(", "action", "=", "'employees'", ",", "command", "=", "emp_id", ",", "data", "=", "xml_items", ",", "method", "=", "'POST'", ")", "return", "show_employee", "(", "emp_id", ",", "','", ".", "join", "(", "items", ".", "keys", "(", ")", ")", ")" ]
update one or more items for this employee .
train
true
19,436
def binary(n, count=16, reverse=False):
    """Show the low *count* binary digits of *n* as a string.

    Most-significant bit first by default; least-significant first when
    *reverse* is true.
    """
    if reverse:
        shifts = range(count)
    else:
        shifts = range(count - 1, -1, -1)
    return ''.join(str((n >> shift) & 1) for shift in shifts)
[ "def", "binary", "(", "n", ",", "count", "=", "16", ",", "reverse", "=", "False", ")", ":", "bits", "=", "[", "str", "(", "(", "(", "n", ">>", "y", ")", "&", "1", ")", ")", "for", "y", "in", "range", "(", "(", "count", "-", "1", ")", ",", "(", "-", "1", ")", ",", "(", "-", "1", ")", ")", "]", "if", "reverse", ":", "bits", ".", "reverse", "(", ")", "return", "''", ".", "join", "(", "bits", ")" ]
show binary digits of a number .
train
false
19,437
def _pad_for_signing(message, target_length):
    """Pad the message for signing (00 01 FF..FF 00 <message>).

    Raises OverflowError when the message does not fit within
    *target_length* bytes (11 bytes are reserved for padding overhead).
    """
    max_msglength = target_length - 11
    msglength = len(message)
    if msglength > max_msglength:
        raise OverflowError(('%i bytes needed for message, but there is only space for %i' % (msglength, max_msglength)))
    padding_length = target_length - msglength - 3
    parts = [b('\x00\x01'), padding_length * b('\xff'), b('\x00'), message]
    return b('').join(parts)
[ "def", "_pad_for_signing", "(", "message", ",", "target_length", ")", ":", "max_msglength", "=", "(", "target_length", "-", "11", ")", "msglength", "=", "len", "(", "message", ")", "if", "(", "msglength", ">", "max_msglength", ")", ":", "raise", "OverflowError", "(", "(", "'%i bytes needed for message, but there is only space for %i'", "%", "(", "msglength", ",", "max_msglength", ")", ")", ")", "padding_length", "=", "(", "(", "target_length", "-", "msglength", ")", "-", "3", ")", "return", "b", "(", "''", ")", ".", "join", "(", "[", "b", "(", "'\\x00\\x01'", ")", ",", "(", "padding_length", "*", "b", "(", "'\\xff'", ")", ")", ",", "b", "(", "'\\x00'", ")", ",", "message", "]", ")" ]
pads the message for signing .
train
false
19,438
def unplug_vbd(session, vbd_ref):
    # Unplug a VBD from a VM, retrying while the detach is rejected.
    #
    # Retries up to CONF.xenapi_num_vbd_unplug_retries times, sleeping one
    # second between attempts.  Returns silently if the device is already
    # detached; raises volume_utils.StorageError on any other XenAPI
    # failure or when the retry budget is exhausted.
    max_attempts = (CONF.xenapi_num_vbd_unplug_retries + 1)
    for num_attempt in xrange(1, (max_attempts + 1)):
        try:
            session.call_xenapi('VBD.unplug', vbd_ref)
            return
        except session.XenAPI.Failure as exc:
            # XenAPI reports the error code as the first element of details.
            err = ((len(exc.details) > 0) and exc.details[0])
            if (err == 'DEVICE_ALREADY_DETACHED'):
                LOG.info(_('VBD %s already detached'), vbd_ref)
                return
            elif (err == 'DEVICE_DETACH_REJECTED'):
                # Transient rejection: log and retry after the sleep below.
                LOG.info(_('VBD %(vbd_ref)s detach rejected, attempt %(num_attempt)d/%(max_attempts)d'), locals())
            else:
                LOG.exception(exc)
                raise volume_utils.StorageError((_('Unable to unplug VBD %s') % vbd_ref))
        greenthread.sleep(1)
    raise volume_utils.StorageError((_('Reached maximum number of retries trying to unplug VBD %s') % vbd_ref))
[ "def", "unplug_vbd", "(", "session", ",", "vbd_ref", ")", ":", "max_attempts", "=", "(", "CONF", ".", "xenapi_num_vbd_unplug_retries", "+", "1", ")", "for", "num_attempt", "in", "xrange", "(", "1", ",", "(", "max_attempts", "+", "1", ")", ")", ":", "try", ":", "session", ".", "call_xenapi", "(", "'VBD.unplug'", ",", "vbd_ref", ")", "return", "except", "session", ".", "XenAPI", ".", "Failure", "as", "exc", ":", "err", "=", "(", "(", "len", "(", "exc", ".", "details", ")", ">", "0", ")", "and", "exc", ".", "details", "[", "0", "]", ")", "if", "(", "err", "==", "'DEVICE_ALREADY_DETACHED'", ")", ":", "LOG", ".", "info", "(", "_", "(", "'VBD %s already detached'", ")", ",", "vbd_ref", ")", "return", "elif", "(", "err", "==", "'DEVICE_DETACH_REJECTED'", ")", ":", "LOG", ".", "info", "(", "_", "(", "'VBD %(vbd_ref)s detach rejected, attempt %(num_attempt)d/%(max_attempts)d'", ")", ",", "locals", "(", ")", ")", "else", ":", "LOG", ".", "exception", "(", "exc", ")", "raise", "volume_utils", ".", "StorageError", "(", "(", "_", "(", "'Unable to unplug VBD %s'", ")", "%", "vbd_ref", ")", ")", "greenthread", ".", "sleep", "(", "1", ")", "raise", "volume_utils", ".", "StorageError", "(", "(", "_", "(", "'Reached maximum number of retries trying to unplug VBD %s'", ")", "%", "vbd_ref", ")", ")" ]
unplug vbd from vm .
train
false
19,442
def conv2d(input, filters, image_shape=None, filter_shape=None, border_mode='valid', subsample=(1, 1), **kargs):
    # Build the symbolic graph for convolving a mini-batch of a stack of
    # 2D inputs with a set of 2D filters (a thin wrapper around ConvOp).
    #
    # image_shape / filter_shape, when given, must consist of compile-time
    # constants; each entry is validated and coerced to a Python int.
    if (image_shape is not None):
        image_shape = list(image_shape)
        for i in xrange(len(image_shape)):
            if (image_shape[i] is not None):
                try:
                    image_shape[i] = get_scalar_constant_value(as_tensor_variable(image_shape[i]))
                except NotScalarConstantError:
                    raise NotScalarConstantError(('The convolution need that the shape information are constant values. We got %s for the image_shape parameter' % image_shape[i]))
                assert (image_shape[i].dtype in theano.tensor.discrete_dtypes)
                image_shape[i] = int(image_shape[i])
    if (filter_shape is not None):
        filter_shape = list(filter_shape)
        for i in xrange(len(filter_shape)):
            if (filter_shape[i] is not None):
                try:
                    filter_shape[i] = get_scalar_constant_value(as_tensor_variable(filter_shape[i]))
                except NotScalarConstantError:
                    raise NotScalarConstantError(('The convolution need that the shape information are constant values. We got %s for the filter_shape parameter' % filter_shape[i]))
                assert (filter_shape[i].dtype in theano.tensor.discrete_dtypes)
                filter_shape[i] = int(filter_shape[i])
    if (image_shape and filter_shape):
        # Both shapes known: the input-channel dimensions must agree.
        try:
            if ((image_shape[1] is not None) and (filter_shape[1] is not None)):
                assert (image_shape[1] == filter_shape[1])
        except Exception:
            print('image ', image_shape, ' filters ', filter_shape)
            raise
    # filter_shape = (nkern, channels, kh, kw); image_shape = (bsize, channels, h, w)
    if (filter_shape is not None):
        nkern = filter_shape[0]
        kshp = filter_shape[2:]
    else:
        (nkern, kshp) = (None, None)
    if (image_shape is not None):
        bsize = image_shape[0]
        imshp = image_shape[1:]
    else:
        (bsize, imshp) = (None, None)
    op = ConvOp(output_mode=border_mode, dx=subsample[0], dy=subsample[1], imshp=imshp, kshp=kshp, nkern=nkern, bsize=bsize, **kargs)
    return op(input, filters)
[ "def", "conv2d", "(", "input", ",", "filters", ",", "image_shape", "=", "None", ",", "filter_shape", "=", "None", ",", "border_mode", "=", "'valid'", ",", "subsample", "=", "(", "1", ",", "1", ")", ",", "**", "kargs", ")", ":", "if", "(", "image_shape", "is", "not", "None", ")", ":", "image_shape", "=", "list", "(", "image_shape", ")", "for", "i", "in", "xrange", "(", "len", "(", "image_shape", ")", ")", ":", "if", "(", "image_shape", "[", "i", "]", "is", "not", "None", ")", ":", "try", ":", "image_shape", "[", "i", "]", "=", "get_scalar_constant_value", "(", "as_tensor_variable", "(", "image_shape", "[", "i", "]", ")", ")", "except", "NotScalarConstantError", ":", "raise", "NotScalarConstantError", "(", "(", "'The convolution need that the shape information are constant values. We got %s for the image_shape parameter'", "%", "image_shape", "[", "i", "]", ")", ")", "assert", "(", "image_shape", "[", "i", "]", ".", "dtype", "in", "theano", ".", "tensor", ".", "discrete_dtypes", ")", "image_shape", "[", "i", "]", "=", "int", "(", "image_shape", "[", "i", "]", ")", "if", "(", "filter_shape", "is", "not", "None", ")", ":", "filter_shape", "=", "list", "(", "filter_shape", ")", "for", "i", "in", "xrange", "(", "len", "(", "filter_shape", ")", ")", ":", "if", "(", "filter_shape", "[", "i", "]", "is", "not", "None", ")", ":", "try", ":", "filter_shape", "[", "i", "]", "=", "get_scalar_constant_value", "(", "as_tensor_variable", "(", "filter_shape", "[", "i", "]", ")", ")", "except", "NotScalarConstantError", ":", "raise", "NotScalarConstantError", "(", "(", "'The convolution need that the shape information are constant values. 
We got %s for the filter_shape parameter'", "%", "filter_shape", "[", "i", "]", ")", ")", "assert", "(", "filter_shape", "[", "i", "]", ".", "dtype", "in", "theano", ".", "tensor", ".", "discrete_dtypes", ")", "filter_shape", "[", "i", "]", "=", "int", "(", "filter_shape", "[", "i", "]", ")", "if", "(", "image_shape", "and", "filter_shape", ")", ":", "try", ":", "if", "(", "(", "image_shape", "[", "1", "]", "is", "not", "None", ")", "and", "(", "filter_shape", "[", "1", "]", "is", "not", "None", ")", ")", ":", "assert", "(", "image_shape", "[", "1", "]", "==", "filter_shape", "[", "1", "]", ")", "except", "Exception", ":", "print", "(", "'image '", ",", "image_shape", ",", "' filters '", ",", "filter_shape", ")", "raise", "if", "(", "filter_shape", "is", "not", "None", ")", ":", "nkern", "=", "filter_shape", "[", "0", "]", "kshp", "=", "filter_shape", "[", "2", ":", "]", "else", ":", "(", "nkern", ",", "kshp", ")", "=", "(", "None", ",", "None", ")", "if", "(", "image_shape", "is", "not", "None", ")", ":", "bsize", "=", "image_shape", "[", "0", "]", "imshp", "=", "image_shape", "[", "1", ":", "]", "else", ":", "(", "bsize", ",", "imshp", ")", "=", "(", "None", ",", "None", ")", "op", "=", "ConvOp", "(", "output_mode", "=", "border_mode", ",", "dx", "=", "subsample", "[", "0", "]", ",", "dy", "=", "subsample", "[", "1", "]", ",", "imshp", "=", "imshp", ",", "kshp", "=", "kshp", ",", "nkern", "=", "nkern", ",", "bsize", "=", "bsize", ",", "**", "kargs", ")", "return", "op", "(", "input", ",", "filters", ")" ]
this function will build the symbolic graph for convolving a mini-batch of a stack of 2d inputs with a set of 2d filters .
train
false
19,443
def get_default_instance_type():
    """Get the default instance type (looked up by its configured name)."""
    return get_instance_type_by_name(CONF.default_instance_type)
[ "def", "get_default_instance_type", "(", ")", ":", "name", "=", "CONF", ".", "default_instance_type", "return", "get_instance_type_by_name", "(", "name", ")" ]
get the default instance type .
train
false
19,444
def first(func, items):
    """Return the first item in *items* for which func(item) is truthy.

    Returns None when no item matches (including when *items* is empty).
    """
    return next((item for item in items if func(item)), None)
[ "def", "first", "(", "func", ",", "items", ")", ":", "for", "item", "in", "items", ":", "if", "func", "(", "item", ")", ":", "return", "item" ]
returns the first item in a list .
train
false
19,446
def layer_from_viewer_config(model, layer, source, ordering):
    """Parse an object out of a parsed layer configuration from a GXP viewer.

    Keys that map to dedicated model fields are stripped from the JSON
    parameter blobs stored on the instance.
    """
    layer_field_keys = ['format', 'name', 'opacity', 'styles', 'transparent',
                        'fixed', 'group', 'visibility', 'source', 'getFeatureInfo']
    layer_cfg = {k: v for k, v in layer.items() if k not in layer_field_keys}
    source_field_keys = ['url', 'projection']
    source_cfg = {k: v for k, v in source.items() if k not in source_field_keys}
    return model(
        stack_order=ordering,
        format=layer.get('format', None),
        name=layer.get('name', None),
        opacity=layer.get('opacity', 1),
        styles=layer.get('styles', None),
        transparent=layer.get('transparent', False),
        fixed=layer.get('fixed', False),
        group=layer.get('group', None),
        visibility=layer.get('visibility', True),
        ows_url=source.get('url', None),
        layer_params=json.dumps(layer_cfg),
        source_params=json.dumps(source_cfg),
    )
[ "def", "layer_from_viewer_config", "(", "model", ",", "layer", ",", "source", ",", "ordering", ")", ":", "layer_cfg", "=", "dict", "(", "layer", ")", "for", "k", "in", "[", "'format'", ",", "'name'", ",", "'opacity'", ",", "'styles'", ",", "'transparent'", ",", "'fixed'", ",", "'group'", ",", "'visibility'", ",", "'source'", ",", "'getFeatureInfo'", "]", ":", "if", "(", "k", "in", "layer_cfg", ")", ":", "del", "layer_cfg", "[", "k", "]", "source_cfg", "=", "dict", "(", "source", ")", "for", "k", "in", "[", "'url'", ",", "'projection'", "]", ":", "if", "(", "k", "in", "source_cfg", ")", ":", "del", "source_cfg", "[", "k", "]", "return", "model", "(", "stack_order", "=", "ordering", ",", "format", "=", "layer", ".", "get", "(", "'format'", ",", "None", ")", ",", "name", "=", "layer", ".", "get", "(", "'name'", ",", "None", ")", ",", "opacity", "=", "layer", ".", "get", "(", "'opacity'", ",", "1", ")", ",", "styles", "=", "layer", ".", "get", "(", "'styles'", ",", "None", ")", ",", "transparent", "=", "layer", ".", "get", "(", "'transparent'", ",", "False", ")", ",", "fixed", "=", "layer", ".", "get", "(", "'fixed'", ",", "False", ")", ",", "group", "=", "layer", ".", "get", "(", "'group'", ",", "None", ")", ",", "visibility", "=", "layer", ".", "get", "(", "'visibility'", ",", "True", ")", ",", "ows_url", "=", "source", ".", "get", "(", "'url'", ",", "None", ")", ",", "layer_params", "=", "json", ".", "dumps", "(", "layer_cfg", ")", ",", "source_params", "=", "json", ".", "dumps", "(", "source_cfg", ")", ")" ]
parse an object out of a parsed layer configuration from a gxp viewer .
train
false
19,447
def ExecuteCode(code, global_dict):
    """Execute some code in a given global environment.

    Fix: the Python 2 statement form ``exec code in global_dict`` is a
    syntax error on Python 3.  The call form ``exec(code, global_dict)``
    is valid and behaves identically on both Python 2 and Python 3.
    """
    exec(code, global_dict)
[ "def", "ExecuteCode", "(", "code", ",", "global_dict", ")", ":", "exec", "code", "in", "global_dict" ]
executes some code in a given global environment .
train
false
19,449
def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner, columns_dict, user):
    # Returns True if, after evaluating permissions for each linked doctype,
    # there is an owner match for ref_doctype and a user-permission match
    # for all linked doctypes.  An empty row trivially matches.
    # Note: each doctype could have multiple conflicting user-permission
    # entries; matching any one of them is sufficient for that doctype.
    resultant_match = True
    if (not row):
        # no data to evaluate: treat as a match
        return resultant_match
    for (doctype, filter_list) in doctype_match_filters.items():
        matched_for_doctype = False
        if ((doctype == ref_doctype) and if_owner):
            # "if owner" rule: the row matches when its owner column equals
            # the current user.
            idx = linked_doctypes.get(u'User')
            if ((idx is not None) and (row[idx] == user) and (columns_dict[idx] == columns_dict.get(u'owner'))):
                matched_for_doctype = True
        if (not matched_for_doctype):
            for match_filters in filter_list:
                match = True
                for (dt, idx) in linked_doctypes.items():
                    # skip the owner column; it was handled above
                    if ((dt == u'User') and (columns_dict[idx] == columns_dict.get(u'owner'))):
                        continue
                    if ((dt in match_filters) and (row[idx] not in match_filters[dt])):
                        match = False
                        break
                matched_for_doctype = (matched_for_doctype or match)
                if matched_for_doctype:
                    break
        resultant_match = (resultant_match and matched_for_doctype)
        if (not resultant_match):
            # one failed doctype fails the whole row
            break
    return resultant_match
[ "def", "has_match", "(", "row", ",", "linked_doctypes", ",", "doctype_match_filters", ",", "ref_doctype", ",", "if_owner", ",", "columns_dict", ",", "user", ")", ":", "resultant_match", "=", "True", "if", "(", "not", "row", ")", ":", "return", "resultant_match", "for", "(", "doctype", ",", "filter_list", ")", "in", "doctype_match_filters", ".", "items", "(", ")", ":", "matched_for_doctype", "=", "False", "if", "(", "(", "doctype", "==", "ref_doctype", ")", "and", "if_owner", ")", ":", "idx", "=", "linked_doctypes", ".", "get", "(", "u'User'", ")", "if", "(", "(", "idx", "is", "not", "None", ")", "and", "(", "row", "[", "idx", "]", "==", "user", ")", "and", "(", "columns_dict", "[", "idx", "]", "==", "columns_dict", ".", "get", "(", "u'owner'", ")", ")", ")", ":", "matched_for_doctype", "=", "True", "if", "(", "not", "matched_for_doctype", ")", ":", "for", "match_filters", "in", "filter_list", ":", "match", "=", "True", "for", "(", "dt", ",", "idx", ")", "in", "linked_doctypes", ".", "items", "(", ")", ":", "if", "(", "(", "dt", "==", "u'User'", ")", "and", "(", "columns_dict", "[", "idx", "]", "==", "columns_dict", ".", "get", "(", "u'owner'", ")", ")", ")", ":", "continue", "if", "(", "(", "dt", "in", "match_filters", ")", "and", "(", "row", "[", "idx", "]", "not", "in", "match_filters", "[", "dt", "]", ")", ")", ":", "match", "=", "False", "break", "matched_for_doctype", "=", "(", "matched_for_doctype", "or", "match", ")", "if", "matched_for_doctype", ":", "break", "resultant_match", "=", "(", "resultant_match", "and", "matched_for_doctype", ")", "if", "(", "not", "resultant_match", ")", ":", "break", "return", "resultant_match" ]
returns true if after evaluating permissions for each linked doctype - there is an owner match for the ref_doctype - and there is a user permission match for all linked doctypes returns true if the row is empty note: each doctype could have multiple conflicting user permission doctypes .
train
false
19,450
def _set_term_title_xterm(title):
    """Change the virtual terminal title in xterm-workalikes.

    Writes the OSC 0 escape sequence to stdout.
    """
    escape_sequence = ('\x1b]0;%s\x07' % title)
    sys.stdout.write(escape_sequence)
[ "def", "_set_term_title_xterm", "(", "title", ")", ":", "sys", ".", "stdout", ".", "write", "(", "(", "'\\x1b]0;%s\\x07'", "%", "title", ")", ")" ]
change virtual terminal title in xterm-workalikes .
train
false
19,451
def osquery_flags(attrs=None, where=None):
    """Return osquery_flags information from the osquery CLI."""
    query_kwargs = {'table': 'osquery_flags', 'attrs': attrs, 'where': where}
    return _osquery_cmd(**query_kwargs)
[ "def", "osquery_flags", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'osquery_flags'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return osquery_flags information from osquery cli example: .
train
false
19,452
def format_navigation_links(additional_languages, default_lang, messages, strip_indexes=False):
    # Return the string used to configure NAVIGATION_LINKS: one
    # (archive, tags, RSS) tuple per language, keyed by language code.
    # Template filled per language: {0}=key, {1}=URL prefix, {2}=messages
    # dict, {3}=index filename.
    f = u' {0}: (\n ("{1}/archive.html", "{2[Archive]}"),\n ("{1}/categories/{3}", "{2[Tags]}"),\n ("{1}/rss.xml", "{2[RSS feed]}"),\n ),'
    pairs = []

    def get_msg(lang):
        u'Generate a smaller messages dict with fallback.'
        # fall back to the English key itself when a translation is missing
        fmsg = {}
        for i in (u'Archive', u'Tags', u'RSS feed'):
            if messages[lang][i]:
                fmsg[i] = messages[lang][i]
            else:
                fmsg[i] = i
        return fmsg

    if strip_indexes:
        index_html = u''
    else:
        index_html = u'index.html'
    # the default language lives at the site root (empty URL prefix)
    pairs.append(f.format(u'DEFAULT_LANG', u'', get_msg(default_lang), index_html))
    for l in additional_languages:
        pairs.append(f.format(json.dumps(l, ensure_ascii=False), (u'/' + l), get_msg(l), index_html))
    return u'{{\n{0}\n}}'.format(u'\n\n'.join(pairs))
[ "def", "format_navigation_links", "(", "additional_languages", ",", "default_lang", ",", "messages", ",", "strip_indexes", "=", "False", ")", ":", "f", "=", "u' {0}: (\\n (\"{1}/archive.html\", \"{2[Archive]}\"),\\n (\"{1}/categories/{3}\", \"{2[Tags]}\"),\\n (\"{1}/rss.xml\", \"{2[RSS feed]}\"),\\n ),'", "pairs", "=", "[", "]", "def", "get_msg", "(", "lang", ")", ":", "fmsg", "=", "{", "}", "for", "i", "in", "(", "u'Archive'", ",", "u'Tags'", ",", "u'RSS feed'", ")", ":", "if", "messages", "[", "lang", "]", "[", "i", "]", ":", "fmsg", "[", "i", "]", "=", "messages", "[", "lang", "]", "[", "i", "]", "else", ":", "fmsg", "[", "i", "]", "=", "i", "return", "fmsg", "if", "strip_indexes", ":", "index_html", "=", "u''", "else", ":", "index_html", "=", "u'index.html'", "pairs", ".", "append", "(", "f", ".", "format", "(", "u'DEFAULT_LANG'", ",", "u''", ",", "get_msg", "(", "default_lang", ")", ",", "index_html", ")", ")", "for", "l", "in", "additional_languages", ":", "pairs", ".", "append", "(", "f", ".", "format", "(", "json", ".", "dumps", "(", "l", ",", "ensure_ascii", "=", "False", ")", ",", "(", "u'/'", "+", "l", ")", ",", "get_msg", "(", "l", ")", ",", "index_html", ")", ")", "return", "u'{{\\n{0}\\n}}'", ".", "format", "(", "u'\\n\\n'", ".", "join", "(", "pairs", ")", ")" ]
return the string to configure navigation_links .
train
false
19,454
def merge_message_path():
    """Return the path to the MERGE_MSG or SQUASH_MSG file, or None if neither exists."""
    for basename in (u'MERGE_MSG', u'SQUASH_MSG'):
        candidate = git.git_path(basename)
        if core.exists(candidate):
            return candidate
    return None
[ "def", "merge_message_path", "(", ")", ":", "for", "basename", "in", "(", "u'MERGE_MSG'", ",", "u'SQUASH_MSG'", ")", ":", "path", "=", "git", ".", "git_path", "(", "basename", ")", "if", "core", ".", "exists", "(", "path", ")", ":", "return", "path", "return", "None" ]
return the path to the merge or squash commit-message file, or none if neither exists .
train
false
19,455
def org_facility_geojson(user_id=None):
    # Export GeoJSON[P] of facility data.
    # user_id: when given, impersonate this user first (so the export
    # reflects that user's permissions).
    if user_id:
        auth.s3_impersonate(user_id)
    s3db.org_facility_geojson()
[ "def", "org_facility_geojson", "(", "user_id", "=", "None", ")", ":", "if", "user_id", ":", "auth", ".", "s3_impersonate", "(", "user_id", ")", "s3db", ".", "org_facility_geojson", "(", ")" ]
export geojson[p] of facility data .
train
false
19,457
@pytest.mark.parametrize('cookiejar', (compat.cookielib.CookieJar(), RequestsCookieJar()))
def test_add_dict_to_cookiejar(cookiejar):
    """Ensure add_dict_to_cookiejar works for non-RequestsCookieJar CookieJars."""
    cookiedict = {'test': 'cookies', 'good': 'cookies'}
    cj = add_dict_to_cookiejar(cookiejar, cookiedict)
    cookies = {cookie.name: cookie.value for cookie in cj}
    assert (cookiedict == cookies)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'cookiejar'", ",", "(", "compat", ".", "cookielib", ".", "CookieJar", "(", ")", ",", "RequestsCookieJar", "(", ")", ")", ")", "def", "test_add_dict_to_cookiejar", "(", "cookiejar", ")", ":", "cookiedict", "=", "{", "'test'", ":", "'cookies'", ",", "'good'", ":", "'cookies'", "}", "cj", "=", "add_dict_to_cookiejar", "(", "cookiejar", ",", "cookiedict", ")", "cookies", "=", "dict", "(", "(", "(", "cookie", ".", "name", ",", "cookie", ".", "value", ")", "for", "cookie", "in", "cj", ")", ")", "assert", "(", "cookiedict", "==", "cookies", ")" ]
ensure add_dict_to_cookiejar works for non-requestscookiejar cookiejars .
train
false
19,459
def lane_stats(estate=None):
    """Print the estate manor lane stack stats.

    estate: the name of the target estate (None for the local estate).
    """
    return get_stats(stack='lane', estate=estate)
[ "def", "lane_stats", "(", "estate", "=", "None", ")", ":", "return", "get_stats", "(", "estate", "=", "estate", ",", "stack", "=", "'lane'", ")" ]
print the estate manor lane stack stats estate : none the name of the target estate .
train
false
19,460
def increment(key, delta=1, host=DEFAULT_HOST, port=DEFAULT_PORT):
    # Increment the value of a memcached key by `delta`.
    #
    # Raises CommandExecutionError when the key does not exist or its
    # current value is not an integer, and SaltInvocationError when the
    # delta is not an integer.  Returns the new value on success.
    conn = _connect(host, port)
    _check_stats(conn)
    cur = get(key)
    if (cur is None):
        raise CommandExecutionError("Key '{0}' does not exist".format(key))
    elif (not isinstance(cur, integer_types)):
        raise CommandExecutionError("Value for key '{0}' must be an integer to be incremented".format(key))
    try:
        return conn.incr(key, delta)
    except ValueError:
        # memcached client rejects non-integer deltas
        raise SaltInvocationError('Delta value must be an integer')
[ "def", "increment", "(", "key", ",", "delta", "=", "1", ",", "host", "=", "DEFAULT_HOST", ",", "port", "=", "DEFAULT_PORT", ")", ":", "conn", "=", "_connect", "(", "host", ",", "port", ")", "_check_stats", "(", "conn", ")", "cur", "=", "get", "(", "key", ")", "if", "(", "cur", "is", "None", ")", ":", "raise", "CommandExecutionError", "(", "\"Key '{0}' does not exist\"", ".", "format", "(", "key", ")", ")", "elif", "(", "not", "isinstance", "(", "cur", ",", "integer_types", ")", ")", ":", "raise", "CommandExecutionError", "(", "\"Value for key '{0}' must be an integer to be incremented\"", ".", "format", "(", "key", ")", ")", "try", ":", "return", "conn", ".", "incr", "(", "key", ",", "delta", ")", "except", "ValueError", ":", "raise", "SaltInvocationError", "(", "'Delta value must be an integer'", ")" ]
increment the value of a key cli example: .
train
true
19,461
def _parse_date_greek(dateString):
    """Parse a string according to a Greek 8-bit date format.

    Returns None when the string does not match; otherwise translates the
    Greek weekday/month names, rebuilds an RFC 822 date string, and
    delegates to _parse_date_rfc822.
    """
    match = _greek_date_format_re.match(dateString)
    if not match:
        return
    fields = {
        'wday': _greek_wdays[match.group(1)],
        'day': match.group(2),
        'month': _greek_months[match.group(3)],
        'year': match.group(4),
        'hour': match.group(5),
        'minute': match.group(6),
        'second': match.group(7),
        'zonediff': match.group(8),
    }
    rfc822date = ('%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s' % fields)
    return _parse_date_rfc822(rfc822date)
[ "def", "_parse_date_greek", "(", "dateString", ")", ":", "m", "=", "_greek_date_format_re", ".", "match", "(", "dateString", ")", "if", "(", "not", "m", ")", ":", "return", "wday", "=", "_greek_wdays", "[", "m", ".", "group", "(", "1", ")", "]", "month", "=", "_greek_months", "[", "m", ".", "group", "(", "3", ")", "]", "rfc822date", "=", "(", "'%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s'", "%", "{", "'wday'", ":", "wday", ",", "'day'", ":", "m", ".", "group", "(", "2", ")", ",", "'month'", ":", "month", ",", "'year'", ":", "m", ".", "group", "(", "4", ")", ",", "'hour'", ":", "m", ".", "group", "(", "5", ")", ",", "'minute'", ":", "m", ".", "group", "(", "6", ")", ",", "'second'", ":", "m", ".", "group", "(", "7", ")", ",", "'zonediff'", ":", "m", ".", "group", "(", "8", ")", "}", ")", "return", "_parse_date_rfc822", "(", "rfc822date", ")" ]
parse a string according to a greek 8-bit date format .
train
true
19,462
@require_admin_context def snapshot_get_all(context, filters=None, marker=None, limit=None, sort_keys=None, sort_dirs=None, offset=None): if (filters and (not is_valid_model_filters(models.Snapshot, filters, exclude_list=('host', 'cluster_name')))): return [] session = get_session() with session.begin(): query = _generate_paginate_query(context, session, marker, limit, sort_keys, sort_dirs, filters, offset, models.Snapshot) if (not query): return [] return query.all()
[ "@", "require_admin_context", "def", "snapshot_get_all", "(", "context", ",", "filters", "=", "None", ",", "marker", "=", "None", ",", "limit", "=", "None", ",", "sort_keys", "=", "None", ",", "sort_dirs", "=", "None", ",", "offset", "=", "None", ")", ":", "if", "(", "filters", "and", "(", "not", "is_valid_model_filters", "(", "models", ".", "Snapshot", ",", "filters", ",", "exclude_list", "=", "(", "'host'", ",", "'cluster_name'", ")", ")", ")", ")", ":", "return", "[", "]", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "query", "=", "_generate_paginate_query", "(", "context", ",", "session", ",", "marker", ",", "limit", ",", "sort_keys", ",", "sort_dirs", ",", "filters", ",", "offset", ",", "models", ".", "Snapshot", ")", "if", "(", "not", "query", ")", ":", "return", "[", "]", "return", "query", ".", "all", "(", ")" ]
get all snapshots .
train
false
19,464
def losetup_detach(device_file): check_output(['losetup', '--detach', device_file.path])
[ "def", "losetup_detach", "(", "device_file", ")", ":", "check_output", "(", "[", "'losetup'", ",", "'--detach'", ",", "device_file", ".", "path", "]", ")" ]
detach the supplied loopback device_file .
train
false
19,465
def discover_executable(name, sitepath): paths = ([str(sitepath)] + os.environ['PATH'].split(os.pathsep)) for path in paths: full_name = os.path.join(path, name) if os.path.exists(full_name): return full_name return None
[ "def", "discover_executable", "(", "name", ",", "sitepath", ")", ":", "paths", "=", "(", "[", "str", "(", "sitepath", ")", "]", "+", "os", ".", "environ", "[", "'PATH'", "]", ".", "split", "(", "os", ".", "pathsep", ")", ")", "for", "path", "in", "paths", ":", "full_name", "=", "os", ".", "path", ".", "join", "(", "path", ",", "name", ")", "if", "os", ".", "path", ".", "exists", "(", "full_name", ")", ":", "return", "full_name", "return", "None" ]
finds an executable in the given sitepath or in the path list provided by the path environment variable .
train
false
19,467
def get_instance_health(name, region=None, key=None, keyid=None, profile=None, instances=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: instance_states = conn.describe_instance_health(name, instances) ret = [] for _instance in instance_states: ret.append({'instance_id': _instance.instance_id, 'description': _instance.description, 'state': _instance.state, 'reason_code': _instance.reason_code}) return ret except boto.exception.BotoServerError as error: log.debug(error) return []
[ "def", "get_instance_health", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "instances", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "instance_states", "=", "conn", ".", "describe_instance_health", "(", "name", ",", "instances", ")", "ret", "=", "[", "]", "for", "_instance", "in", "instance_states", ":", "ret", ".", "append", "(", "{", "'instance_id'", ":", "_instance", ".", "instance_id", ",", "'description'", ":", "_instance", ".", "description", ",", "'state'", ":", "_instance", ".", "state", ",", "'reason_code'", ":", "_instance", ".", "reason_code", "}", ")", "return", "ret", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "error", ":", "log", ".", "debug", "(", "error", ")", "return", "[", "]" ]
get a list of instances and their health state cli example: .
train
true
19,468
def auth_required(func): def wrapper(self, *args, **kwargs): if (not self.is_authenticated()): log.debug(u'None API token. Authenticating with "%s" account...', self.credentials.get(u'username')) self.auth() assert self.is_authenticated() return func(self, *args, **kwargs) return wrapper
[ "def", "auth_required", "(", "func", ")", ":", "def", "wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "self", ".", "is_authenticated", "(", ")", ")", ":", "log", ".", "debug", "(", "u'None API token. Authenticating with \"%s\" account...'", ",", "self", ".", "credentials", ".", "get", "(", "u'username'", ")", ")", "self", ".", "auth", "(", ")", "assert", "self", ".", "is_authenticated", "(", ")", "return", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
decorator that protects enpoints through multiple mechanisms example:: @app .
train
false
19,469
def OptionName(widget): return widget.tk.call('tixOptionName', widget._w)
[ "def", "OptionName", "(", "widget", ")", ":", "return", "widget", ".", "tk", ".", "call", "(", "'tixOptionName'", ",", "widget", ".", "_w", ")" ]
returns the qualified path name for the widget .
train
false
19,470
def send_command(remote_conn, cmd='', delay=1): if (cmd != ''): cmd = cmd.strip() remote_conn.send((cmd + '\n')) time.sleep(delay) if remote_conn.recv_ready(): return remote_conn.recv(MAX_BUFFER) else: return ''
[ "def", "send_command", "(", "remote_conn", ",", "cmd", "=", "''", ",", "delay", "=", "1", ")", ":", "if", "(", "cmd", "!=", "''", ")", ":", "cmd", "=", "cmd", ".", "strip", "(", ")", "remote_conn", ".", "send", "(", "(", "cmd", "+", "'\\n'", ")", ")", "time", ".", "sleep", "(", "delay", ")", "if", "remote_conn", ".", "recv_ready", "(", ")", ":", "return", "remote_conn", ".", "recv", "(", "MAX_BUFFER", ")", "else", ":", "return", "''" ]
send a command down the telnet channel return the response .
train
false
19,471
def _pipepager(text, cmd, color): import subprocess env = dict(os.environ) cmd_detail = cmd.rsplit('/', 1)[(-1)].split() if ((color is None) and (cmd_detail[0] == 'less')): less_flags = (os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])) if (not less_flags): env['LESS'] = '-R' color = True elif (('r' in less_flags) or ('R' in less_flags)): color = True if (not color): text = strip_ansi(text) c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) encoding = get_best_encoding(c.stdin) try: c.stdin.write(text.encode(encoding, 'replace')) c.stdin.close() except (IOError, KeyboardInterrupt): pass while True: try: c.wait() except KeyboardInterrupt: pass else: break
[ "def", "_pipepager", "(", "text", ",", "cmd", ",", "color", ")", ":", "import", "subprocess", "env", "=", "dict", "(", "os", ".", "environ", ")", "cmd_detail", "=", "cmd", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "(", "-", "1", ")", "]", ".", "split", "(", ")", "if", "(", "(", "color", "is", "None", ")", "and", "(", "cmd_detail", "[", "0", "]", "==", "'less'", ")", ")", ":", "less_flags", "=", "(", "os", ".", "environ", ".", "get", "(", "'LESS'", ",", "''", ")", "+", "' '", ".", "join", "(", "cmd_detail", "[", "1", ":", "]", ")", ")", "if", "(", "not", "less_flags", ")", ":", "env", "[", "'LESS'", "]", "=", "'-R'", "color", "=", "True", "elif", "(", "(", "'r'", "in", "less_flags", ")", "or", "(", "'R'", "in", "less_flags", ")", ")", ":", "color", "=", "True", "if", "(", "not", "color", ")", ":", "text", "=", "strip_ansi", "(", "text", ")", "c", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "env", "=", "env", ")", "encoding", "=", "get_best_encoding", "(", "c", ".", "stdin", ")", "try", ":", "c", ".", "stdin", ".", "write", "(", "text", ".", "encode", "(", "encoding", ",", "'replace'", ")", ")", "c", ".", "stdin", ".", "close", "(", ")", "except", "(", "IOError", ",", "KeyboardInterrupt", ")", ":", "pass", "while", "True", ":", "try", ":", "c", ".", "wait", "(", ")", "except", "KeyboardInterrupt", ":", "pass", "else", ":", "break" ]
page through text by feeding it to another program .
train
true
19,472
def ypbpr2rgb(ypbpr): return _convert(rgb_from_ypbpr, ypbpr)
[ "def", "ypbpr2rgb", "(", "ypbpr", ")", ":", "return", "_convert", "(", "rgb_from_ypbpr", ",", "ypbpr", ")" ]
ypbpr to rgb color space conversion .
train
false
19,474
def input_string_or_list(options): if (options == '<<inherit>>'): return '<<inherit>>' if ((options is None) or (options == '') or (options == 'delete')): return [] elif isinstance(options, list): return options elif isinstance(options, basestring): tokens = shlex.split(options) return tokens else: raise CX(_('invalid input type'))
[ "def", "input_string_or_list", "(", "options", ")", ":", "if", "(", "options", "==", "'<<inherit>>'", ")", ":", "return", "'<<inherit>>'", "if", "(", "(", "options", "is", "None", ")", "or", "(", "options", "==", "''", ")", "or", "(", "options", "==", "'delete'", ")", ")", ":", "return", "[", "]", "elif", "isinstance", "(", "options", ",", "list", ")", ":", "return", "options", "elif", "isinstance", "(", "options", ",", "basestring", ")", ":", "tokens", "=", "shlex", ".", "split", "(", "options", ")", "return", "tokens", "else", ":", "raise", "CX", "(", "_", "(", "'invalid input type'", ")", ")" ]
accepts a delimited list of stuff or a list .
train
false
19,475
def _shorten_line_at_tokens_new(tokens, source, indentation, max_line_length): (yield (indentation + source)) parsed_tokens = _parse_tokens(tokens) if parsed_tokens: fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, start_on_prefix_line=True) if (fixed and check_syntax(normalize_multiline(fixed.lstrip()))): (yield fixed) fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, start_on_prefix_line=False) if (fixed and check_syntax(normalize_multiline(fixed.lstrip()))): (yield fixed)
[ "def", "_shorten_line_at_tokens_new", "(", "tokens", ",", "source", ",", "indentation", ",", "max_line_length", ")", ":", "(", "yield", "(", "indentation", "+", "source", ")", ")", "parsed_tokens", "=", "_parse_tokens", "(", "tokens", ")", "if", "parsed_tokens", ":", "fixed", "=", "_reflow_lines", "(", "parsed_tokens", ",", "indentation", ",", "max_line_length", ",", "start_on_prefix_line", "=", "True", ")", "if", "(", "fixed", "and", "check_syntax", "(", "normalize_multiline", "(", "fixed", ".", "lstrip", "(", ")", ")", ")", ")", ":", "(", "yield", "fixed", ")", "fixed", "=", "_reflow_lines", "(", "parsed_tokens", ",", "indentation", ",", "max_line_length", ",", "start_on_prefix_line", "=", "False", ")", "if", "(", "fixed", "and", "check_syntax", "(", "normalize_multiline", "(", "fixed", ".", "lstrip", "(", ")", ")", ")", ")", ":", "(", "yield", "fixed", ")" ]
shorten the line taking its length into account .
train
true
19,476
def test_cons_slicing(): cons = HyCons('car', 'cdr') assert (cons[0] == 'car') assert (cons[1:] == 'cdr') try: cons[:] assert (True is False) except IndexError: pass try: cons[1] assert (True is False) except IndexError: pass
[ "def", "test_cons_slicing", "(", ")", ":", "cons", "=", "HyCons", "(", "'car'", ",", "'cdr'", ")", "assert", "(", "cons", "[", "0", "]", "==", "'car'", ")", "assert", "(", "cons", "[", "1", ":", "]", "==", "'cdr'", ")", "try", ":", "cons", "[", ":", "]", "assert", "(", "True", "is", "False", ")", "except", "IndexError", ":", "pass", "try", ":", "cons", "[", "1", "]", "assert", "(", "True", "is", "False", ")", "except", "IndexError", ":", "pass" ]
check that cons slicing works as expected .
train
false
19,477
def get_vn_id(kwargs=None, call=None): if (call == 'action'): raise SaltCloudSystemExit('The get_vn_id function must be called with -f or --function.') if (kwargs is None): kwargs = {} name = kwargs.get('name', None) if (name is None): raise SaltCloudSystemExit('The get_vn_id function requires a name.') try: ret = list_vns()[name]['id'] except KeyError: raise SaltCloudSystemExit("The VN '{0}' could not be found.".format(name)) return ret
[ "def", "get_vn_id", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The get_vn_id function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "name", "=", "kwargs", ".", "get", "(", "'name'", ",", "None", ")", "if", "(", "name", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The get_vn_id function requires a name.'", ")", "try", ":", "ret", "=", "list_vns", "(", ")", "[", "name", "]", "[", "'id'", "]", "except", "KeyError", ":", "raise", "SaltCloudSystemExit", "(", "\"The VN '{0}' could not be found.\"", ".", "format", "(", "name", ")", ")", "return", "ret" ]
returns a virtual networks id from the given virtual networks name .
train
true
19,478
def host_memory_extents(obj): return mviewbuf.memoryview_get_extents(obj)
[ "def", "host_memory_extents", "(", "obj", ")", ":", "return", "mviewbuf", ".", "memoryview_get_extents", "(", "obj", ")" ]
returns the start and end pointer of the array .
train
false
19,479
def getToothProfile(derivation, pitchRadius, teeth): if (teeth < 0): return getToothProfileAnnulus(derivation, pitchRadius, teeth) if (teeth == 0): return getToothProfileRack(derivation) return getToothProfileCylinder(derivation, pitchRadius, teeth)
[ "def", "getToothProfile", "(", "derivation", ",", "pitchRadius", ",", "teeth", ")", ":", "if", "(", "teeth", "<", "0", ")", ":", "return", "getToothProfileAnnulus", "(", "derivation", ",", "pitchRadius", ",", "teeth", ")", "if", "(", "teeth", "==", "0", ")", ":", "return", "getToothProfileRack", "(", "derivation", ")", "return", "getToothProfileCylinder", "(", "derivation", ",", "pitchRadius", ",", "teeth", ")" ]
get profile for one tooth .
train
false
19,481
def coshm(A): A = _asarray_square(A) return _maybe_real(A, (0.5 * (expm(A) + expm((- A)))))
[ "def", "coshm", "(", "A", ")", ":", "A", "=", "_asarray_square", "(", "A", ")", "return", "_maybe_real", "(", "A", ",", "(", "0.5", "*", "(", "expm", "(", "A", ")", "+", "expm", "(", "(", "-", "A", ")", ")", ")", ")", ")" ]
compute the hyperbolic matrix cosine .
train
false
19,482
def patch_cache_rr(lock_obj): import rr_cache import rr_cache_with_lock rr_lock1lvl = rr_cache_with_lock.create_cache1lvl(lock_obj) rr_lock2lvl = rr_cache_with_lock.create_cache2lvl(lock_obj) __patch(rr_cache, 'cache1lvl', rr_lock1lvl) __patch(rr_cache, 'cache2lvl', rr_lock2lvl)
[ "def", "patch_cache_rr", "(", "lock_obj", ")", ":", "import", "rr_cache", "import", "rr_cache_with_lock", "rr_lock1lvl", "=", "rr_cache_with_lock", ".", "create_cache1lvl", "(", "lock_obj", ")", "rr_lock2lvl", "=", "rr_cache_with_lock", ".", "create_cache2lvl", "(", "lock_obj", ")", "__patch", "(", "rr_cache", ",", "'cache1lvl'", ",", "rr_lock1lvl", ")", "__patch", "(", "rr_cache", ",", "'cache2lvl'", ",", "rr_lock2lvl", ")" ]
patches cache mechanizm to be thread safe .
train
false
19,484
@utils.arg('flavor', metavar='<flavor>', help=_('Name or ID of the flavor to delete.')) def do_flavor_delete(cs, args): flavorid = _find_flavor(cs, args.flavor) cs.flavors.delete(flavorid) _print_flavor_list([flavorid])
[ "@", "utils", ".", "arg", "(", "'flavor'", ",", "metavar", "=", "'<flavor>'", ",", "help", "=", "_", "(", "'Name or ID of the flavor to delete.'", ")", ")", "def", "do_flavor_delete", "(", "cs", ",", "args", ")", ":", "flavorid", "=", "_find_flavor", "(", "cs", ",", "args", ".", "flavor", ")", "cs", ".", "flavors", ".", "delete", "(", "flavorid", ")", "_print_flavor_list", "(", "[", "flavorid", "]", ")" ]
delete a specific flavor .
train
false
19,488
def quota_usage_get_all_by_project(context, project_id): return IMPL.quota_usage_get_all_by_project(context, project_id)
[ "def", "quota_usage_get_all_by_project", "(", "context", ",", "project_id", ")", ":", "return", "IMPL", ".", "quota_usage_get_all_by_project", "(", "context", ",", "project_id", ")" ]
retrieve all usage associated with a given resource .
train
false
19,490
def _get_config_value(profile, config_name): config = __salt__['config.option'](profile) if (not config): raise CommandExecutionError("Authentication information could not be found for the '{0}' profile.".format(profile)) config_value = config.get(config_name) if (config_value is None): raise CommandExecutionError("The '{0}' parameter was not found in the '{1}' profile.".format(config_name, profile)) return config_value
[ "def", "_get_config_value", "(", "profile", ",", "config_name", ")", ":", "config", "=", "__salt__", "[", "'config.option'", "]", "(", "profile", ")", "if", "(", "not", "config", ")", ":", "raise", "CommandExecutionError", "(", "\"Authentication information could not be found for the '{0}' profile.\"", ".", "format", "(", "profile", ")", ")", "config_value", "=", "config", ".", "get", "(", "config_name", ")", "if", "(", "config_value", "is", "None", ")", ":", "raise", "CommandExecutionError", "(", "\"The '{0}' parameter was not found in the '{1}' profile.\"", ".", "format", "(", "config_name", ",", "profile", ")", ")", "return", "config_value" ]
helper function that returns a profiles configuration value based on the supplied configuration name .
train
true
19,491
def phone2numeric(phone): letters = re.compile('[A-PR-Y]', re.I) char2number = (lambda m: {'a': '2', 'c': '2', 'b': '2', 'e': '3', 'd': '3', 'g': '4', 'f': '3', 'i': '4', 'h': '4', 'k': '5', 'j': '5', 'm': '6', 'l': '5', 'o': '6', 'n': '6', 'p': '7', 's': '7', 'r': '7', 'u': '8', 't': '8', 'w': '9', 'v': '8', 'y': '9', 'x': '9'}.get(m.group(0).lower())) return letters.sub(char2number, phone)
[ "def", "phone2numeric", "(", "phone", ")", ":", "letters", "=", "re", ".", "compile", "(", "'[A-PR-Y]'", ",", "re", ".", "I", ")", "char2number", "=", "(", "lambda", "m", ":", "{", "'a'", ":", "'2'", ",", "'c'", ":", "'2'", ",", "'b'", ":", "'2'", ",", "'e'", ":", "'3'", ",", "'d'", ":", "'3'", ",", "'g'", ":", "'4'", ",", "'f'", ":", "'3'", ",", "'i'", ":", "'4'", ",", "'h'", ":", "'4'", ",", "'k'", ":", "'5'", ",", "'j'", ":", "'5'", ",", "'m'", ":", "'6'", ",", "'l'", ":", "'5'", ",", "'o'", ":", "'6'", ",", "'n'", ":", "'6'", ",", "'p'", ":", "'7'", ",", "'s'", ":", "'7'", ",", "'r'", ":", "'7'", ",", "'u'", ":", "'8'", ",", "'t'", ":", "'8'", ",", "'w'", ":", "'9'", ",", "'v'", ":", "'8'", ",", "'y'", ":", "'9'", ",", "'x'", ":", "'9'", "}", ".", "get", "(", "m", ".", "group", "(", "0", ")", ".", "lower", "(", ")", ")", ")", "return", "letters", ".", "sub", "(", "char2number", ",", "phone", ")" ]
converts a phone number with letters into its numeric equivalent .
train
false
19,492
def prefixedMethodNames(classObj, prefix): dct = {} addMethodNamesToDict(classObj, dct, prefix) return list(dct.keys())
[ "def", "prefixedMethodNames", "(", "classObj", ",", "prefix", ")", ":", "dct", "=", "{", "}", "addMethodNamesToDict", "(", "classObj", ",", "dct", ",", "prefix", ")", "return", "list", "(", "dct", ".", "keys", "(", ")", ")" ]
given a class object c{classobj} .
train
false
19,494
@utils_routes.route('/calendar/<path:filename>') def send_cal(filename): return send_from_directory((os.path.realpath('.') + '/static/'), filename)
[ "@", "utils_routes", ".", "route", "(", "'/calendar/<path:filename>'", ")", "def", "send_cal", "(", "filename", ")", ":", "return", "send_from_directory", "(", "(", "os", ".", "path", ".", "realpath", "(", "'.'", ")", "+", "'/static/'", ")", ",", "filename", ")" ]
returns calendar .
train
false
19,495
def vendor(proxy): if proxy: return {'vendor': _get_grain(proxy, 'vendor')}
[ "def", "vendor", "(", "proxy", ")", ":", "if", "proxy", ":", "return", "{", "'vendor'", ":", "_get_grain", "(", "proxy", ",", "'vendor'", ")", "}" ]
returns the network device vendor .
train
false
19,499
def _get_func_fullname(func): (modules, funcname) = get_func_name(func) modules.append(funcname) return os.path.join(*modules)
[ "def", "_get_func_fullname", "(", "func", ")", ":", "(", "modules", ",", "funcname", ")", "=", "get_func_name", "(", "func", ")", "modules", ".", "append", "(", "funcname", ")", "return", "os", ".", "path", ".", "join", "(", "*", "modules", ")" ]
compute the part of part associated with a function .
train
false
19,500
def style_loss(style_image, target_image, style_masks, target_masks): assert (3 == K.ndim(style_image) == K.ndim(target_image)) assert (3 == K.ndim(style_masks) == K.ndim(target_masks)) loss = K.variable(0) for i in xrange(nb_labels): if (K.image_dim_ordering() == 'th'): style_mask = style_masks[i, :, :] target_mask = target_masks[i, :, :] else: style_mask = style_masks[:, :, i] target_mask = target_masks[:, :, i] loss += region_style_loss(style_image, target_image, style_mask, target_mask) return loss
[ "def", "style_loss", "(", "style_image", ",", "target_image", ",", "style_masks", ",", "target_masks", ")", ":", "assert", "(", "3", "==", "K", ".", "ndim", "(", "style_image", ")", "==", "K", ".", "ndim", "(", "target_image", ")", ")", "assert", "(", "3", "==", "K", ".", "ndim", "(", "style_masks", ")", "==", "K", ".", "ndim", "(", "target_masks", ")", ")", "loss", "=", "K", ".", "variable", "(", "0", ")", "for", "i", "in", "xrange", "(", "nb_labels", ")", ":", "if", "(", "K", ".", "image_dim_ordering", "(", ")", "==", "'th'", ")", ":", "style_mask", "=", "style_masks", "[", "i", ",", ":", ",", ":", "]", "target_mask", "=", "target_masks", "[", "i", ",", ":", ",", ":", "]", "else", ":", "style_mask", "=", "style_masks", "[", ":", ",", ":", ",", "i", "]", "target_mask", "=", "target_masks", "[", ":", ",", ":", ",", "i", "]", "loss", "+=", "region_style_loss", "(", "style_image", ",", "target_image", ",", "style_mask", ",", "target_mask", ")", "return", "loss" ]
calculate style loss between style_image and target_image .
train
false
19,501
def texts_are_equivalent(texta, textb): def normalized_lines(text): for l in text.splitlines(): l = l.strip() if l: (yield l) texta = '\n'.join(normalized_lines(texta)) textb = '\n'.join(normalized_lines(textb)) return (texta == textb)
[ "def", "texts_are_equivalent", "(", "texta", ",", "textb", ")", ":", "def", "normalized_lines", "(", "text", ")", ":", "for", "l", "in", "text", ".", "splitlines", "(", ")", ":", "l", "=", "l", ".", "strip", "(", ")", "if", "l", ":", "(", "yield", "l", ")", "texta", "=", "'\\n'", ".", "join", "(", "normalized_lines", "(", "texta", ")", ")", "textb", "=", "'\\n'", ".", "join", "(", "normalized_lines", "(", "textb", ")", ")", "return", "(", "texta", "==", "textb", ")" ]
compares two program texts by removing all identation and blank lines first .
train
false
19,503
def parse_field_descriptor(descr, name=None): assert descr (type, tail) = eat_descriptor(descr) assert (not tail) if name: return ((type + ' ') + name) else: return type
[ "def", "parse_field_descriptor", "(", "descr", ",", "name", "=", "None", ")", ":", "assert", "descr", "(", "type", ",", "tail", ")", "=", "eat_descriptor", "(", "descr", ")", "assert", "(", "not", "tail", ")", "if", "name", ":", "return", "(", "(", "type", "+", "' '", ")", "+", "name", ")", "else", ":", "return", "type" ]
parse a field descriptor .
train
false
19,505
def xframe_options_deny(view_func): def wrapped_view(*args, **kwargs): resp = view_func(*args, **kwargs) if (resp.get('X-Frame-Options') is None): resp['X-Frame-Options'] = 'DENY' return resp return wraps(view_func)(wrapped_view)
[ "def", "xframe_options_deny", "(", "view_func", ")", ":", "def", "wrapped_view", "(", "*", "args", ",", "**", "kwargs", ")", ":", "resp", "=", "view_func", "(", "*", "args", ",", "**", "kwargs", ")", "if", "(", "resp", ".", "get", "(", "'X-Frame-Options'", ")", "is", "None", ")", ":", "resp", "[", "'X-Frame-Options'", "]", "=", "'DENY'", "return", "resp", "return", "wraps", "(", "view_func", ")", "(", "wrapped_view", ")" ]
modifies a view function so its response has the x-frame-options http header set to deny as long as the response doesnt already have that header set .
train
false
19,508
def make_thumbnail(in_fname, out_fname, width, height): img = Image.open(in_fname) (width_in, height_in) = img.size scale_w = (width / float(width_in)) scale_h = (height / float(height_in)) if ((height_in * scale_w) <= height): scale = scale_w else: scale = scale_h width_sc = int(round((scale * width_in))) height_sc = int(round((scale * height_in))) img.thumbnail((width_sc, height_sc), Image.ANTIALIAS) thumb = Image.new('RGB', (width, height), (255, 255, 255)) pos_insert = (((width - width_sc) / 2), ((height - height_sc) / 2)) thumb.paste(img, pos_insert) thumb.save(out_fname)
[ "def", "make_thumbnail", "(", "in_fname", ",", "out_fname", ",", "width", ",", "height", ")", ":", "img", "=", "Image", ".", "open", "(", "in_fname", ")", "(", "width_in", ",", "height_in", ")", "=", "img", ".", "size", "scale_w", "=", "(", "width", "/", "float", "(", "width_in", ")", ")", "scale_h", "=", "(", "height", "/", "float", "(", "height_in", ")", ")", "if", "(", "(", "height_in", "*", "scale_w", ")", "<=", "height", ")", ":", "scale", "=", "scale_w", "else", ":", "scale", "=", "scale_h", "width_sc", "=", "int", "(", "round", "(", "(", "scale", "*", "width_in", ")", ")", ")", "height_sc", "=", "int", "(", "round", "(", "(", "scale", "*", "height_in", ")", ")", ")", "img", ".", "thumbnail", "(", "(", "width_sc", ",", "height_sc", ")", ",", "Image", ".", "ANTIALIAS", ")", "thumb", "=", "Image", ".", "new", "(", "'RGB'", ",", "(", "width", ",", "height", ")", ",", "(", "255", ",", "255", ",", "255", ")", ")", "pos_insert", "=", "(", "(", "(", "width", "-", "width_sc", ")", "/", "2", ")", ",", "(", "(", "height", "-", "height_sc", ")", "/", "2", ")", ")", "thumb", ".", "paste", "(", "img", ",", "pos_insert", ")", "thumb", ".", "save", "(", "out_fname", ")" ]
make a thumbnail with the same aspect ratio centered in an image with a given width and height .
train
false
19,509
def parse_mime_type(mime_type): parts = mime_type.split(';') params = dict([tuple([s.strip() for s in param.split('=')]) for param in parts[1:]]) full_type = parts[0].strip() if (full_type == '*'): full_type = '*/*' (type, subtype) = full_type.split('/') return (type.strip(), subtype.strip(), params)
[ "def", "parse_mime_type", "(", "mime_type", ")", ":", "parts", "=", "mime_type", ".", "split", "(", "';'", ")", "params", "=", "dict", "(", "[", "tuple", "(", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "param", ".", "split", "(", "'='", ")", "]", ")", "for", "param", "in", "parts", "[", "1", ":", "]", "]", ")", "full_type", "=", "parts", "[", "0", "]", ".", "strip", "(", ")", "if", "(", "full_type", "==", "'*'", ")", ":", "full_type", "=", "'*/*'", "(", "type", ",", "subtype", ")", "=", "full_type", ".", "split", "(", "'/'", ")", "return", "(", "type", ".", "strip", "(", ")", ",", "subtype", ".", "strip", "(", ")", ",", "params", ")" ]
parses a mime-type into its component parts .
train
true
19,510
def _NewFacetsFromPb(facet_list): return [_NewFacetFromPb(f) for f in facet_list]
[ "def", "_NewFacetsFromPb", "(", "facet_list", ")", ":", "return", "[", "_NewFacetFromPb", "(", "f", ")", "for", "f", "in", "facet_list", "]" ]
returns a list of facet copied from a document_pb .
train
false
19,511
def test0(): pm = PackageMaker('distutils2', '1.0.2', 'Python distutils package.') pm.build('/Users/dinu/Desktop/distutils2')
[ "def", "test0", "(", ")", ":", "pm", "=", "PackageMaker", "(", "'distutils2'", ",", "'1.0.2'", ",", "'Python distutils package.'", ")", "pm", ".", "build", "(", "'/Users/dinu/Desktop/distutils2'", ")" ]
vanilla test for the distutils distribution .
train
false
19,512
def _available_path(folder, filename): path = (folder + filename) if (not os.path.isfile(path)): return path path += str(1) ct = 1 while os.path.isfile(path): ct += 1 path = ((folder + filename) + str(ct)) return path
[ "def", "_available_path", "(", "folder", ",", "filename", ")", ":", "path", "=", "(", "folder", "+", "filename", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ")", ":", "return", "path", "path", "+=", "str", "(", "1", ")", "ct", "=", "1", "while", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "ct", "+=", "1", "path", "=", "(", "(", "folder", "+", "filename", ")", "+", "str", "(", "ct", ")", ")", "return", "path" ]
takes filename and folder and returns available path .
train
false
19,514
def require_context(f): def wrapper(*args, **kwargs): if ((not is_admin_context(args[0])) and (not is_user_context(args[0]))): raise exception.NotAuthorized() return f(*args, **kwargs) return wrapper
[ "def", "require_context", "(", "f", ")", ":", "def", "wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "(", "not", "is_admin_context", "(", "args", "[", "0", "]", ")", ")", "and", "(", "not", "is_user_context", "(", "args", "[", "0", "]", ")", ")", ")", ":", "raise", "exception", ".", "NotAuthorized", "(", ")", "return", "f", "(", "*", "args", ",", "**", "kwargs", ")", "return", "wrapper" ]
raise exception .
train
false
19,517
def sm_flavor_get_all(context): return IMPL.sm_flavor_get_all(context)
[ "def", "sm_flavor_get_all", "(", "context", ")", ":", "return", "IMPL", ".", "sm_flavor_get_all", "(", "context", ")" ]
get all sm flavors .
train
false
19,518
@contextlib.contextmanager def replace_modules(new_module_classes): old_registry = _registry[:] _registry[:] = [] for cls in new_module_classes: register(cls) try: (yield) finally: _registry[:] = old_registry
[ "@", "contextlib", ".", "contextmanager", "def", "replace_modules", "(", "new_module_classes", ")", ":", "old_registry", "=", "_registry", "[", ":", "]", "_registry", "[", ":", "]", "=", "[", "]", "for", "cls", "in", "new_module_classes", ":", "register", "(", "cls", ")", "try", ":", "(", "yield", ")", "finally", ":", "_registry", "[", ":", "]", "=", "old_registry" ]
context manager to temporarily replace all modules with something else .
train
false
19,519
def frame(I=None, second=5, saveable=True, name='frame', cmap=None, fig_idx=12836): if (saveable is False): plt.ion() fig = plt.figure(fig_idx) if (len(I.shape) and (I.shape[(-1)] == 1)): I = I[:, :, 0] plt.imshow(I, cmap) plt.title(name) if saveable: plt.savefig((name + '.pdf'), format='pdf') else: plt.draw() plt.pause(second)
[ "def", "frame", "(", "I", "=", "None", ",", "second", "=", "5", ",", "saveable", "=", "True", ",", "name", "=", "'frame'", ",", "cmap", "=", "None", ",", "fig_idx", "=", "12836", ")", ":", "if", "(", "saveable", "is", "False", ")", ":", "plt", ".", "ion", "(", ")", "fig", "=", "plt", ".", "figure", "(", "fig_idx", ")", "if", "(", "len", "(", "I", ".", "shape", ")", "and", "(", "I", ".", "shape", "[", "(", "-", "1", ")", "]", "==", "1", ")", ")", ":", "I", "=", "I", "[", ":", ",", ":", ",", "0", "]", "plt", ".", "imshow", "(", "I", ",", "cmap", ")", "plt", ".", "title", "(", "name", ")", "if", "saveable", ":", "plt", ".", "savefig", "(", "(", "name", "+", "'.pdf'", ")", ",", "format", "=", "'pdf'", ")", "else", ":", "plt", ".", "draw", "(", ")", "plt", ".", "pause", "(", "second", ")" ]
display a frame .
train
true
19,520
def libvlc_media_player_has_vout(p_mi): f = (_Cfunctions.get('libvlc_media_player_has_vout', None) or _Cfunction('libvlc_media_player_has_vout', ((1,),), None, ctypes.c_uint, MediaPlayer)) return f(p_mi)
[ "def", "libvlc_media_player_has_vout", "(", "p_mi", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_has_vout'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_has_vout'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_uint", ",", "MediaPlayer", ")", ")", "return", "f", "(", "p_mi", ")" ]
how many video outputs does this media player have? .
train
true
19,521
def _diff_dict(orig, new): result = {k: ['-'] for k in (set(orig.keys()) - set(new.keys()))} for (key, value) in new.items(): if ((key not in orig) or (value != orig[key])): result[key] = ['+', value] return result
[ "def", "_diff_dict", "(", "orig", ",", "new", ")", ":", "result", "=", "{", "k", ":", "[", "'-'", "]", "for", "k", "in", "(", "set", "(", "orig", ".", "keys", "(", ")", ")", "-", "set", "(", "new", ".", "keys", "(", ")", ")", ")", "}", "for", "(", "key", ",", "value", ")", "in", "new", ".", "items", "(", ")", ":", "if", "(", "(", "key", "not", "in", "orig", ")", "or", "(", "value", "!=", "orig", "[", "key", "]", ")", ")", ":", "result", "[", "key", "]", "=", "[", "'+'", ",", "value", "]", "return", "result" ]
return a dict describing how to change orig to new .
train
false
19,522
def set_jinja2(jinja2, key=_registry_key, app=None): app = (app or webapp2.get_app()) app.registry[key] = jinja2
[ "def", "set_jinja2", "(", "jinja2", ",", "key", "=", "_registry_key", ",", "app", "=", "None", ")", ":", "app", "=", "(", "app", "or", "webapp2", ".", "get_app", "(", ")", ")", "app", ".", "registry", "[", "key", "]", "=", "jinja2" ]
sets an instance of :class:jinja2 in the app registry .
train
false
19,523
def add_star(G, nodes, **attr): nlist = iter(nodes) v = next(nlist) edges = ((v, n) for n in nlist) G.add_edges_from(edges, **attr)
[ "def", "add_star", "(", "G", ",", "nodes", ",", "**", "attr", ")", ":", "nlist", "=", "iter", "(", "nodes", ")", "v", "=", "next", "(", "nlist", ")", "edges", "=", "(", "(", "v", ",", "n", ")", "for", "n", "in", "nlist", ")", "G", ".", "add_edges_from", "(", "edges", ",", "**", "attr", ")" ]
add a star to graph g .
train
false
19,524
def trigger_restart(): if sabnzbd.downloader.Downloader.do.paused: sabnzbd.RESTART_ARGS.append('-p') sys.argv = sabnzbd.RESTART_ARGS sabnzbd.halt() cherrypy.engine.exit() if sabnzbd.WIN32: del_connection_info() if (sabnzbd.WIN_SERVICE or (getattr(sys, 'frozen', None) == 'macosx_app')): sabnzbd.TRIGGER_RESTART = True else: cherrypy.engine._do_execv()
[ "def", "trigger_restart", "(", ")", ":", "if", "sabnzbd", ".", "downloader", ".", "Downloader", ".", "do", ".", "paused", ":", "sabnzbd", ".", "RESTART_ARGS", ".", "append", "(", "'-p'", ")", "sys", ".", "argv", "=", "sabnzbd", ".", "RESTART_ARGS", "sabnzbd", ".", "halt", "(", ")", "cherrypy", ".", "engine", ".", "exit", "(", ")", "if", "sabnzbd", ".", "WIN32", ":", "del_connection_info", "(", ")", "if", "(", "sabnzbd", ".", "WIN_SERVICE", "or", "(", "getattr", "(", "sys", ",", "'frozen'", ",", "None", ")", "==", "'macosx_app'", ")", ")", ":", "sabnzbd", ".", "TRIGGER_RESTART", "=", "True", "else", ":", "cherrypy", ".", "engine", ".", "_do_execv", "(", ")" ]
trigger a restart by setting a flag an shutting down cp .
train
false
19,525
def _get_oath2_access_token(client_key, client_secret): if ((not client_key) and (not client_secret)): log.error('client_key and client_secret have not been specified and are required parameters.') return False method = 'POST' url = 'https://api.vistara.io/auth/oauth/token' headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'} params = {'grant_type': 'client_credentials', 'client_id': client_key, 'client_secret': client_secret} resp = salt.utils.http.query(url=url, method=method, header_dict=headers, params=params, opts=__opts__) respbody = resp.get('body', None) if (not respbody): return False access_token = json.loads(respbody)['access_token'] return access_token
[ "def", "_get_oath2_access_token", "(", "client_key", ",", "client_secret", ")", ":", "if", "(", "(", "not", "client_key", ")", "and", "(", "not", "client_secret", ")", ")", ":", "log", ".", "error", "(", "'client_key and client_secret have not been specified and are required parameters.'", ")", "return", "False", "method", "=", "'POST'", "url", "=", "'https://api.vistara.io/auth/oauth/token'", "headers", "=", "{", "'Content-Type'", ":", "'application/x-www-form-urlencoded'", ",", "'Accept'", ":", "'application/json'", "}", "params", "=", "{", "'grant_type'", ":", "'client_credentials'", ",", "'client_id'", ":", "client_key", ",", "'client_secret'", ":", "client_secret", "}", "resp", "=", "salt", ".", "utils", ".", "http", ".", "query", "(", "url", "=", "url", ",", "method", "=", "method", ",", "header_dict", "=", "headers", ",", "params", "=", "params", ",", "opts", "=", "__opts__", ")", "respbody", "=", "resp", ".", "get", "(", "'body'", ",", "None", ")", "if", "(", "not", "respbody", ")", ":", "return", "False", "access_token", "=", "json", ".", "loads", "(", "respbody", ")", "[", "'access_token'", "]", "return", "access_token" ]
query the vistara api and get an access_token .
train
true
19,527
def sample_iter_subs(expr, condition=None, numsamples=S.Infinity, **kwargs): if (condition is not None): ps = pspace(Tuple(expr, condition)) else: ps = pspace(expr) count = 0 while (count < numsamples): d = ps.sample() if (condition is not None): gd = condition.xreplace(d) if ((gd != True) and (gd != False)): raise ValueError('Conditions must not contain free symbols') if (not gd): continue (yield expr.xreplace(d)) count += 1
[ "def", "sample_iter_subs", "(", "expr", ",", "condition", "=", "None", ",", "numsamples", "=", "S", ".", "Infinity", ",", "**", "kwargs", ")", ":", "if", "(", "condition", "is", "not", "None", ")", ":", "ps", "=", "pspace", "(", "Tuple", "(", "expr", ",", "condition", ")", ")", "else", ":", "ps", "=", "pspace", "(", "expr", ")", "count", "=", "0", "while", "(", "count", "<", "numsamples", ")", ":", "d", "=", "ps", ".", "sample", "(", ")", "if", "(", "condition", "is", "not", "None", ")", ":", "gd", "=", "condition", ".", "xreplace", "(", "d", ")", "if", "(", "(", "gd", "!=", "True", ")", "and", "(", "gd", "!=", "False", ")", ")", ":", "raise", "ValueError", "(", "'Conditions must not contain free symbols'", ")", "if", "(", "not", "gd", ")", ":", "continue", "(", "yield", "expr", ".", "xreplace", "(", "d", ")", ")", "count", "+=", "1" ]
see sample_iter uses subs for computation .
train
false
19,528
def bench_R6(): s = sum((simplify((((x + sin(i)) / x) + ((x - sin(i)) / x))) for i in range(100)))
[ "def", "bench_R6", "(", ")", ":", "s", "=", "sum", "(", "(", "simplify", "(", "(", "(", "(", "x", "+", "sin", "(", "i", ")", ")", "/", "x", ")", "+", "(", "(", "x", "-", "sin", "(", "i", ")", ")", "/", "x", ")", ")", ")", "for", "i", "in", "range", "(", "100", ")", ")", ")" ]
sum(simplify((x+sin(i))/x+(x-sin(i))/x) for i in range) .
train
false
19,529
def get_all_cache_subnet_groups(name=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: marker = '' groups = [] while (marker is not None): ret = conn.describe_cache_subnet_groups(cache_subnet_group_name=name, marker=marker) trimmed = ret.get('DescribeCacheSubnetGroupsResponse', {}).get('DescribeCacheSubnetGroupsResult', {}) groups += trimmed.get('CacheSubnetGroups', []) marker = trimmed.get('Marker', None) if (not groups): log.debug('No ElastiCache subnet groups found.') return groups except boto.exception.BotoServerError as e: log.error(e) return []
[ "def", "get_all_cache_subnet_groups", "(", "name", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "marker", "=", "''", "groups", "=", "[", "]", "while", "(", "marker", "is", "not", "None", ")", ":", "ret", "=", "conn", ".", "describe_cache_subnet_groups", "(", "cache_subnet_group_name", "=", "name", ",", "marker", "=", "marker", ")", "trimmed", "=", "ret", ".", "get", "(", "'DescribeCacheSubnetGroupsResponse'", ",", "{", "}", ")", ".", "get", "(", "'DescribeCacheSubnetGroupsResult'", ",", "{", "}", ")", "groups", "+=", "trimmed", ".", "get", "(", "'CacheSubnetGroups'", ",", "[", "]", ")", "marker", "=", "trimmed", ".", "get", "(", "'Marker'", ",", "None", ")", "if", "(", "not", "groups", ")", ":", "log", ".", "debug", "(", "'No ElastiCache subnet groups found.'", ")", "return", "groups", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "[", "]" ]
return a list of all cache subnet groups with details cli example:: salt myminion boto_elasticache .
train
true
19,531
def test_get_params(): kwargs = {'population_size': 500, 'generations': 1000, 'verbosity': 1} tpot_obj = TPOTClassifier(**kwargs) initializer = inspect.getargspec(TPOTBase.__init__) default_kwargs = dict(zip(initializer.args[1:], initializer.defaults)) default_kwargs.update(kwargs) assert (tpot_obj.get_params() == default_kwargs)
[ "def", "test_get_params", "(", ")", ":", "kwargs", "=", "{", "'population_size'", ":", "500", ",", "'generations'", ":", "1000", ",", "'verbosity'", ":", "1", "}", "tpot_obj", "=", "TPOTClassifier", "(", "**", "kwargs", ")", "initializer", "=", "inspect", ".", "getargspec", "(", "TPOTBase", ".", "__init__", ")", "default_kwargs", "=", "dict", "(", "zip", "(", "initializer", ".", "args", "[", "1", ":", "]", ",", "initializer", ".", "defaults", ")", ")", "default_kwargs", ".", "update", "(", "kwargs", ")", "assert", "(", "tpot_obj", ".", "get_params", "(", ")", "==", "default_kwargs", ")" ]
assert that get_params returns the exact dictionary of parameters used by tpot .
train
false
19,533
def takes_multiple_arguments(func): if (func in ONE_ARITY_BUILTINS): return False elif (func in MULTI_ARITY_BUILTINS): return True try: spec = getargspec(func) except: return False try: is_constructor = ((spec.args[0] == 'self') and isinstance(func, type)) except: is_constructor = False if spec.varargs: return True if (spec.defaults is None): return ((len(spec.args) - is_constructor) != 1) return (((len(spec.args) - len(spec.defaults)) - is_constructor) > 1)
[ "def", "takes_multiple_arguments", "(", "func", ")", ":", "if", "(", "func", "in", "ONE_ARITY_BUILTINS", ")", ":", "return", "False", "elif", "(", "func", "in", "MULTI_ARITY_BUILTINS", ")", ":", "return", "True", "try", ":", "spec", "=", "getargspec", "(", "func", ")", "except", ":", "return", "False", "try", ":", "is_constructor", "=", "(", "(", "spec", ".", "args", "[", "0", "]", "==", "'self'", ")", "and", "isinstance", "(", "func", ",", "type", ")", ")", "except", ":", "is_constructor", "=", "False", "if", "spec", ".", "varargs", ":", "return", "True", "if", "(", "spec", ".", "defaults", "is", "None", ")", ":", "return", "(", "(", "len", "(", "spec", ".", "args", ")", "-", "is_constructor", ")", "!=", "1", ")", "return", "(", "(", "(", "len", "(", "spec", ".", "args", ")", "-", "len", "(", "spec", ".", "defaults", ")", ")", "-", "is_constructor", ")", ">", "1", ")" ]
does this function take multiple arguments? .
train
false
19,535
def all_pairs_bellman_ford_path_length(G, cutoff=None, weight='weight'): length = single_source_bellman_ford_path_length for n in G: (yield (n, dict(length(G, n, cutoff=cutoff, weight=weight))))
[ "def", "all_pairs_bellman_ford_path_length", "(", "G", ",", "cutoff", "=", "None", ",", "weight", "=", "'weight'", ")", ":", "length", "=", "single_source_bellman_ford_path_length", "for", "n", "in", "G", ":", "(", "yield", "(", "n", ",", "dict", "(", "length", "(", "G", ",", "n", ",", "cutoff", "=", "cutoff", ",", "weight", "=", "weight", ")", ")", ")", ")" ]
compute shortest path lengths between all nodes in a weighted graph .
train
false
19,536
def PylonsVersion(): import pylons return distutils.version.LooseVersion(pylons.__version__)
[ "def", "PylonsVersion", "(", ")", ":", "import", "pylons", "return", "distutils", ".", "version", ".", "LooseVersion", "(", "pylons", ".", "__version__", ")" ]
discover the version of pylons installed .
train
false
19,537
def replace_word(word_input, one, two): regex = re.compile(('\\W(%s)(\\W|$)' % one), re.I) matches = regex.findall(word_input) if matches: for unused in matches: word_input = word_input.replace(one, two) return word_input
[ "def", "replace_word", "(", "word_input", ",", "one", ",", "two", ")", ":", "regex", "=", "re", ".", "compile", "(", "(", "'\\\\W(%s)(\\\\W|$)'", "%", "one", ")", ",", "re", ".", "I", ")", "matches", "=", "regex", ".", "findall", "(", "word_input", ")", "if", "matches", ":", "for", "unused", "in", "matches", ":", "word_input", "=", "word_input", ".", "replace", "(", "one", ",", "two", ")", "return", "word_input" ]
regex replace on just words .
train
false
19,538
def urlresolve(url): res = urlget(url, stream=True, allow_redirects=False) if ((res.status_code == 302) and ('location' in res.headers)): return res.headers['location'] else: return url
[ "def", "urlresolve", "(", "url", ")", ":", "res", "=", "urlget", "(", "url", ",", "stream", "=", "True", ",", "allow_redirects", "=", "False", ")", "if", "(", "(", "res", ".", "status_code", "==", "302", ")", "and", "(", "'location'", "in", "res", ".", "headers", ")", ")", ":", "return", "res", ".", "headers", "[", "'location'", "]", "else", ":", "return", "url" ]
this function is deprecated .
train
false
19,539
def _encode_asn1_str(backend, data, length): s = backend._lib.ASN1_OCTET_STRING_new() res = backend._lib.ASN1_OCTET_STRING_set(s, data, length) backend.openssl_assert((res == 1)) return s
[ "def", "_encode_asn1_str", "(", "backend", ",", "data", ",", "length", ")", ":", "s", "=", "backend", ".", "_lib", ".", "ASN1_OCTET_STRING_new", "(", ")", "res", "=", "backend", ".", "_lib", ".", "ASN1_OCTET_STRING_set", "(", "s", ",", "data", ",", "length", ")", "backend", ".", "openssl_assert", "(", "(", "res", "==", "1", ")", ")", "return", "s" ]
create an asn1_octet_string from a python byte string .
train
false
19,540
def build_FastTree(): status('Building FastTree...') if (not app_available('gcc')): status('GCC not installed, so cannot build FastTree.\n') return if download_file('http://www.microbesonline.org/fasttree/FastTree-2.1.3.c', 'scripts/', 'FastTree.c'): status('Could not download FastTree, so not installing it.\n') return cwd = getcwd() scripts = join(cwd, 'scripts') try: chdir(scripts) if (not system_call('gcc -Wall -O3 -finline-functions -funroll-loops -o FastTree FastTree.c -lm', 'build FastTree')): return status('FastTree built.\n') finally: remove('FastTree.c') chdir(cwd)
[ "def", "build_FastTree", "(", ")", ":", "status", "(", "'Building FastTree...'", ")", "if", "(", "not", "app_available", "(", "'gcc'", ")", ")", ":", "status", "(", "'GCC not installed, so cannot build FastTree.\\n'", ")", "return", "if", "download_file", "(", "'http://www.microbesonline.org/fasttree/FastTree-2.1.3.c'", ",", "'scripts/'", ",", "'FastTree.c'", ")", ":", "status", "(", "'Could not download FastTree, so not installing it.\\n'", ")", "return", "cwd", "=", "getcwd", "(", ")", "scripts", "=", "join", "(", "cwd", ",", "'scripts'", ")", "try", ":", "chdir", "(", "scripts", ")", "if", "(", "not", "system_call", "(", "'gcc -Wall -O3 -finline-functions -funroll-loops -o FastTree FastTree.c -lm'", ",", "'build FastTree'", ")", ")", ":", "return", "status", "(", "'FastTree built.\\n'", ")", "finally", ":", "remove", "(", "'FastTree.c'", ")", "chdir", "(", "cwd", ")" ]
download and build fasttree then copy it to the scripts directory .
train
false
19,542
def create_vdi(session, sr_ref, instance, name_label, disk_type, virtual_size, read_only=False): otherconf = {'nova_disk_type': disk_type} if instance: otherconf['nova_instance_uuid'] = instance['uuid'] vdi_ref = session.call_xenapi('VDI.create', {'name_label': name_label, 'name_description': disk_type, 'SR': sr_ref, 'virtual_size': str(virtual_size), 'type': 'User', 'sharable': False, 'read_only': read_only, 'xenstore_data': {}, 'other_config': otherconf, 'sm_config': {}, 'tags': []}) LOG.debug(_('Created VDI %(vdi_ref)s (%(name_label)s, %(virtual_size)s, %(read_only)s) on %(sr_ref)s.'), locals()) return vdi_ref
[ "def", "create_vdi", "(", "session", ",", "sr_ref", ",", "instance", ",", "name_label", ",", "disk_type", ",", "virtual_size", ",", "read_only", "=", "False", ")", ":", "otherconf", "=", "{", "'nova_disk_type'", ":", "disk_type", "}", "if", "instance", ":", "otherconf", "[", "'nova_instance_uuid'", "]", "=", "instance", "[", "'uuid'", "]", "vdi_ref", "=", "session", ".", "call_xenapi", "(", "'VDI.create'", ",", "{", "'name_label'", ":", "name_label", ",", "'name_description'", ":", "disk_type", ",", "'SR'", ":", "sr_ref", ",", "'virtual_size'", ":", "str", "(", "virtual_size", ")", ",", "'type'", ":", "'User'", ",", "'sharable'", ":", "False", ",", "'read_only'", ":", "read_only", ",", "'xenstore_data'", ":", "{", "}", ",", "'other_config'", ":", "otherconf", ",", "'sm_config'", ":", "{", "}", ",", "'tags'", ":", "[", "]", "}", ")", "LOG", ".", "debug", "(", "_", "(", "'Created VDI %(vdi_ref)s (%(name_label)s, %(virtual_size)s, %(read_only)s) on %(sr_ref)s.'", ")", ",", "locals", "(", ")", ")", "return", "vdi_ref" ]
create a vdi record and returns its reference .
train
false
19,543
def save_nzb(nzb_name, nzb_string): try: with ek(open, (nzb_name + '.nzb'), 'w') as nzb_fh: nzb_fh.write(nzb_string) except EnvironmentError as error: logger.log((u'Unable to save NZB: ' + ex(error)), logger.ERROR)
[ "def", "save_nzb", "(", "nzb_name", ",", "nzb_string", ")", ":", "try", ":", "with", "ek", "(", "open", ",", "(", "nzb_name", "+", "'.nzb'", ")", ",", "'w'", ")", "as", "nzb_fh", ":", "nzb_fh", ".", "write", "(", "nzb_string", ")", "except", "EnvironmentError", "as", "error", ":", "logger", ".", "log", "(", "(", "u'Unable to save NZB: '", "+", "ex", "(", "error", ")", ")", ",", "logger", ".", "ERROR", ")" ]
save nzb to disk .
train
false
19,544
def filter_upgrade_duplicates(file_list): bases = {} for f in file_list: basekey = f.replace('.json', '') if f.endswith('.json'): bases[basekey] = f else: lookup = bases.get(basekey, '') if (not lookup.endswith('.json')): bases[basekey] = f return bases.values()
[ "def", "filter_upgrade_duplicates", "(", "file_list", ")", ":", "bases", "=", "{", "}", "for", "f", "in", "file_list", ":", "basekey", "=", "f", ".", "replace", "(", "'.json'", ",", "''", ")", "if", "f", ".", "endswith", "(", "'.json'", ")", ":", "bases", "[", "basekey", "]", "=", "f", "else", ":", "lookup", "=", "bases", ".", "get", "(", "basekey", ",", "''", ")", "if", "(", "not", "lookup", ".", "endswith", "(", "'.json'", ")", ")", ":", "bases", "[", "basekey", "]", "=", "f", "return", "bases", ".", "values", "(", ")" ]
in a set of files .
train
false
19,545
def _add_to_tfrecord(filename, tfrecord_writer, offset=0): with tf.gfile.Open(filename, 'r') as f: data = cPickle.load(f) images = data['data'] num_images = images.shape[0] images = images.reshape((num_images, 3, 32, 32)) labels = data['labels'] with tf.Graph().as_default(): image_placeholder = tf.placeholder(dtype=tf.uint8) encoded_image = tf.image.encode_png(image_placeholder) with tf.Session('') as sess: for j in range(num_images): sys.stdout.write(('\r>> Reading file [%s] image %d/%d' % (filename, ((offset + j) + 1), (offset + num_images)))) sys.stdout.flush() image = np.squeeze(images[j]).transpose((1, 2, 0)) label = labels[j] png_string = sess.run(encoded_image, feed_dict={image_placeholder: image}) example = dataset_utils.image_to_tfexample(png_string, 'png', _IMAGE_SIZE, _IMAGE_SIZE, label) tfrecord_writer.write(example.SerializeToString()) return (offset + num_images)
[ "def", "_add_to_tfrecord", "(", "filename", ",", "tfrecord_writer", ",", "offset", "=", "0", ")", ":", "with", "tf", ".", "gfile", ".", "Open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "data", "=", "cPickle", ".", "load", "(", "f", ")", "images", "=", "data", "[", "'data'", "]", "num_images", "=", "images", ".", "shape", "[", "0", "]", "images", "=", "images", ".", "reshape", "(", "(", "num_images", ",", "3", ",", "32", ",", "32", ")", ")", "labels", "=", "data", "[", "'labels'", "]", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "image_placeholder", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "uint8", ")", "encoded_image", "=", "tf", ".", "image", ".", "encode_png", "(", "image_placeholder", ")", "with", "tf", ".", "Session", "(", "''", ")", "as", "sess", ":", "for", "j", "in", "range", "(", "num_images", ")", ":", "sys", ".", "stdout", ".", "write", "(", "(", "'\\r>> Reading file [%s] image %d/%d'", "%", "(", "filename", ",", "(", "(", "offset", "+", "j", ")", "+", "1", ")", ",", "(", "offset", "+", "num_images", ")", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "image", "=", "np", ".", "squeeze", "(", "images", "[", "j", "]", ")", ".", "transpose", "(", "(", "1", ",", "2", ",", "0", ")", ")", "label", "=", "labels", "[", "j", "]", "png_string", "=", "sess", ".", "run", "(", "encoded_image", ",", "feed_dict", "=", "{", "image_placeholder", ":", "image", "}", ")", "example", "=", "dataset_utils", ".", "image_to_tfexample", "(", "png_string", ",", "'png'", ",", "_IMAGE_SIZE", ",", "_IMAGE_SIZE", ",", "label", ")", "tfrecord_writer", ".", "write", "(", "example", ".", "SerializeToString", "(", ")", ")", "return", "(", "offset", "+", "num_images", ")" ]
loads data from the binary mnist files and writes files to a tfrecord .
train
false
19,547
def deserialize_stream(stream, obj): check_qdatastream(stream) (stream >> obj) check_qdatastream(stream)
[ "def", "deserialize_stream", "(", "stream", ",", "obj", ")", ":", "check_qdatastream", "(", "stream", ")", "(", "stream", ">>", "obj", ")", "check_qdatastream", "(", "stream", ")" ]
deserialize a qdatastream into an object .
train
false
19,548
def rw_protected_resource(scopes=None, validator_cls=OAuth2Validator, server_cls=Server): _scopes = (scopes or []) def decorator(view_func): @wraps(view_func) def _validate(request, *args, **kwargs): provided_scopes = oauth2_settings._SCOPES read_write_scopes = [oauth2_settings.READ_SCOPE, oauth2_settings.WRITE_SCOPE] if (not set(read_write_scopes).issubset(set(provided_scopes))): raise ImproperlyConfigured("rw_protected_resource decorator requires following scopes {0} to be in OAUTH2_PROVIDER['SCOPES'] list in settings".format(read_write_scopes)) if (request.method.upper() in ['GET', 'HEAD', 'OPTIONS']): _scopes.append(oauth2_settings.READ_SCOPE) else: _scopes.append(oauth2_settings.WRITE_SCOPE) validator = validator_cls() core = OAuthLibCore(server_cls(validator)) (valid, oauthlib_req) = core.verify_request(request, scopes=_scopes) if valid: request.resource_owner = oauthlib_req.user return view_func(request, *args, **kwargs) return HttpResponseForbidden() return _validate return decorator
[ "def", "rw_protected_resource", "(", "scopes", "=", "None", ",", "validator_cls", "=", "OAuth2Validator", ",", "server_cls", "=", "Server", ")", ":", "_scopes", "=", "(", "scopes", "or", "[", "]", ")", "def", "decorator", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ")", "def", "_validate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "provided_scopes", "=", "oauth2_settings", ".", "_SCOPES", "read_write_scopes", "=", "[", "oauth2_settings", ".", "READ_SCOPE", ",", "oauth2_settings", ".", "WRITE_SCOPE", "]", "if", "(", "not", "set", "(", "read_write_scopes", ")", ".", "issubset", "(", "set", "(", "provided_scopes", ")", ")", ")", ":", "raise", "ImproperlyConfigured", "(", "\"rw_protected_resource decorator requires following scopes {0} to be in OAUTH2_PROVIDER['SCOPES'] list in settings\"", ".", "format", "(", "read_write_scopes", ")", ")", "if", "(", "request", ".", "method", ".", "upper", "(", ")", "in", "[", "'GET'", ",", "'HEAD'", ",", "'OPTIONS'", "]", ")", ":", "_scopes", ".", "append", "(", "oauth2_settings", ".", "READ_SCOPE", ")", "else", ":", "_scopes", ".", "append", "(", "oauth2_settings", ".", "WRITE_SCOPE", ")", "validator", "=", "validator_cls", "(", ")", "core", "=", "OAuthLibCore", "(", "server_cls", "(", "validator", ")", ")", "(", "valid", ",", "oauthlib_req", ")", "=", "core", ".", "verify_request", "(", "request", ",", "scopes", "=", "_scopes", ")", "if", "valid", ":", "request", ".", "resource_owner", "=", "oauthlib_req", ".", "user", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "HttpResponseForbidden", "(", ")", "return", "_validate", "return", "decorator" ]
decorator to protect views by providing oauth2 authentication and read/write scopes out of the box .
train
false
19,550
def try_or_none(f): def f_or_none(x): try: return f(x) except: return None return f_or_none
[ "def", "try_or_none", "(", "f", ")", ":", "def", "f_or_none", "(", "x", ")", ":", "try", ":", "return", "f", "(", "x", ")", "except", ":", "return", "None", "return", "f_or_none" ]
wraps f to return none if f raises an exception assumes f takes only one input .
train
false
19,551
def if_no_repeat(event): return (not event.is_repeat)
[ "def", "if_no_repeat", "(", "event", ")", ":", "return", "(", "not", "event", ".", "is_repeat", ")" ]
callable that returns true when the previous event was delivered to another handler .
train
false
19,552
def _wake(NSApp): event = msg(C('NSEvent'), n('otherEventWithType:location:modifierFlags:timestamp:windowNumber:context:subtype:data1:data2:'), 15, 0, 0, 0, 0, None, 0, 0, 0) msg(NSApp, n('postEvent:atStart:'), void_p(event), True)
[ "def", "_wake", "(", "NSApp", ")", ":", "event", "=", "msg", "(", "C", "(", "'NSEvent'", ")", ",", "n", "(", "'otherEventWithType:location:modifierFlags:timestamp:windowNumber:context:subtype:data1:data2:'", ")", ",", "15", ",", "0", ",", "0", ",", "0", ",", "0", ",", "None", ",", "0", ",", "0", ",", "0", ")", "msg", "(", "NSApp", ",", "n", "(", "'postEvent:atStart:'", ")", ",", "void_p", "(", "event", ")", ",", "True", ")" ]
wake the application .
train
false
19,553
def test_scenario_has_steps(): scenario = Scenario.from_string(SCENARIO1) assert_equals(type(scenario.steps), list) assert_equals(len(scenario.steps), 4, 'It should have 4 steps') expected_sentences = ['Given I have the following courses in my university:', "When I consolidate the database into 'courses.txt'", "Then I see the 1st line of 'courses.txt' has 'Computer Science:5'", "And I see the 2nd line of 'courses.txt' has 'Nutrition:4'"] for (step, expected_sentence) in zip(scenario.steps, expected_sentences): assert_equals(type(step), Step) assert_equals(step.sentence, expected_sentence) assert_equals(scenario.steps[0].keys, ('Name', 'Duration')) assert_equals(scenario.steps[0].hashes, [{'Name': 'Computer Science', 'Duration': '5 years'}, {'Name': 'Nutrition', 'Duration': '4 years'}])
[ "def", "test_scenario_has_steps", "(", ")", ":", "scenario", "=", "Scenario", ".", "from_string", "(", "SCENARIO1", ")", "assert_equals", "(", "type", "(", "scenario", ".", "steps", ")", ",", "list", ")", "assert_equals", "(", "len", "(", "scenario", ".", "steps", ")", ",", "4", ",", "'It should have 4 steps'", ")", "expected_sentences", "=", "[", "'Given I have the following courses in my university:'", ",", "\"When I consolidate the database into 'courses.txt'\"", ",", "\"Then I see the 1st line of 'courses.txt' has 'Computer Science:5'\"", ",", "\"And I see the 2nd line of 'courses.txt' has 'Nutrition:4'\"", "]", "for", "(", "step", ",", "expected_sentence", ")", "in", "zip", "(", "scenario", ".", "steps", ",", "expected_sentences", ")", ":", "assert_equals", "(", "type", "(", "step", ")", ",", "Step", ")", "assert_equals", "(", "step", ".", "sentence", ",", "expected_sentence", ")", "assert_equals", "(", "scenario", ".", "steps", "[", "0", "]", ".", "keys", ",", "(", "'Name'", ",", "'Duration'", ")", ")", "assert_equals", "(", "scenario", ".", "steps", "[", "0", "]", ".", "hashes", ",", "[", "{", "'Name'", ":", "'Computer Science'", ",", "'Duration'", ":", "'5 years'", "}", ",", "{", "'Name'", ":", "'Nutrition'", ",", "'Duration'", ":", "'4 years'", "}", "]", ")" ]
a scenario object should have a list of steps .
train
false
19,554
def linkcheck(): os.system('sphinx-build -b linkcheck -d build/doctrees . build/linkcheck')
[ "def", "linkcheck", "(", ")", ":", "os", ".", "system", "(", "'sphinx-build -b linkcheck -d build/doctrees . build/linkcheck'", ")" ]
execute sphinx linkcheck target .
train
false
19,555
def test_relative_luminance(): out1 = utils.relative_luminance('white') assert_equal(out1, 1) out2 = utils.relative_luminance('#000000') assert_equal(out2, 0) out3 = utils.relative_luminance((0.25, 0.5, 0.75)) nose.tools.assert_almost_equal(out3, 0.201624536) rgbs = mpl.cm.RdBu(np.linspace(0, 1, 10)) lums1 = [utils.relative_luminance(rgb) for rgb in rgbs] lums2 = utils.relative_luminance(rgbs) for (lum1, lum2) in zip(lums1, lums2): nose.tools.assert_almost_equal(lum1, lum2)
[ "def", "test_relative_luminance", "(", ")", ":", "out1", "=", "utils", ".", "relative_luminance", "(", "'white'", ")", "assert_equal", "(", "out1", ",", "1", ")", "out2", "=", "utils", ".", "relative_luminance", "(", "'#000000'", ")", "assert_equal", "(", "out2", ",", "0", ")", "out3", "=", "utils", ".", "relative_luminance", "(", "(", "0.25", ",", "0.5", ",", "0.75", ")", ")", "nose", ".", "tools", ".", "assert_almost_equal", "(", "out3", ",", "0.201624536", ")", "rgbs", "=", "mpl", ".", "cm", ".", "RdBu", "(", "np", ".", "linspace", "(", "0", ",", "1", ",", "10", ")", ")", "lums1", "=", "[", "utils", ".", "relative_luminance", "(", "rgb", ")", "for", "rgb", "in", "rgbs", "]", "lums2", "=", "utils", ".", "relative_luminance", "(", "rgbs", ")", "for", "(", "lum1", ",", "lum2", ")", "in", "zip", "(", "lums1", ",", "lums2", ")", ":", "nose", ".", "tools", ".", "assert_almost_equal", "(", "lum1", ",", "lum2", ")" ]
test relative luminance .
train
false
19,556
def _load_tristan_coil_locs(coil_loc_path): channel_info = dict() with open(coil_loc_path, 'r') as fid: fid.readline() fid.readline() for line in fid: line = line.strip() vals = line.split(',') channel_info[vals[0]] = dict() if vals[6]: channel_info[vals[0]]['inner_coil'] = np.array(vals[2:5], np.float) channel_info[vals[0]]['outer_coil'] = np.array(vals[5:8], np.float) else: channel_info[vals[0]]['inner_coil'] = np.zeros(3) channel_info[vals[0]]['outer_coil'] = np.zeros(3) return channel_info
[ "def", "_load_tristan_coil_locs", "(", "coil_loc_path", ")", ":", "channel_info", "=", "dict", "(", ")", "with", "open", "(", "coil_loc_path", ",", "'r'", ")", "as", "fid", ":", "fid", ".", "readline", "(", ")", "fid", ".", "readline", "(", ")", "for", "line", "in", "fid", ":", "line", "=", "line", ".", "strip", "(", ")", "vals", "=", "line", ".", "split", "(", "','", ")", "channel_info", "[", "vals", "[", "0", "]", "]", "=", "dict", "(", ")", "if", "vals", "[", "6", "]", ":", "channel_info", "[", "vals", "[", "0", "]", "]", "[", "'inner_coil'", "]", "=", "np", ".", "array", "(", "vals", "[", "2", ":", "5", "]", ",", "np", ".", "float", ")", "channel_info", "[", "vals", "[", "0", "]", "]", "[", "'outer_coil'", "]", "=", "np", ".", "array", "(", "vals", "[", "5", ":", "8", "]", ",", "np", ".", "float", ")", "else", ":", "channel_info", "[", "vals", "[", "0", "]", "]", "[", "'inner_coil'", "]", "=", "np", ".", "zeros", "(", "3", ")", "channel_info", "[", "vals", "[", "0", "]", "]", "[", "'outer_coil'", "]", "=", "np", ".", "zeros", "(", "3", ")", "return", "channel_info" ]
load the coil locations from tristan cad drawings .
train
false
19,557
def get_elapsed_time(start_time, end_time): if ((start_time == end_time) or (not (start_time and end_time))): return 0 if (start_time[:(-4)] == end_time[:(-4)]): return (int(end_time[(-3):]) - int(start_time[(-3):])) start_millis = _timestamp_to_millis(start_time) end_millis = _timestamp_to_millis(end_time) return int((end_millis - start_millis))
[ "def", "get_elapsed_time", "(", "start_time", ",", "end_time", ")", ":", "if", "(", "(", "start_time", "==", "end_time", ")", "or", "(", "not", "(", "start_time", "and", "end_time", ")", ")", ")", ":", "return", "0", "if", "(", "start_time", "[", ":", "(", "-", "4", ")", "]", "==", "end_time", "[", ":", "(", "-", "4", ")", "]", ")", ":", "return", "(", "int", "(", "end_time", "[", "(", "-", "3", ")", ":", "]", ")", "-", "int", "(", "start_time", "[", "(", "-", "3", ")", ":", "]", ")", ")", "start_millis", "=", "_timestamp_to_millis", "(", "start_time", ")", "end_millis", "=", "_timestamp_to_millis", "(", "end_time", ")", "return", "int", "(", "(", "end_millis", "-", "start_millis", ")", ")" ]
gets the elapsed time between two events .
train
false
19,558
def readPlist(pathOrFile): didOpen = False result = None if isinstance(pathOrFile, (bytes, unicode)): pathOrFile = open(pathOrFile, 'rb') didOpen = True try: reader = PlistReader(pathOrFile) result = reader.parse() except NotBinaryPlistException as e: try: pathOrFile.seek(0) result = None if hasattr(plistlib, 'loads'): contents = None if isinstance(pathOrFile, (bytes, unicode)): with open(pathOrFile, 'rb') as f: contents = f.read() else: contents = pathOrFile.read() result = plistlib.loads(contents) else: result = plistlib.readPlist(pathOrFile) result = wrapDataObject(result, for_binary=True) except Exception as e: raise InvalidPlistException(e) finally: if didOpen: pathOrFile.close() return result
[ "def", "readPlist", "(", "pathOrFile", ")", ":", "didOpen", "=", "False", "result", "=", "None", "if", "isinstance", "(", "pathOrFile", ",", "(", "bytes", ",", "unicode", ")", ")", ":", "pathOrFile", "=", "open", "(", "pathOrFile", ",", "'rb'", ")", "didOpen", "=", "True", "try", ":", "reader", "=", "PlistReader", "(", "pathOrFile", ")", "result", "=", "reader", ".", "parse", "(", ")", "except", "NotBinaryPlistException", "as", "e", ":", "try", ":", "pathOrFile", ".", "seek", "(", "0", ")", "result", "=", "None", "if", "hasattr", "(", "plistlib", ",", "'loads'", ")", ":", "contents", "=", "None", "if", "isinstance", "(", "pathOrFile", ",", "(", "bytes", ",", "unicode", ")", ")", ":", "with", "open", "(", "pathOrFile", ",", "'rb'", ")", "as", "f", ":", "contents", "=", "f", ".", "read", "(", ")", "else", ":", "contents", "=", "pathOrFile", ".", "read", "(", ")", "result", "=", "plistlib", ".", "loads", "(", "contents", ")", "else", ":", "result", "=", "plistlib", ".", "readPlist", "(", "pathOrFile", ")", "result", "=", "wrapDataObject", "(", "result", ",", "for_binary", "=", "True", ")", "except", "Exception", "as", "e", ":", "raise", "InvalidPlistException", "(", "e", ")", "finally", ":", "if", "didOpen", ":", "pathOrFile", ".", "close", "(", ")", "return", "result" ]
raises notbinaryplistexception .
train
true
19,559
def test_join_oneInput(): x_0 = theano.tensor.fmatrix() x_1 = theano.tensor.fmatrix() x_2 = theano.tensor.fvector() join_0 = theano.tensor.concatenate([x_0], axis=1) join_1 = theano.tensor.concatenate([x_0, x_1, theano.tensor.shape_padright(x_2)], axis=1) assert (join_0 is x_0) assert (join_1 is not x_0)
[ "def", "test_join_oneInput", "(", ")", ":", "x_0", "=", "theano", ".", "tensor", ".", "fmatrix", "(", ")", "x_1", "=", "theano", ".", "tensor", ".", "fmatrix", "(", ")", "x_2", "=", "theano", ".", "tensor", ".", "fvector", "(", ")", "join_0", "=", "theano", ".", "tensor", ".", "concatenate", "(", "[", "x_0", "]", ",", "axis", "=", "1", ")", "join_1", "=", "theano", ".", "tensor", ".", "concatenate", "(", "[", "x_0", ",", "x_1", ",", "theano", ".", "tensor", ".", "shape_padright", "(", "x_2", ")", "]", ",", "axis", "=", "1", ")", "assert", "(", "join_0", "is", "x_0", ")", "assert", "(", "join_1", "is", "not", "x_0", ")" ]
test join when only 1 input is given .
train
false
19,560
def parse_override_var(s): return (parsedotval(item) for item in s.split(u';') if item)
[ "def", "parse_override_var", "(", "s", ")", ":", "return", "(", "parsedotval", "(", "item", ")", "for", "item", "in", "s", ".", "split", "(", "u';'", ")", "if", "item", ")" ]
parse a semicolon-separated list of strings into a sequence of values emits the same items in sequence as :py:func:parsedotval does .
train
false