id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
44,316
def _conn_info_check(infoblox_server=None, infoblox_user=None, infoblox_password=None):
    """Fill in any missing Infoblox connection settings from pillar data.

    Values passed explicitly by the caller win; each one that is None is
    looked up under the ``infoblox:`` pillar namespace instead.

    Returns:
        A (server, user, password) tuple.
    """
    if infoblox_server is None:
        infoblox_server = __salt__['pillar.get']('infoblox:server', None)
    if infoblox_user is None:
        infoblox_user = __salt__['pillar.get']('infoblox:user', None)
    # Logged after resolution so a pillar-supplied username shows up too.
    log.debug('Infoblox username is "{0}"'.format(infoblox_user))
    if infoblox_password is None:
        infoblox_password = __salt__['pillar.get']('infoblox:password', None)
    return (infoblox_server, infoblox_user, infoblox_password)
[ "def", "_conn_info_check", "(", "infoblox_server", "=", "None", ",", "infoblox_user", "=", "None", ",", "infoblox_password", "=", "None", ")", ":", "if", "(", "infoblox_server", "is", "None", ")", ":", "infoblox_server", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'infoblox:server'", ",", "None", ")", "if", "(", "infoblox_user", "is", "None", ")", ":", "infoblox_user", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'infoblox:user'", ",", "None", ")", "log", ".", "debug", "(", "'Infoblox username is \"{0}\"'", ".", "format", "(", "infoblox_user", ")", ")", "if", "(", "infoblox_password", "is", "None", ")", ":", "infoblox_password", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'infoblox:password'", ",", "None", ")", "return", "(", "infoblox_server", ",", "infoblox_user", ",", "infoblox_password", ")" ]
get infoblox stuff from pillar if not passed .
train
false
44,317
def get_plugin_classes(cls, showall=False, lower=False):
    """Return a dictionary mapping plugin names to plugin classes.

    Walks every subclass of *cls*. Unless ``showall`` is set, classes
    named 'Abstract*', the base class itself, and a small blacklist of
    address-space helpers are skipped. ``lower`` lowercases the keys.

    Raises:
        Exception: if two plugins resolve to the same name.
    """
    hidden_when_filtered = ('BufferAddressSpace', 'HiveFileAddressSpace', 'HiveAddressSpace')
    result = {}
    for plugin in set(_get_subclasses(cls)):
        if not showall:
            # Guard clauses instead of the original nested conditionals.
            if plugin.__name__.startswith('Abstract') or plugin == cls:
                continue
            if plugin.__name__ in hidden_when_filtered:
                continue
        name = plugin.__name__.split('.')[-1]
        if lower:
            name = name.lower()
        if name in result:
            raise Exception('Object {0} has already been defined by {1}'.format(name, plugin))
        result[name] = plugin
    return result
[ "def", "get_plugin_classes", "(", "cls", ",", "showall", "=", "False", ",", "lower", "=", "False", ")", ":", "result", "=", "{", "}", "for", "plugin", "in", "set", "(", "_get_subclasses", "(", "cls", ")", ")", ":", "if", "(", "showall", "or", "(", "not", "(", "plugin", ".", "__name__", ".", "startswith", "(", "'Abstract'", ")", "or", "(", "plugin", "==", "cls", ")", ")", ")", ")", ":", "if", "(", "(", "not", "showall", ")", "and", "(", "plugin", ".", "__name__", "in", "[", "'BufferAddressSpace'", ",", "'HiveFileAddressSpace'", ",", "'HiveAddressSpace'", "]", ")", ")", ":", "continue", "name", "=", "plugin", ".", "__name__", ".", "split", "(", "'.'", ")", "[", "(", "-", "1", ")", "]", "if", "lower", ":", "name", "=", "name", ".", "lower", "(", ")", "if", "(", "name", "not", "in", "result", ")", ":", "result", "[", "name", "]", "=", "plugin", "else", ":", "raise", "Exception", "(", "'Object {0} has already been defined by {1}'", ".", "format", "(", "name", ",", "plugin", ")", ")", "return", "result" ]
returns a dictionary of plugins .
train
false
44,318
def represent(obj):
    """Return a stable representation of *obj*'s value.

    Prefers the pickle serialization (highest protocol, returns bytes);
    objects that cannot be pickled fall back to their repr() (a str).
    """
    try:
        return pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
    except Exception:
        # Was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; Exception limits the fallback
        # to genuine pickling failures.
        return repr(obj)
[ "def", "represent", "(", "obj", ")", ":", "try", ":", "return", "pickle", ".", "dumps", "(", "obj", ",", "pickle", ".", "HIGHEST_PROTOCOL", ")", "except", ":", "return", "repr", "(", "obj", ")" ]
returns a string representing the given objects value .
train
false
44,319
def test_compound_model_classify_attributes():
    """Regression test: inspect.classify_class_attrs must not raise on a
    compound model class (part of the help() machinery calls it).
    """
    inspect.classify_class_attrs((Gaussian1D + Gaussian1D))
[ "def", "test_compound_model_classify_attributes", "(", ")", ":", "inspect", ".", "classify_class_attrs", "(", "(", "Gaussian1D", "+", "Gaussian1D", ")", ")" ]
regression test for an issue raised here: URL#discussion_r22221123 the issue is that part of the help implementation calls a utility function called inspect .
train
false
44,323
def getVector3TransformedByMatrix(matrixTetragrid, vector3):
    """Return *vector3* transformed by the matrix *matrixTetragrid*.

    A None matrix is treated as the identity: a copy of the input vector
    is returned so the caller never aliases the original.
    """
    if matrixTetragrid is None:  # fixed: was `== None` (identity check is idiomatic and safe)
        return vector3.copy()
    return Vector3(
        getTransformedByList(matrixTetragrid[0], vector3),
        getTransformedByList(matrixTetragrid[1], vector3),
        getTransformedByList(matrixTetragrid[2], vector3))
[ "def", "getVector3TransformedByMatrix", "(", "matrixTetragrid", ",", "vector3", ")", ":", "if", "(", "matrixTetragrid", "==", "None", ")", ":", "return", "vector3", ".", "copy", "(", ")", "return", "Vector3", "(", "getTransformedByList", "(", "matrixTetragrid", "[", "0", "]", ",", "vector3", ")", ",", "getTransformedByList", "(", "matrixTetragrid", "[", "1", "]", ",", "vector3", ")", ",", "getTransformedByList", "(", "matrixTetragrid", "[", "2", "]", ",", "vector3", ")", ")" ]
get the vector3 multiplied by a matrix .
train
false
44,324
def status_from_state(vm_state, task_state='default'):
    """Map a (vm_state, task_state) pair to an API status string.

    Unknown vm_states map to 'UNKNOWN'; within a known vm_state an
    unrecognized task_state falls back to that state's 'default' entry.
    An UNKNOWN result is logged as a likely upgrade / DB corruption sign.
    """
    per_state_map = _STATE_MAP.get(vm_state, dict(default='UNKNOWN'))
    status = per_state_map.get(task_state, per_state_map['default'])
    if status != 'UNKNOWN':
        return status
    LOG.error(_LE('status is UNKNOWN from vm_state=%(vm_state)s task_state=%(task_state)s. Bad upgrade or db corrupted?'),
              {'vm_state': vm_state, 'task_state': task_state})
    return status
[ "def", "status_from_state", "(", "vm_state", ",", "task_state", "=", "'default'", ")", ":", "task_map", "=", "_STATE_MAP", ".", "get", "(", "vm_state", ",", "dict", "(", "default", "=", "'UNKNOWN'", ")", ")", "status", "=", "task_map", ".", "get", "(", "task_state", ",", "task_map", "[", "'default'", "]", ")", "if", "(", "status", "==", "'UNKNOWN'", ")", ":", "LOG", ".", "error", "(", "_LE", "(", "'status is UNKNOWN from vm_state=%(vm_state)s task_state=%(task_state)s. Bad upgrade or db corrupted?'", ")", ",", "{", "'vm_state'", ":", "vm_state", ",", "'task_state'", ":", "task_state", "}", ")", "return", "status" ]
given vm_state and task_state .
train
false
44,325
@library.global_function
def lists_diff(list1, list2):
    """Return elements of list1 absent from list2, with None dropped.

    Set-based, so duplicates collapse and ordering is whatever the set
    difference yields (same semantics as the original filter form).
    """
    return [item for item in (set(list1) - set(list2)) if item is not None]
[ "@", "library", ".", "global_function", "def", "lists_diff", "(", "list1", ",", "list2", ")", ":", "return", "list", "(", "filter", "(", "None", ".", "__ne__", ",", "(", "set", "(", "list1", ")", "-", "set", "(", "list2", ")", ")", ")", ")" ]
get the difference of two list and remove none values .
train
false
44,326
def computePreRec(CM, classNames):
    """Compute per-class Precision, Recall and F1 from a confusion matrix.

    CM is a square numpy array where CM[i, j] counts samples of true
    class i predicted as class j; classNames must have one entry per
    class. Returns (Recall, Precision, F1) lists, or None (with an error
    message) on a size mismatch.

    NOTE(review): Python 2 print statement. Also divides by row/column
    sums and by (Precision + Recall) without guarding against zero --
    classes with no samples or no predictions yield NaN or a
    ZeroDivisionError; confirm inputs before relying on this.
    """
    numOfClasses = CM.shape[0]
    if (len(classNames) != numOfClasses):
        print 'Error in computePreRec! Confusion matrix and classNames list must be of the same size!'
        return
    Precision = []
    Recall = []
    F1 = []
    for (i, c) in enumerate(classNames):
        # Column sum = everything predicted as class i; row sum = everything truly class i.
        Precision.append((CM[(i, i)] / numpy.sum(CM[:, i])))
        Recall.append((CM[(i, i)] / numpy.sum(CM[i, :])))
        F1.append((((2 * Precision[(-1)]) * Recall[(-1)]) / (Precision[(-1)] + Recall[(-1)])))
    return (Recall, Precision, F1)
[ "def", "computePreRec", "(", "CM", ",", "classNames", ")", ":", "numOfClasses", "=", "CM", ".", "shape", "[", "0", "]", "if", "(", "len", "(", "classNames", ")", "!=", "numOfClasses", ")", ":", "print", "'Error in computePreRec! Confusion matrix and classNames list must be of the same size!'", "return", "Precision", "=", "[", "]", "Recall", "=", "[", "]", "F1", "=", "[", "]", "for", "(", "i", ",", "c", ")", "in", "enumerate", "(", "classNames", ")", ":", "Precision", ".", "append", "(", "(", "CM", "[", "(", "i", ",", "i", ")", "]", "/", "numpy", ".", "sum", "(", "CM", "[", ":", ",", "i", "]", ")", ")", ")", "Recall", ".", "append", "(", "(", "CM", "[", "(", "i", ",", "i", ")", "]", "/", "numpy", ".", "sum", "(", "CM", "[", "i", ",", ":", "]", ")", ")", ")", "F1", ".", "append", "(", "(", "(", "(", "2", "*", "Precision", "[", "(", "-", "1", ")", "]", ")", "*", "Recall", "[", "(", "-", "1", ")", "]", ")", "/", "(", "Precision", "[", "(", "-", "1", ")", "]", "+", "Recall", "[", "(", "-", "1", ")", "]", ")", ")", ")", "return", "(", "Recall", ",", "Precision", ",", "F1", ")" ]
this function computes the precision .
train
false
44,328
def libvlc_media_player_set_video_title_display(p_mi, position, timeout):
    """Set the policy for displaying the video title when new media plays.

    Thin ctypes wrapper: resolves the native libvlc symbol once, caches
    it in _Cfunctions, then forwards (p_mi, position, timeout).

    @param p_mi: the media player.
    @param position: a Position value selecting where the title appears.
    @param timeout: title display duration -- presumably milliseconds;
        TODO confirm units against the libvlc headers.
    """
    f = (_Cfunctions.get('libvlc_media_player_set_video_title_display', None) or
         _Cfunction('libvlc_media_player_set_video_title_display',
                    ((1,), (1,), (1,)), None, None,
                    MediaPlayer, Position, ctypes.c_int))
    return f(p_mi, position, timeout)
[ "def", "libvlc_media_player_set_video_title_display", "(", "p_mi", ",", "position", ",", "timeout", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_player_set_video_title_display'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_player_set_video_title_display'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "Position", ",", "ctypes", ".", "c_int", ")", ")", "return", "f", "(", "p_mi", ",", "position", ",", "timeout", ")" ]
set if .
train
true
44,329
def get_key_filename(vm_):
    """Resolve the VM profile's ``ssh_private_key`` setting.

    Returns the user-expanded path to the key file, or None when no key
    is configured.

    Raises:
        SaltCloudConfigError: if the configured path is not an existing file.
    """
    key_filename = config.get_cloud_config_value(
        'ssh_private_key', vm_, __opts__, search_global=False, default=None)
    if key_filename is None:
        return None
    key_filename = os.path.expanduser(key_filename)
    if not os.path.isfile(key_filename):
        raise SaltCloudConfigError(
            "The defined ssh_private_key '{0}' does not exist".format(key_filename))
    return key_filename
[ "def", "get_key_filename", "(", "vm_", ")", ":", "key_filename", "=", "config", ".", "get_cloud_config_value", "(", "'ssh_private_key'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ",", "default", "=", "None", ")", "if", "(", "key_filename", "is", "not", "None", ")", ":", "key_filename", "=", "os", ".", "path", ".", "expanduser", "(", "key_filename", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "key_filename", ")", ")", ":", "raise", "SaltCloudConfigError", "(", "\"The defined ssh_private_key '{0}' does not exist\"", ".", "format", "(", "key_filename", ")", ")", "return", "key_filename" ]
check ssh private key file and return absolute path if exists .
train
true
44,330
def _ellipse_in_shape(shape, center, radii, rotation=0.0): (r_lim, c_lim) = np.ogrid[0:float(shape[0]), 0:float(shape[1])] (r_org, c_org) = center (r_rad, c_rad) = radii rotation %= np.pi (sin_alpha, cos_alpha) = (np.sin(rotation), np.cos(rotation)) (r, c) = ((r_lim - r_org), (c_lim - c_org)) distances = (((((r * cos_alpha) + (c * sin_alpha)) / r_rad) ** 2) + ((((r * sin_alpha) - (c * cos_alpha)) / c_rad) ** 2)) return np.nonzero((distances < 1))
[ "def", "_ellipse_in_shape", "(", "shape", ",", "center", ",", "radii", ",", "rotation", "=", "0.0", ")", ":", "(", "r_lim", ",", "c_lim", ")", "=", "np", ".", "ogrid", "[", "0", ":", "float", "(", "shape", "[", "0", "]", ")", ",", "0", ":", "float", "(", "shape", "[", "1", "]", ")", "]", "(", "r_org", ",", "c_org", ")", "=", "center", "(", "r_rad", ",", "c_rad", ")", "=", "radii", "rotation", "%=", "np", ".", "pi", "(", "sin_alpha", ",", "cos_alpha", ")", "=", "(", "np", ".", "sin", "(", "rotation", ")", ",", "np", ".", "cos", "(", "rotation", ")", ")", "(", "r", ",", "c", ")", "=", "(", "(", "r_lim", "-", "r_org", ")", ",", "(", "c_lim", "-", "c_org", ")", ")", "distances", "=", "(", "(", "(", "(", "(", "r", "*", "cos_alpha", ")", "+", "(", "c", "*", "sin_alpha", ")", ")", "/", "r_rad", ")", "**", "2", ")", "+", "(", "(", "(", "(", "r", "*", "sin_alpha", ")", "-", "(", "c", "*", "cos_alpha", ")", ")", "/", "c_rad", ")", "**", "2", ")", ")", "return", "np", ".", "nonzero", "(", "(", "distances", "<", "1", ")", ")" ]
generate coordinates of points within ellipse bounded by shape .
train
false
44,331
def test_force_signing(monkeypatch):
    """You can force signing an add-on even if it is already signed."""
    def expect_not_forced(ids, force, reason):
        assert not force

    monkeypatch.setattr(SIGN_ADDONS, expect_not_forced)
    call_command('sign_addons', 123)

    def expect_forced(ids, force, reason):
        assert force

    monkeypatch.setattr(SIGN_ADDONS, expect_forced)
    call_command('sign_addons', 123, force=True)
[ "def", "test_force_signing", "(", "monkeypatch", ")", ":", "def", "not_forced", "(", "ids", ",", "force", ",", "reason", ")", ":", "assert", "(", "not", "force", ")", "monkeypatch", ".", "setattr", "(", "SIGN_ADDONS", ",", "not_forced", ")", "call_command", "(", "'sign_addons'", ",", "123", ")", "def", "is_forced", "(", "ids", ",", "force", ",", "reason", ")", ":", "assert", "force", "monkeypatch", ".", "setattr", "(", "SIGN_ADDONS", ",", "is_forced", ")", "call_command", "(", "'sign_addons'", ",", "123", ",", "force", "=", "True", ")" ]
you can force signing an addon even if its already signed .
train
false
44,332
def WriteComponent(name='grr-rekall', version='0.4', build_system=None, modules=None, token=None, raw_data=''):
    """Create, sign and return a fake client component (test helper).

    When build_system is not given it is derived from the current system,
    with platform.libc_ver stubbed to a fixed ('glibc', '2.3') so the
    result is deterministic across hosts. The component is serialized to
    a temporary file and handed to maintenance_utils.SignComponent, whose
    return value is passed back to the caller.
    """
    components_base = 'grr.client.components.rekall_support.'
    if (modules is None):
        modules = [(components_base + 'grr_rekall')]
    result = rdf_client.ClientComponent(raw_data=raw_data)
    if build_system:
        result.build_system = build_system
    else:
        # Pin libc_ver so FromCurrentSystem produces a stable build string.
        with utils.Stubber(platform, 'libc_ver', (lambda : ('glibc', '2.3'))):
            result.build_system = result.build_system.FromCurrentSystem()
    result.summary.modules = modules
    result.summary.name = name
    result.summary.version = version
    result.summary.cipher = rdf_crypto.SymmetricCipher.Generate('AES128CBC')
    with utils.TempDirectory() as tmp_dir:
        # Close the file before signing so the serialized bytes are flushed.
        with open(os.path.join(tmp_dir, 'component'), 'wb') as fd:
            fd.write(result.SerializeToString())
        return maintenance_utils.SignComponent(fd.name, token=token)
[ "def", "WriteComponent", "(", "name", "=", "'grr-rekall'", ",", "version", "=", "'0.4'", ",", "build_system", "=", "None", ",", "modules", "=", "None", ",", "token", "=", "None", ",", "raw_data", "=", "''", ")", ":", "components_base", "=", "'grr.client.components.rekall_support.'", "if", "(", "modules", "is", "None", ")", ":", "modules", "=", "[", "(", "components_base", "+", "'grr_rekall'", ")", "]", "result", "=", "rdf_client", ".", "ClientComponent", "(", "raw_data", "=", "raw_data", ")", "if", "build_system", ":", "result", ".", "build_system", "=", "build_system", "else", ":", "with", "utils", ".", "Stubber", "(", "platform", ",", "'libc_ver'", ",", "(", "lambda", ":", "(", "'glibc'", ",", "'2.3'", ")", ")", ")", ":", "result", ".", "build_system", "=", "result", ".", "build_system", ".", "FromCurrentSystem", "(", ")", "result", ".", "summary", ".", "modules", "=", "modules", "result", ".", "summary", ".", "name", "=", "name", "result", ".", "summary", ".", "version", "=", "version", "result", ".", "summary", ".", "cipher", "=", "rdf_crypto", ".", "SymmetricCipher", ".", "Generate", "(", "'AES128CBC'", ")", "with", "utils", ".", "TempDirectory", "(", ")", "as", "tmp_dir", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "tmp_dir", ",", "'component'", ")", ",", "'wb'", ")", "as", "fd", ":", "fd", ".", "write", "(", "result", ".", "SerializeToString", "(", ")", ")", "return", "maintenance_utils", ".", "SignComponent", "(", "fd", ".", "name", ",", "token", "=", "token", ")" ]
create a fake component .
train
false
44,333
def count_iterables(iterables, synchronize=False):
    """Return the number of iterable-expansion nodes.

    ``iterables`` maps names to zero-argument callables that return a
    sized iterable. Unsynchronized expansion multiplies all lengths
    (cross product); synchronized expansion advances the iterables in
    lockstep, so the longest one dictates the count.
    """
    lengths = [len(make()) for make in iterables.values()]
    if synchronize:
        return reduce(max, lengths)
    return reduce(lambda a, b: a * b, lengths)
[ "def", "count_iterables", "(", "iterables", ",", "synchronize", "=", "False", ")", ":", "if", "synchronize", ":", "op", "=", "max", "else", ":", "op", "=", "(", "lambda", "x", ",", "y", ":", "(", "x", "*", "y", ")", ")", "return", "reduce", "(", "op", ",", "[", "len", "(", "func", "(", ")", ")", "for", "(", "_", ",", "func", ")", "in", "list", "(", "iterables", ".", "items", "(", ")", ")", "]", ")" ]
return the number of iterable expansion nodes .
train
false
44,334
def authenticate_twitch_oauth():
    """Open a web browser so the user can grant Livestreamer access to
    their Twitch account via the OAuth authorize endpoint.

    If no browser tab can be opened, prints the URL for manual use and
    exits via the console helper.
    """
    client_id = 'ewvlchtxgqq88ru9gmfp1gmyt6h2b93'
    redirect_uri = 'http://livestreamer.tanuki.se/en/develop/twitch_oauth.html'
    url = ('https://api.twitch.tv/kraken/oauth2/authorize/'
           '?response_type=token&client_id={0}&redirect_uri={1}'
           '&scope=user_read+user_subscriptions').format(client_id, redirect_uri)
    console.msg('Attempting to open a browser to let you authenticate Livestreamer with Twitch')
    try:
        opened = webbrowser.open_new_tab(url)
        if not opened:
            raise webbrowser.Error
    except webbrowser.Error:
        console.exit('Unable to open a web browser, try accessing this URL manually instead:\n{0}'.format(url))
[ "def", "authenticate_twitch_oauth", "(", ")", ":", "client_id", "=", "'ewvlchtxgqq88ru9gmfp1gmyt6h2b93'", "redirect_uri", "=", "'http://livestreamer.tanuki.se/en/develop/twitch_oauth.html'", "url", "=", "'https://api.twitch.tv/kraken/oauth2/authorize/?response_type=token&client_id={0}&redirect_uri={1}&scope=user_read+user_subscriptions'", ".", "format", "(", "client_id", ",", "redirect_uri", ")", "console", ".", "msg", "(", "'Attempting to open a browser to let you authenticate Livestreamer with Twitch'", ")", "try", ":", "if", "(", "not", "webbrowser", ".", "open_new_tab", "(", "url", ")", ")", ":", "raise", "webbrowser", ".", "Error", "except", "webbrowser", ".", "Error", ":", "console", ".", "exit", "(", "'Unable to open a web browser, try accessing this URL manually instead:\\n{0}'", ".", "format", "(", "url", ")", ")" ]
opens a web browser to allow the user to grant livestreamer access to their twitch account .
train
true
44,335
def meta_kw_extractor(index, msg_mid, msg, msg_size, msg_ts, **kwargs):
    """Build a search keyword encoding the floored log2 of the message size.

    Returns a single-element list like ['12:ln2sz'] for a ~5 KB message,
    or an empty list for non-positive sizes. Other parameters are part of
    the extractor interface and are unused here.
    """
    if msg_size <= 0:
        return []
    size_bucket = int(math.log(msg_size, 2))
    return ['%s:ln2sz' % size_bucket]
[ "def", "meta_kw_extractor", "(", "index", ",", "msg_mid", ",", "msg", ",", "msg_size", ",", "msg_ts", ",", "**", "kwargs", ")", ":", "if", "(", "msg_size", "<=", "0", ")", ":", "return", "[", "]", "return", "[", "(", "'%s:ln2sz'", "%", "int", "(", "math", ".", "log", "(", "msg_size", ",", "2", ")", ")", ")", "]" ]
create a search term with the floored log2 size of the message .
train
false
44,337
def libvlc_video_set_format(mp, chroma, width, height, pitch):
    """Set decoded video chroma and dimensions.

    Thin ctypes wrapper: resolves the native libvlc symbol once, caches
    it in _Cfunctions, then forwards (mp, chroma, width, height, pitch).

    @param mp: the media player.
    @param chroma: chroma string (passed as c_char_p).
    @param width: frame width (c_uint).
    @param height: frame height (c_uint).
    @param pitch: line pitch -- presumably bytes per line; TODO confirm
        against the libvlc headers.
    """
    f = (_Cfunctions.get('libvlc_video_set_format', None) or
         _Cfunction('libvlc_video_set_format',
                    ((1,), (1,), (1,), (1,), (1,)), None, None,
                    MediaPlayer, ctypes.c_char_p, ctypes.c_uint, ctypes.c_uint, ctypes.c_uint))
    return f(mp, chroma, width, height, pitch)
[ "def", "libvlc_video_set_format", "(", "mp", ",", "chroma", ",", "width", ",", "height", ",", "pitch", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_format'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_format'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_char_p", ",", "ctypes", ".", "c_uint", ",", "ctypes", ".", "c_uint", ",", "ctypes", ".", "c_uint", ")", ")", "return", "f", "(", "mp", ",", "chroma", ",", "width", ",", "height", ",", "pitch", ")" ]
set decoded video chroma and dimensions .
train
true
44,338
def test_sp1():
    """Check several SequencePattern instances produce the expected
    output sequences, element-wise."""
    cases = [
        ('maxmin_dup', range(10), [9, 0, 2, 3, 4, 5, 6, 7, 8, 9]),
        ('sorted', [5, 6, 1, 2, 9], [1, 2, 5, 6, 9]),
        ('reversed', range(10), [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]),
    ]
    for pattern_name, source, expected in cases:
        sp = SequencePattern(pattern_name)
        y = sp.generate_output_sequence(source)
        assert all(y == np.array(expected))
[ "def", "test_sp1", "(", ")", ":", "sp", "=", "SequencePattern", "(", "'maxmin_dup'", ")", "y", "=", "sp", ".", "generate_output_sequence", "(", "range", "(", "10", ")", ")", "assert", "all", "(", "(", "y", "==", "np", ".", "array", "(", "[", "9", ",", "0", ",", "2", ",", "3", ",", "4", ",", "5", ",", "6", ",", "7", ",", "8", ",", "9", "]", ")", ")", ")", "sp", "=", "SequencePattern", "(", "'sorted'", ")", "y", "=", "sp", ".", "generate_output_sequence", "(", "[", "5", ",", "6", ",", "1", ",", "2", ",", "9", "]", ")", "assert", "all", "(", "(", "y", "==", "np", ".", "array", "(", "[", "1", ",", "2", ",", "5", ",", "6", ",", "9", "]", ")", ")", ")", "sp", "=", "SequencePattern", "(", "'reversed'", ")", "y", "=", "sp", ".", "generate_output_sequence", "(", "range", "(", "10", ")", ")", "assert", "all", "(", "(", "y", "==", "np", ".", "array", "(", "[", "9", ",", "8", ",", "7", ",", "6", ",", "5", ",", "4", ",", "3", ",", "2", ",", "1", ",", "0", "]", ")", ")", ")" ]
test two different sequencepattern instances .
train
false
44,340
def compute_workload(num_cores, num_flows, spread):
    """Partition num_flows flows across num_cores workers proportionally
    to each worker's relative performance in ``spread``.

    Starts from each worker's floored proportional share, then hands the
    leftover flows out one at a time to whichever worker would currently
    finish first (smallest workload/performance ratio, ties to the lowest
    index). Returns the list of per-worker flow counts.
    """
    weights = spread[0:num_cores]
    total_weight = fsum(weights)
    workload = [trunc(num_flows * w / total_weight) for w in weights]
    while sum(workload) < num_flows:
        # min() keeps the first index on ties, matching the original
        # strict-less-than scan.
        earliest = min(range(num_cores), key=lambda i: workload[i] / spread[i])
        workload[earliest] += 1
    return workload
[ "def", "compute_workload", "(", "num_cores", ",", "num_flows", ",", "spread", ")", ":", "sigma", "=", "fsum", "(", "spread", "[", "0", ":", "num_cores", "]", ")", "workload", "=", "[", "trunc", "(", "(", "(", "num_flows", "*", "x", ")", "/", "sigma", ")", ")", "for", "x", "in", "spread", "[", "0", ":", "num_cores", "]", "]", "while", "(", "sum", "(", "workload", ")", "<", "num_flows", ")", ":", "finish", "=", "(", "workload", "[", "0", "]", "/", "spread", "[", "0", "]", ")", "i", "=", "0", "for", "x", "in", "range", "(", "1", ",", "num_cores", ")", ":", "t", "=", "(", "workload", "[", "x", "]", "/", "spread", "[", "x", "]", ")", "if", "(", "t", "<", "finish", ")", ":", "finish", "=", "t", "i", "=", "x", "workload", "[", "i", "]", "+=", "1", "return", "workload" ]
compute workload for each individual worker num_flows: total number of flows to be processed num_cores: total number of workers available for processing the flows spread: relative performance of the each worker .
train
false
44,341
def kvp(v, z, n=1):
    """Compute the nth derivative of the real-order modified Bessel
    function of the second kind, Kv(z), with respect to z.

    n must be a non-negative integer; n=0 returns Kv(z) itself, and for
    n >= 1 the derivative is (-1)**n times the Bessel difference formula
    applied to kv.

    Raises:
        ValueError: if n is not a non-negative integer.
    """
    if not isinstance(n, int) or n < 0:
        raise ValueError('n must be a non-negative integer.')
    if n == 0:
        return kv(v, z)
    return (-1) ** n * _bessel_diff_formula(v, z, n, kv, 1)
[ "def", "kvp", "(", "v", ",", "z", ",", "n", "=", "1", ")", ":", "if", "(", "(", "not", "isinstance", "(", "n", ",", "int", ")", ")", "or", "(", "n", "<", "0", ")", ")", ":", "raise", "ValueError", "(", "'n must be a non-negative integer.'", ")", "if", "(", "n", "==", "0", ")", ":", "return", "kv", "(", "v", ",", "z", ")", "else", ":", "return", "(", "(", "(", "-", "1", ")", "**", "n", ")", "*", "_bessel_diff_formula", "(", "v", ",", "z", ",", "n", ",", "kv", ",", "1", ")", ")" ]
compute nth derivative of real-order modified bessel function kv(z) kv(z) is the modified bessel function of the second kind .
train
false
44,342
def get_token_status(token, serializer, max_age=None, return_data=False):
    """Decode a security token and report its status.

    ``serializer`` is the name prefix of a serializer attribute on the
    security extension (e.g. 'reset' -> _security.reset_serializer).

    Returns (expired, invalid, user), plus the decoded data when
    ``return_data`` is set. Note a token is only reported expired when it
    still maps to a known user.
    """
    serializer = getattr(_security, (serializer + '_serializer'))
    max_age = get_max_age(max_age)
    (user, data) = (None, None)
    (expired, invalid) = (False, False)
    try:
        data = serializer.loads(token, max_age=max_age)
    except SignatureExpired:
        # Signature valid but too old: still recover the payload so the
        # user behind the token can be identified.
        (d, data) = serializer.loads_unsafe(token)
        expired = True
    except (BadSignature, TypeError, ValueError):
        invalid = True
    if data:
        # Payload convention: data[0] holds the user id.
        user = _datastore.find_user(id=data[0])
        expired = (expired and (user is not None))
    if return_data:
        return (expired, invalid, user, data)
    else:
        return (expired, invalid, user)
[ "def", "get_token_status", "(", "token", ",", "serializer", ",", "max_age", "=", "None", ",", "return_data", "=", "False", ")", ":", "serializer", "=", "getattr", "(", "_security", ",", "(", "serializer", "+", "'_serializer'", ")", ")", "max_age", "=", "get_max_age", "(", "max_age", ")", "(", "user", ",", "data", ")", "=", "(", "None", ",", "None", ")", "(", "expired", ",", "invalid", ")", "=", "(", "False", ",", "False", ")", "try", ":", "data", "=", "serializer", ".", "loads", "(", "token", ",", "max_age", "=", "max_age", ")", "except", "SignatureExpired", ":", "(", "d", ",", "data", ")", "=", "serializer", ".", "loads_unsafe", "(", "token", ")", "expired", "=", "True", "except", "(", "BadSignature", ",", "TypeError", ",", "ValueError", ")", ":", "invalid", "=", "True", "if", "data", ":", "user", "=", "_datastore", ".", "find_user", "(", "id", "=", "data", "[", "0", "]", ")", "expired", "=", "(", "expired", "and", "(", "user", "is", "not", "None", ")", ")", "if", "return_data", ":", "return", "(", "expired", ",", "invalid", ",", "user", ",", "data", ")", "else", ":", "return", "(", "expired", ",", "invalid", ",", "user", ")" ]
get the status of a token .
train
true
44,343
def d(message):
    """Print *message* as a debug log entry, styled with the BLUE color."""
    print_log(message, BLUE)
[ "def", "d", "(", "message", ")", ":", "print_log", "(", "message", ",", "BLUE", ")" ]
print a debug log message .
train
false
44,344
def check_support(user_obj, obj):
    """Combine check_object_support and check_user_support.

    Returns (supported, user_obj): supported is True only when both the
    object and the user pass their checks; user_obj may be replaced by
    whatever check_user_support returns.
    """
    object_ok = check_object_support(obj)
    user_ok, user_obj = check_user_support(user_obj)
    return (object_ok and user_ok, user_obj)
[ "def", "check_support", "(", "user_obj", ",", "obj", ")", ":", "obj_support", "=", "check_object_support", "(", "obj", ")", "(", "user_support", ",", "user_obj", ")", "=", "check_user_support", "(", "user_obj", ")", "return", "(", "(", "obj_support", "and", "user_support", ")", ",", "user_obj", ")" ]
combination of check_object_support and check_user_support .
train
false
44,346
def random_k_out_graph(n, k, alpha, self_loops=True, seed=None):
    """Return a random k-out multidigraph with preferential attachment.

    Each of the n nodes ends with exactly k out-edges; at every step a
    source with spare out-degree is picked uniformly, and its target is
    chosen with probability proportional to (in-degree-so-far + alpha).
    With self_loops=False the source's own weight is masked out of the
    draw. ``seed`` feeds the module-level random generator.

    NOTE(review): the check is `alpha < 0` so alpha == 0 is accepted even
    though the message says 'must be positive' -- confirm intent.
    """
    if (alpha < 0):
        raise ValueError('alpha must be positive')
    random.seed(seed)
    G = nx.empty_graph(n, create_using=nx.MultiDiGraph())
    # Every node starts with weight alpha; each received edge adds 1.
    weights = Counter({v: alpha for v in G})
    for i in range((k * n)):
        # Uniformly choose a source that still has out-degree capacity.
        u = random.choice([v for (v, d) in G.out_degree() if (d < k)])
        if (not self_loops):
            adjustment = Counter({u: weights[u]})
        else:
            adjustment = Counter()
        v = weighted_choice((weights - adjustment))
        G.add_edge(u, v)
        weights[v] += 1
    G.name = 'random_k_out_graph({0}, {1}, {2})'.format(n, k, alpha)
    return G
[ "def", "random_k_out_graph", "(", "n", ",", "k", ",", "alpha", ",", "self_loops", "=", "True", ",", "seed", "=", "None", ")", ":", "if", "(", "alpha", "<", "0", ")", ":", "raise", "ValueError", "(", "'alpha must be positive'", ")", "random", ".", "seed", "(", "seed", ")", "G", "=", "nx", ".", "empty_graph", "(", "n", ",", "create_using", "=", "nx", ".", "MultiDiGraph", "(", ")", ")", "weights", "=", "Counter", "(", "{", "v", ":", "alpha", "for", "v", "in", "G", "}", ")", "for", "i", "in", "range", "(", "(", "k", "*", "n", ")", ")", ":", "u", "=", "random", ".", "choice", "(", "[", "v", "for", "(", "v", ",", "d", ")", "in", "G", ".", "out_degree", "(", ")", "if", "(", "d", "<", "k", ")", "]", ")", "if", "(", "not", "self_loops", ")", ":", "adjustment", "=", "Counter", "(", "{", "u", ":", "weights", "[", "u", "]", "}", ")", "else", ":", "adjustment", "=", "Counter", "(", ")", "v", "=", "weighted_choice", "(", "(", "weights", "-", "adjustment", ")", ")", "G", ".", "add_edge", "(", "u", ",", "v", ")", "weights", "[", "v", "]", "+=", "1", "G", ".", "name", "=", "'random_k_out_graph({0}, {1}, {2})'", ".", "format", "(", "n", ",", "k", ",", "alpha", ")", "return", "G" ]
returns a random k-out graph with preferential attachment .
train
false
44,348
def test_saving_state_incl_entities(hass_recorder):
    """Only states for entities on the include list are recorded."""
    hass = hass_recorder({'include': {'entities': 'test2.recorder'}})
    recorded = _add_entities(hass, ['test.recorder', 'test2.recorder'])
    assert len(recorded) == 1
    assert hass.states.get('test2.recorder') == recorded[0]
[ "def", "test_saving_state_incl_entities", "(", "hass_recorder", ")", ":", "hass", "=", "hass_recorder", "(", "{", "'include'", ":", "{", "'entities'", ":", "'test2.recorder'", "}", "}", ")", "states", "=", "_add_entities", "(", "hass", ",", "[", "'test.recorder'", ",", "'test2.recorder'", "]", ")", "assert", "(", "len", "(", "states", ")", "==", "1", ")", "assert", "(", "hass", ".", "states", ".", "get", "(", "'test2.recorder'", ")", "==", "states", "[", "0", "]", ")" ]
test saving and restoring a state .
train
false
44,349
def FindMissingInitFiles(cgi_path, module_fullname, isfile=os.path.isfile):
    """Determine which __init__.py files are missing on the package path
    leading to a module.

    Args:
      cgi_path: filesystem path of the module (a .py file or a directory).
      module_fullname: dotted full name of the module.
      isfile: file-existence predicate, injectable for testing.

    Returns:
      Absolute __init__.py paths that do not exist, deepest package first.
    """
    missing_init_files = []
    if cgi_path.endswith('.py'):
        module_base = os.path.dirname(cgi_path)
    else:
        module_base = cgi_path
    depth_count = module_fullname.count('.')
    # Packages (directories or __init__.py paths) need one more ancestor
    # level checked than plain modules.
    if cgi_path.endswith('__init__.py') or not cgi_path.endswith('.py'):
        depth_count += 1
    # range() instead of the Python-2-only xrange(): works identically on
    # both Python 2 and 3 for this loop.
    for _ in range(depth_count):
        current_init_file = os.path.abspath(os.path.join(module_base, '__init__.py'))
        if not isfile(current_init_file):
            missing_init_files.append(current_init_file)
        module_base = os.path.abspath(os.path.join(module_base, os.pardir))
    return missing_init_files
[ "def", "FindMissingInitFiles", "(", "cgi_path", ",", "module_fullname", ",", "isfile", "=", "os", ".", "path", ".", "isfile", ")", ":", "missing_init_files", "=", "[", "]", "if", "cgi_path", ".", "endswith", "(", "'.py'", ")", ":", "module_base", "=", "os", ".", "path", ".", "dirname", "(", "cgi_path", ")", "else", ":", "module_base", "=", "cgi_path", "depth_count", "=", "module_fullname", ".", "count", "(", "'.'", ")", "if", "(", "cgi_path", ".", "endswith", "(", "'__init__.py'", ")", "or", "(", "not", "cgi_path", ".", "endswith", "(", "'.py'", ")", ")", ")", ":", "depth_count", "+=", "1", "for", "index", "in", "xrange", "(", "depth_count", ")", ":", "current_init_file", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "module_base", ",", "'__init__.py'", ")", ")", "if", "(", "not", "isfile", "(", "current_init_file", ")", ")", ":", "missing_init_files", ".", "append", "(", "current_init_file", ")", "module_base", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "module_base", ",", "os", ".", "pardir", ")", ")", "return", "missing_init_files" ]
determines which __init__ .
train
false
44,350
def format_output(count_data, total, inaccessible_filepaths, suppress_errors=False):
    """Build the human-readable sequence-count report.

    count_data is a list of ((count, mean_len, std_len), label) pairs and
    is sorted in place before rendering; entries with a positive count
    also show the length statistics. Inaccessible files are appended as a
    warning section unless suppress_errors is set. Returns one string.
    """
    count_data.sort()
    lines = ['']
    for stats, label in count_data:
        if stats[0] > 0:
            lines.append('%d : %s (Sequence lengths (mean +/- std): %1.4f +/- %1.4f)'
                         % (stats[0], label, stats[1], stats[2]))
        else:
            lines.append('%d : %s' % (stats[0], label))
    lines.append('%d : Total' % total)
    if inaccessible_filepaths and not suppress_errors:
        lines.append('')
        lines.append('Some files were not accessible. Do they exist? Do you have read permission?')
        lines.extend(' %s' % path for path in inaccessible_filepaths)
        lines.append('')
    return '\n'.join(lines)
[ "def", "format_output", "(", "count_data", ",", "total", ",", "inaccessible_filepaths", ",", "suppress_errors", "=", "False", ")", ":", "lines", "=", "[", "''", "]", "count_data", ".", "sort", "(", ")", "for", "c", "in", "count_data", ":", "if", "(", "c", "[", "0", "]", "[", "0", "]", ">", "0", ")", ":", "lines", ".", "append", "(", "(", "'%d : %s (Sequence lengths (mean +/- std): %1.4f +/- %1.4f)'", "%", "(", "c", "[", "0", "]", "[", "0", "]", ",", "c", "[", "1", "]", ",", "c", "[", "0", "]", "[", "1", "]", ",", "c", "[", "0", "]", "[", "2", "]", ")", ")", ")", "else", ":", "lines", ".", "append", "(", "(", "'%d : %s'", "%", "(", "c", "[", "0", "]", "[", "0", "]", ",", "c", "[", "1", "]", ")", ")", ")", "lines", ".", "append", "(", "(", "'%d : Total'", "%", "total", ")", ")", "if", "(", "inaccessible_filepaths", "and", "(", "not", "suppress_errors", ")", ")", ":", "lines", ".", "append", "(", "''", ")", "lines", ".", "append", "(", "'Some files were not accessible. Do they exist? Do you have read permission?'", ")", "for", "inaccessible_filepath", "in", "inaccessible_filepaths", ":", "lines", ".", "append", "(", "(", "' %s'", "%", "inaccessible_filepath", ")", ")", "lines", ".", "append", "(", "''", ")", "return", "'\\n'", ".", "join", "(", "lines", ")" ]
output formatter .
train
false
44,351
def str_to_seconds(s):
    """Convert a pandas-parsable datetime string to integral seconds
    relative to the module-level EPOCH reference (the string is parsed as
    UTC).
    """
    return timedelta_to_integral_seconds((pd.Timestamp(s, tz='UTC') - EPOCH))
[ "def", "str_to_seconds", "(", "s", ")", ":", "return", "timedelta_to_integral_seconds", "(", "(", "pd", ".", "Timestamp", "(", "s", ",", "tz", "=", "'UTC'", ")", "-", "EPOCH", ")", ")" ]
convert a pandas-intelligible string to seconds since utc .
train
false
44,353
def _SetRangeRequestNotSatisfiable(response, blob_size): response.status_code = 416 response.status_message = 'Requested Range Not Satisfiable' response.body = cStringIO.StringIO('') response.headers['Content-Length'] = '0' response.headers['Content-Range'] = ('*/%d' % blob_size) del response.headers['Content-Type']
[ "def", "_SetRangeRequestNotSatisfiable", "(", "response", ",", "blob_size", ")", ":", "response", ".", "status_code", "=", "416", "response", ".", "status_message", "=", "'Requested Range Not Satisfiable'", "response", ".", "body", "=", "cStringIO", ".", "StringIO", "(", "''", ")", "response", ".", "headers", "[", "'Content-Length'", "]", "=", "'0'", "response", ".", "headers", "[", "'Content-Range'", "]", "=", "(", "'*/%d'", "%", "blob_size", ")", "del", "response", ".", "headers", "[", "'Content-Type'", "]" ]
short circuit response and return 416 error .
train
false
44,354
@nodes_or_number(0) def path_graph(n, create_using=None): (n_name, nodes) = n G = empty_graph(nodes, create_using) G.name = ('path_graph(%s)' % (n_name,)) G.add_edges_from(nx.utils.pairwise(nodes)) return G
[ "@", "nodes_or_number", "(", "0", ")", "def", "path_graph", "(", "n", ",", "create_using", "=", "None", ")", ":", "(", "n_name", ",", "nodes", ")", "=", "n", "G", "=", "empty_graph", "(", "nodes", ",", "create_using", ")", "G", ".", "name", "=", "(", "'path_graph(%s)'", "%", "(", "n_name", ",", ")", ")", "G", ".", "add_edges_from", "(", "nx", ".", "utils", ".", "pairwise", "(", "nodes", ")", ")", "return", "G" ]
return the path graph p_n of linearly connected nodes .
train
false
44,357
def set_curdoc(doc): _state.document = doc
[ "def", "set_curdoc", "(", "doc", ")", ":", "_state", ".", "document", "=", "doc" ]
configure the current document (returned by curdoc()) .
train
false
44,358
def check_job_edition_permission(authorize_get=False, exception_class=PopupException): def inner(view_func): def decorate(request, *args, **kwargs): if ('workflow' in kwargs): job_type = 'workflow' elif ('coordinator' in kwargs): job_type = 'coordinator' else: job_type = 'bundle' job = kwargs.get(job_type) if ((job is not None) and (not (authorize_get and (request.method == 'GET')))): Job.objects.can_edit_or_exception(request, job, exception_class=exception_class) return view_func(request, *args, **kwargs) return wraps(view_func)(decorate) return inner
[ "def", "check_job_edition_permission", "(", "authorize_get", "=", "False", ",", "exception_class", "=", "PopupException", ")", ":", "def", "inner", "(", "view_func", ")", ":", "def", "decorate", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "'workflow'", "in", "kwargs", ")", ":", "job_type", "=", "'workflow'", "elif", "(", "'coordinator'", "in", "kwargs", ")", ":", "job_type", "=", "'coordinator'", "else", ":", "job_type", "=", "'bundle'", "job", "=", "kwargs", ".", "get", "(", "job_type", ")", "if", "(", "(", "job", "is", "not", "None", ")", "and", "(", "not", "(", "authorize_get", "and", "(", "request", ".", "method", "==", "'GET'", ")", ")", ")", ")", ":", "Job", ".", "objects", ".", "can_edit_or_exception", "(", "request", ",", "job", ",", "exception_class", "=", "exception_class", ")", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wraps", "(", "view_func", ")", "(", "decorate", ")", "return", "inner" ]
decorator ensuring that the user has the permissions to modify a workflow or coordinator .
train
false
44,359
def test_rus_bad_ratio(): ratio = (-1.0) rus = RandomUnderSampler(ratio=ratio) assert_raises(ValueError, rus.fit, X, Y) ratio = 100.0 rus = RandomUnderSampler(ratio=ratio) assert_raises(ValueError, rus.fit, X, Y) ratio = 'rnd' rus = RandomUnderSampler(ratio=ratio) assert_raises(ValueError, rus.fit, X, Y) ratio = [0.5, 0.5] rus = RandomUnderSampler(ratio=ratio) assert_raises(ValueError, rus.fit, X, Y)
[ "def", "test_rus_bad_ratio", "(", ")", ":", "ratio", "=", "(", "-", "1.0", ")", "rus", "=", "RandomUnderSampler", "(", "ratio", "=", "ratio", ")", "assert_raises", "(", "ValueError", ",", "rus", ".", "fit", ",", "X", ",", "Y", ")", "ratio", "=", "100.0", "rus", "=", "RandomUnderSampler", "(", "ratio", "=", "ratio", ")", "assert_raises", "(", "ValueError", ",", "rus", ".", "fit", ",", "X", ",", "Y", ")", "ratio", "=", "'rnd'", "rus", "=", "RandomUnderSampler", "(", "ratio", "=", "ratio", ")", "assert_raises", "(", "ValueError", ",", "rus", ".", "fit", ",", "X", ",", "Y", ")", "ratio", "=", "[", "0.5", ",", "0.5", "]", "rus", "=", "RandomUnderSampler", "(", "ratio", "=", "ratio", ")", "assert_raises", "(", "ValueError", ",", "rus", ".", "fit", ",", "X", ",", "Y", ")" ]
test either if an error is raised with a wrong decimal value for the ratio .
train
false
44,362
def connection_info(): from matplotlib._pylab_helpers import Gcf result = [] for manager in Gcf.get_all_fig_managers(): fig = manager.canvas.figure result.append('{0} - {0}'.format((fig.get_label() or 'Figure {0}'.format(manager.num)), manager.web_sockets)) if (not is_interactive()): result.append('Figures pending show: {0}'.format(len(Gcf._activeQue))) return '\n'.join(result)
[ "def", "connection_info", "(", ")", ":", "from", "matplotlib", ".", "_pylab_helpers", "import", "Gcf", "result", "=", "[", "]", "for", "manager", "in", "Gcf", ".", "get_all_fig_managers", "(", ")", ":", "fig", "=", "manager", ".", "canvas", ".", "figure", "result", ".", "append", "(", "'{0} - {0}'", ".", "format", "(", "(", "fig", ".", "get_label", "(", ")", "or", "'Figure {0}'", ".", "format", "(", "manager", ".", "num", ")", ")", ",", "manager", ".", "web_sockets", ")", ")", "if", "(", "not", "is_interactive", "(", ")", ")", ":", "result", ".", "append", "(", "'Figures pending show: {0}'", ".", "format", "(", "len", "(", "Gcf", ".", "_activeQue", ")", ")", ")", "return", "'\\n'", ".", "join", "(", "result", ")" ]
return a string showing the figure and connection status for the backend .
train
false
44,363
@commands(u'comment') def take_comment(bot, trigger): if (not trigger.sender.is_nick()): return if (not trigger.group(4)): bot.say(u'Usage: .comment <#channel> <comment to add>') return (target, message) = trigger.group(2).split(None, 1) target = Identifier(target) if (not ismeetingrunning(target)): bot.say(u"There's not currently a meeting in that channel.") else: meetings_dict[trigger.group(3)][u'comments'].append((trigger.nick, message)) bot.say(u'Your comment has been recorded. It will be shown when the chairs tell me to show the comments.') bot.msg(meetings_dict[trigger.group(3)][u'head'], u'A new comment has been recorded.')
[ "@", "commands", "(", "u'comment'", ")", "def", "take_comment", "(", "bot", ",", "trigger", ")", ":", "if", "(", "not", "trigger", ".", "sender", ".", "is_nick", "(", ")", ")", ":", "return", "if", "(", "not", "trigger", ".", "group", "(", "4", ")", ")", ":", "bot", ".", "say", "(", "u'Usage: .comment <#channel> <comment to add>'", ")", "return", "(", "target", ",", "message", ")", "=", "trigger", ".", "group", "(", "2", ")", ".", "split", "(", "None", ",", "1", ")", "target", "=", "Identifier", "(", "target", ")", "if", "(", "not", "ismeetingrunning", "(", "target", ")", ")", ":", "bot", ".", "say", "(", "u\"There's not currently a meeting in that channel.\"", ")", "else", ":", "meetings_dict", "[", "trigger", ".", "group", "(", "3", ")", "]", "[", "u'comments'", "]", ".", "append", "(", "(", "trigger", ".", "nick", ",", "message", ")", ")", "bot", ".", "say", "(", "u'Your comment has been recorded. It will be shown when the chairs tell me to show the comments.'", ")", "bot", ".", "msg", "(", "meetings_dict", "[", "trigger", ".", "group", "(", "3", ")", "]", "[", "u'head'", "]", ",", "u'A new comment has been recorded.'", ")" ]
log a comment .
train
false
44,364
@instrumented_task(name='sentry.tasks.process_buffer.process_pending') def process_pending(): from sentry import app lock = app.locks.get('buffer:process_pending', duration=60) try: with lock.acquire(): app.buffer.process_pending() except UnableToAcquireLock as error: logger.warning('process_pending.fail', extra={'error': error})
[ "@", "instrumented_task", "(", "name", "=", "'sentry.tasks.process_buffer.process_pending'", ")", "def", "process_pending", "(", ")", ":", "from", "sentry", "import", "app", "lock", "=", "app", ".", "locks", ".", "get", "(", "'buffer:process_pending'", ",", "duration", "=", "60", ")", "try", ":", "with", "lock", ".", "acquire", "(", ")", ":", "app", ".", "buffer", ".", "process_pending", "(", ")", "except", "UnableToAcquireLock", "as", "error", ":", "logger", ".", "warning", "(", "'process_pending.fail'", ",", "extra", "=", "{", "'error'", ":", "error", "}", ")" ]
process pending buffers .
train
false
44,365
def format_event(event): to_delete = ('id', 'created_at', 'updated_at', 'deleted_at', 'deleted', 'action_id') for key in to_delete: if (key in event): del event[key] if ('start_time' in event): event['start_time'] = str(event['start_time'].replace(tzinfo=None)) if ('finish_time' in event): event['finish_time'] = str(event['finish_time'].replace(tzinfo=None)) return event
[ "def", "format_event", "(", "event", ")", ":", "to_delete", "=", "(", "'id'", ",", "'created_at'", ",", "'updated_at'", ",", "'deleted_at'", ",", "'deleted'", ",", "'action_id'", ")", "for", "key", "in", "to_delete", ":", "if", "(", "key", "in", "event", ")", ":", "del", "event", "[", "key", "]", "if", "(", "'start_time'", "in", "event", ")", ":", "event", "[", "'start_time'", "]", "=", "str", "(", "event", "[", "'start_time'", "]", ".", "replace", "(", "tzinfo", "=", "None", ")", ")", "if", "(", "'finish_time'", "in", "event", ")", ":", "event", "[", "'finish_time'", "]", "=", "str", "(", "event", "[", "'finish_time'", "]", ".", "replace", "(", "tzinfo", "=", "None", ")", ")", "return", "event" ]
remove keys that arent serialized .
train
false
44,366
def delete_probes(probes, test=False, commit=True): return __salt__['net.load_template']('delete_probes', probes=probes, test=test, commit=commit)
[ "def", "delete_probes", "(", "probes", ",", "test", "=", "False", ",", "commit", "=", "True", ")", ":", "return", "__salt__", "[", "'net.load_template'", "]", "(", "'delete_probes'", ",", "probes", "=", "probes", ",", "test", "=", "test", ",", "commit", "=", "commit", ")" ]
removes rpm/sla probes from the network device .
train
false
44,367
def GetFunctionName(f): try: name = f.__name__ if hasattr(f, 'im_class'): name = ((f.im_class.__name__ + '.') + name) return name except: return ''
[ "def", "GetFunctionName", "(", "f", ")", ":", "try", ":", "name", "=", "f", ".", "__name__", "if", "hasattr", "(", "f", ",", "'im_class'", ")", ":", "name", "=", "(", "(", "f", ".", "im_class", ".", "__name__", "+", "'.'", ")", "+", "name", ")", "return", "name", "except", ":", "return", "''" ]
creates a formatted function string for display .
train
false
44,369
def arithmetic_mean(confirmed_measures): return np.mean(confirmed_measures)
[ "def", "arithmetic_mean", "(", "confirmed_measures", ")", ":", "return", "np", ".", "mean", "(", "confirmed_measures", ")" ]
this functoin performs the arithmetic mean aggregation on the output obtained from the confirmation measure module .
train
false
44,370
def _build_path_iterator(path, namespaces): if (namespaces and ((None in namespaces) or ('' in namespaces))): raise ValueError('empty namespace prefix is not supported in ElementPath') if (path[(-1):] == '/'): path += '*' cache_key = (path, ((namespaces and tuple(sorted(namespaces.items()))) or None)) try: return _cache[cache_key] except KeyError: pass if (len(_cache) > 100): _cache.clear() if (path[:1] == '/'): raise SyntaxError('cannot use absolute path on element') stream = iter(xpath_tokenizer(path, namespaces)) try: _next = stream.next except AttributeError: _next = stream.__next__ try: token = _next() except StopIteration: raise SyntaxError('empty path expression') selector = [] while 1: try: selector.append(ops[token[0]](_next, token)) except StopIteration: raise SyntaxError('invalid path') try: token = _next() if (token[0] == '/'): token = _next() except StopIteration: break _cache[cache_key] = selector return selector
[ "def", "_build_path_iterator", "(", "path", ",", "namespaces", ")", ":", "if", "(", "namespaces", "and", "(", "(", "None", "in", "namespaces", ")", "or", "(", "''", "in", "namespaces", ")", ")", ")", ":", "raise", "ValueError", "(", "'empty namespace prefix is not supported in ElementPath'", ")", "if", "(", "path", "[", "(", "-", "1", ")", ":", "]", "==", "'/'", ")", ":", "path", "+=", "'*'", "cache_key", "=", "(", "path", ",", "(", "(", "namespaces", "and", "tuple", "(", "sorted", "(", "namespaces", ".", "items", "(", ")", ")", ")", ")", "or", "None", ")", ")", "try", ":", "return", "_cache", "[", "cache_key", "]", "except", "KeyError", ":", "pass", "if", "(", "len", "(", "_cache", ")", ">", "100", ")", ":", "_cache", ".", "clear", "(", ")", "if", "(", "path", "[", ":", "1", "]", "==", "'/'", ")", ":", "raise", "SyntaxError", "(", "'cannot use absolute path on element'", ")", "stream", "=", "iter", "(", "xpath_tokenizer", "(", "path", ",", "namespaces", ")", ")", "try", ":", "_next", "=", "stream", ".", "next", "except", "AttributeError", ":", "_next", "=", "stream", ".", "__next__", "try", ":", "token", "=", "_next", "(", ")", "except", "StopIteration", ":", "raise", "SyntaxError", "(", "'empty path expression'", ")", "selector", "=", "[", "]", "while", "1", ":", "try", ":", "selector", ".", "append", "(", "ops", "[", "token", "[", "0", "]", "]", "(", "_next", ",", "token", ")", ")", "except", "StopIteration", ":", "raise", "SyntaxError", "(", "'invalid path'", ")", "try", ":", "token", "=", "_next", "(", ")", "if", "(", "token", "[", "0", "]", "==", "'/'", ")", ":", "token", "=", "_next", "(", ")", "except", "StopIteration", ":", "break", "_cache", "[", "cache_key", "]", "=", "selector", "return", "selector" ]
compile selector pattern .
train
true
44,371
def hash_dist(x): return (int(sha1(bytes(x)).hexdigest(), base=16) & 4294967295)
[ "def", "hash_dist", "(", "x", ")", ":", "return", "(", "int", "(", "sha1", "(", "bytes", "(", "x", ")", ")", ".", "hexdigest", "(", ")", ",", "base", "=", "16", ")", "&", "4294967295", ")" ]
for a given distribution .
train
false
44,372
def migration_create(context, values): return IMPL.migration_create(context, values)
[ "def", "migration_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "migration_create", "(", "context", ",", "values", ")" ]
create a migration record .
train
false
44,374
def setcopyright(): __builtin__.copyright = _Printer('copyright', sys.copyright) if (sys.platform[:4] == 'java'): __builtin__.credits = _Printer('credits', 'Jython is maintained by the Jython developers (www.jython.org).') elif (sys.platform == 'cli'): __builtin__.credits = _Printer('credits', 'IronPython is maintained by the IronPython developers (www.ironpython.net).') else: __builtin__.credits = _Printer('credits', ' Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\n for supporting Python development. See www.python.org for more information.') here = os.path.dirname(os.__file__) __builtin__.license = _Printer('license', 'See https://www.python.org/psf/license/', ['LICENSE.txt', 'LICENSE'], [os.path.join(here, os.pardir), here, os.curdir])
[ "def", "setcopyright", "(", ")", ":", "__builtin__", ".", "copyright", "=", "_Printer", "(", "'copyright'", ",", "sys", ".", "copyright", ")", "if", "(", "sys", ".", "platform", "[", ":", "4", "]", "==", "'java'", ")", ":", "__builtin__", ".", "credits", "=", "_Printer", "(", "'credits'", ",", "'Jython is maintained by the Jython developers (www.jython.org).'", ")", "elif", "(", "sys", ".", "platform", "==", "'cli'", ")", ":", "__builtin__", ".", "credits", "=", "_Printer", "(", "'credits'", ",", "'IronPython is maintained by the IronPython developers (www.ironpython.net).'", ")", "else", ":", "__builtin__", ".", "credits", "=", "_Printer", "(", "'credits'", ",", "' Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\\n for supporting Python development. See www.python.org for more information.'", ")", "here", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "__file__", ")", "__builtin__", ".", "license", "=", "_Printer", "(", "'license'", ",", "'See https://www.python.org/psf/license/'", ",", "[", "'LICENSE.txt'", ",", "'LICENSE'", "]", ",", "[", "os", ".", "path", ".", "join", "(", "here", ",", "os", ".", "pardir", ")", ",", "here", ",", "os", ".", "curdir", "]", ")" ]
set copyright and credits in __builtin__ .
train
false
44,375
@statfunc def mc_error(x, batches=5): if (x.ndim > 1): dims = np.shape(x) trace = np.transpose([t.ravel() for t in x]) return np.reshape([mc_error(t, batches) for t in trace], dims[1:]) else: if (batches == 1): return (np.std(x) / np.sqrt(len(x))) try: batched_traces = np.resize(x, (batches, int((len(x) / batches)))) except ValueError: resid = (len(x) % batches) new_shape = (batches, ((len(x) - resid) / batches)) batched_traces = np.resize(x[:(- resid)], new_shape) means = np.mean(batched_traces, 1) return (np.std(means) / np.sqrt(batches))
[ "@", "statfunc", "def", "mc_error", "(", "x", ",", "batches", "=", "5", ")", ":", "if", "(", "x", ".", "ndim", ">", "1", ")", ":", "dims", "=", "np", ".", "shape", "(", "x", ")", "trace", "=", "np", ".", "transpose", "(", "[", "t", ".", "ravel", "(", ")", "for", "t", "in", "x", "]", ")", "return", "np", ".", "reshape", "(", "[", "mc_error", "(", "t", ",", "batches", ")", "for", "t", "in", "trace", "]", ",", "dims", "[", "1", ":", "]", ")", "else", ":", "if", "(", "batches", "==", "1", ")", ":", "return", "(", "np", ".", "std", "(", "x", ")", "/", "np", ".", "sqrt", "(", "len", "(", "x", ")", ")", ")", "try", ":", "batched_traces", "=", "np", ".", "resize", "(", "x", ",", "(", "batches", ",", "int", "(", "(", "len", "(", "x", ")", "/", "batches", ")", ")", ")", ")", "except", "ValueError", ":", "resid", "=", "(", "len", "(", "x", ")", "%", "batches", ")", "new_shape", "=", "(", "batches", ",", "(", "(", "len", "(", "x", ")", "-", "resid", ")", "/", "batches", ")", ")", "batched_traces", "=", "np", ".", "resize", "(", "x", "[", ":", "(", "-", "resid", ")", "]", ",", "new_shape", ")", "means", "=", "np", ".", "mean", "(", "batched_traces", ",", "1", ")", "return", "(", "np", ".", "std", "(", "means", ")", "/", "np", ".", "sqrt", "(", "batches", ")", ")" ]
calculates the simulation standard error .
train
false
44,376
def _SignedVarintSize(value): if (value < 0): return 10 if (value <= 127): return 1 if (value <= 16383): return 2 if (value <= 2097151): return 3 if (value <= 268435455): return 4 if (value <= 34359738367): return 5 if (value <= 4398046511103): return 6 if (value <= 562949953421311): return 7 if (value <= 72057594037927935): return 8 if (value <= 9223372036854775807): return 9 return 10
[ "def", "_SignedVarintSize", "(", "value", ")", ":", "if", "(", "value", "<", "0", ")", ":", "return", "10", "if", "(", "value", "<=", "127", ")", ":", "return", "1", "if", "(", "value", "<=", "16383", ")", ":", "return", "2", "if", "(", "value", "<=", "2097151", ")", ":", "return", "3", "if", "(", "value", "<=", "268435455", ")", ":", "return", "4", "if", "(", "value", "<=", "34359738367", ")", ":", "return", "5", "if", "(", "value", "<=", "4398046511103", ")", ":", "return", "6", "if", "(", "value", "<=", "562949953421311", ")", ":", "return", "7", "if", "(", "value", "<=", "72057594037927935", ")", ":", "return", "8", "if", "(", "value", "<=", "9223372036854775807", ")", ":", "return", "9", "return", "10" ]
compute the size of a signed varint value .
train
true
44,377
def calc_gc_skew(sequence): g = (sequence.count('G') + sequence.count('g')) c = (sequence.count('C') + sequence.count('c')) if ((g + c) == 0): return 0.0 else: return ((g - c) / float((g + c)))
[ "def", "calc_gc_skew", "(", "sequence", ")", ":", "g", "=", "(", "sequence", ".", "count", "(", "'G'", ")", "+", "sequence", ".", "count", "(", "'g'", ")", ")", "c", "=", "(", "sequence", ".", "count", "(", "'C'", ")", "+", "sequence", ".", "count", "(", "'c'", ")", ")", "if", "(", "(", "g", "+", "c", ")", "==", "0", ")", ":", "return", "0.0", "else", ":", "return", "(", "(", "g", "-", "c", ")", "/", "float", "(", "(", "g", "+", "c", ")", ")", ")" ]
returns the / gc skew in a passed sequence .
train
false
44,378
def raw_metadata(recipe_dir): meta_path = os.path.join(recipe_dir, 'meta.yaml') with open(meta_path, 'rb') as fi: data = fi.read() if ('{{' in data): data = render_jinja2(recipe_dir) meta = parse(data, None) return meta
[ "def", "raw_metadata", "(", "recipe_dir", ")", ":", "meta_path", "=", "os", ".", "path", ".", "join", "(", "recipe_dir", ",", "'meta.yaml'", ")", "with", "open", "(", "meta_path", ",", "'rb'", ")", "as", "fi", ":", "data", "=", "fi", ".", "read", "(", ")", "if", "(", "'{{'", "in", "data", ")", ":", "data", "=", "render_jinja2", "(", "recipe_dir", ")", "meta", "=", "parse", "(", "data", ",", "None", ")", "return", "meta" ]
evaluate conda template if needed and return raw metadata for supplied recipe directory .
train
false
44,380
def redirectme(): redirect(URL('hello3'))
[ "def", "redirectme", "(", ")", ":", "redirect", "(", "URL", "(", "'hello3'", ")", ")" ]
redirects to /{{=request .
train
false
44,381
def construct_instance(form, instance, fields=None, exclude=None): from django.db import models opts = instance._meta cleaned_data = form.cleaned_data file_field_list = [] for f in opts.fields: if ((not f.editable) or isinstance(f, models.AutoField) or (not (f.name in cleaned_data))): continue if ((fields is not None) and (f.name not in fields)): continue if (exclude and (f.name in exclude)): continue if isinstance(f, models.FileField): file_field_list.append(f) else: f.save_form_data(instance, cleaned_data[f.name]) for f in file_field_list: f.save_form_data(instance, cleaned_data[f.name]) return instance
[ "def", "construct_instance", "(", "form", ",", "instance", ",", "fields", "=", "None", ",", "exclude", "=", "None", ")", ":", "from", "django", ".", "db", "import", "models", "opts", "=", "instance", ".", "_meta", "cleaned_data", "=", "form", ".", "cleaned_data", "file_field_list", "=", "[", "]", "for", "f", "in", "opts", ".", "fields", ":", "if", "(", "(", "not", "f", ".", "editable", ")", "or", "isinstance", "(", "f", ",", "models", ".", "AutoField", ")", "or", "(", "not", "(", "f", ".", "name", "in", "cleaned_data", ")", ")", ")", ":", "continue", "if", "(", "(", "fields", "is", "not", "None", ")", "and", "(", "f", ".", "name", "not", "in", "fields", ")", ")", ":", "continue", "if", "(", "exclude", "and", "(", "f", ".", "name", "in", "exclude", ")", ")", ":", "continue", "if", "isinstance", "(", "f", ",", "models", ".", "FileField", ")", ":", "file_field_list", ".", "append", "(", "f", ")", "else", ":", "f", ".", "save_form_data", "(", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")", "for", "f", "in", "file_field_list", ":", "f", ".", "save_form_data", "(", "instance", ",", "cleaned_data", "[", "f", ".", "name", "]", ")", "return", "instance" ]
constructs and returns a model instance from the bound forms cleaned_data .
train
false
44,382
@register.filter(is_safe=True) @defaultfilters.stringfilter def shellfilter(value): replacements = {'\\': '\\\\', '`': '\\`', "'": "\\'", '"': '\\"'} for (search, repl) in replacements.items(): value = value.replace(search, repl) return safestring.mark_safe(value)
[ "@", "register", ".", "filter", "(", "is_safe", "=", "True", ")", "@", "defaultfilters", ".", "stringfilter", "def", "shellfilter", "(", "value", ")", ":", "replacements", "=", "{", "'\\\\'", ":", "'\\\\\\\\'", ",", "'`'", ":", "'\\\\`'", ",", "\"'\"", ":", "\"\\\\'\"", ",", "'\"'", ":", "'\\\\\"'", "}", "for", "(", "search", ",", "repl", ")", "in", "replacements", ".", "items", "(", ")", ":", "value", "=", "value", ".", "replace", "(", "search", ",", "repl", ")", "return", "safestring", ".", "mark_safe", "(", "value", ")" ]
replace html chars for shell usage .
train
false
44,383
def _merge_observation(accum_observation, observation): if (observation is None): return None elif (accum_observation is None): return observation accum_observation['vision'] = observation.get('vision') accum_observation['text'] = (accum_observation.get('text', []) + observation.get('text', [])) return accum_observation
[ "def", "_merge_observation", "(", "accum_observation", ",", "observation", ")", ":", "if", "(", "observation", "is", "None", ")", ":", "return", "None", "elif", "(", "accum_observation", "is", "None", ")", ":", "return", "observation", "accum_observation", "[", "'vision'", "]", "=", "observation", ".", "get", "(", "'vision'", ")", "accum_observation", "[", "'text'", "]", "=", "(", "accum_observation", ".", "get", "(", "'text'", ",", "[", "]", ")", "+", "observation", ".", "get", "(", "'text'", ",", "[", "]", ")", ")", "return", "accum_observation" ]
old visual observation is discarded .
train
true
44,384
def squared_error_ridge_gradient(x_i, y_i, beta, alpha): return vector_add(squared_error_gradient(x_i, y_i, beta), ridge_penalty_gradient(beta, alpha))
[ "def", "squared_error_ridge_gradient", "(", "x_i", ",", "y_i", ",", "beta", ",", "alpha", ")", ":", "return", "vector_add", "(", "squared_error_gradient", "(", "x_i", ",", "y_i", ",", "beta", ")", ",", "ridge_penalty_gradient", "(", "beta", ",", "alpha", ")", ")" ]
the gradient corresponding to the ith squared error term including the ridge penalty .
train
false
44,385
def _get_module_instance_for_task(course_id, student, module_descriptor, xmodule_instance_args=None, grade_bucket_type=None, course=None): field_data_cache = FieldDataCache.cache_for_descriptor_descendents(course_id, student, module_descriptor) student_data = KvsFieldData(DjangoKeyValueStore(field_data_cache)) request_info = (xmodule_instance_args.get('request_info', {}) if (xmodule_instance_args is not None) else {}) task_info = {'student': student.username, 'task_id': _get_task_id_from_xmodule_args(xmodule_instance_args)} def make_track_function(): '\n Make a tracking function that logs what happened.\n\n For insertion into ModuleSystem, and used by CapaModule, which will\n provide the event_type (as string) and event (as dict) as arguments.\n The request_info and task_info (and page) are provided here.\n ' return (lambda event_type, event: task_track(request_info, task_info, event_type, event, page='x_module_task')) xqueue_callback_url_prefix = (xmodule_instance_args.get('xqueue_callback_url_prefix', '') if (xmodule_instance_args is not None) else '') return get_module_for_descriptor_internal(user=student, descriptor=module_descriptor, student_data=student_data, course_id=course_id, track_function=make_track_function(), xqueue_callback_url_prefix=xqueue_callback_url_prefix, grade_bucket_type=grade_bucket_type, request_token=None, course=course)
[ "def", "_get_module_instance_for_task", "(", "course_id", ",", "student", ",", "module_descriptor", ",", "xmodule_instance_args", "=", "None", ",", "grade_bucket_type", "=", "None", ",", "course", "=", "None", ")", ":", "field_data_cache", "=", "FieldDataCache", ".", "cache_for_descriptor_descendents", "(", "course_id", ",", "student", ",", "module_descriptor", ")", "student_data", "=", "KvsFieldData", "(", "DjangoKeyValueStore", "(", "field_data_cache", ")", ")", "request_info", "=", "(", "xmodule_instance_args", ".", "get", "(", "'request_info'", ",", "{", "}", ")", "if", "(", "xmodule_instance_args", "is", "not", "None", ")", "else", "{", "}", ")", "task_info", "=", "{", "'student'", ":", "student", ".", "username", ",", "'task_id'", ":", "_get_task_id_from_xmodule_args", "(", "xmodule_instance_args", ")", "}", "def", "make_track_function", "(", ")", ":", "return", "(", "lambda", "event_type", ",", "event", ":", "task_track", "(", "request_info", ",", "task_info", ",", "event_type", ",", "event", ",", "page", "=", "'x_module_task'", ")", ")", "xqueue_callback_url_prefix", "=", "(", "xmodule_instance_args", ".", "get", "(", "'xqueue_callback_url_prefix'", ",", "''", ")", "if", "(", "xmodule_instance_args", "is", "not", "None", ")", "else", "''", ")", "return", "get_module_for_descriptor_internal", "(", "user", "=", "student", ",", "descriptor", "=", "module_descriptor", ",", "student_data", "=", "student_data", ",", "course_id", "=", "course_id", ",", "track_function", "=", "make_track_function", "(", ")", ",", "xqueue_callback_url_prefix", "=", "xqueue_callback_url_prefix", ",", "grade_bucket_type", "=", "grade_bucket_type", ",", "request_token", "=", "None", ",", "course", "=", "course", ")" ]
fetches a studentmodule instance for a given course_id .
train
false
44,386
def compare_token(compare, token): (algorithm, srounds, salt, _) = compare.split(':') hashed = hash_token(token, salt=salt, rounds=int(srounds), algorithm=algorithm).encode('utf8') compare = compare.encode('utf8') if compare_digest(compare, hashed): return True return False
[ "def", "compare_token", "(", "compare", ",", "token", ")", ":", "(", "algorithm", ",", "srounds", ",", "salt", ",", "_", ")", "=", "compare", ".", "split", "(", "':'", ")", "hashed", "=", "hash_token", "(", "token", ",", "salt", "=", "salt", ",", "rounds", "=", "int", "(", "srounds", ")", ",", "algorithm", "=", "algorithm", ")", ".", "encode", "(", "'utf8'", ")", "compare", "=", "compare", ".", "encode", "(", "'utf8'", ")", "if", "compare_digest", "(", "compare", ",", "hashed", ")", ":", "return", "True", "return", "False" ]
compare a token with a hashed token uses the same algorithm and salt of the hashed token for comparison .
train
false
44,387
def _find_allocated_devices(): command = ['/bin/lsblk', '--nodeps', '--noheadings', '--output', 'KNAME'] command_result = check_output(command) existing = [dev for dev in command_result.split('\n') if (dev.startswith('xvd') or dev.startswith('sd'))] return existing
[ "def", "_find_allocated_devices", "(", ")", ":", "command", "=", "[", "'/bin/lsblk'", ",", "'--nodeps'", ",", "'--noheadings'", ",", "'--output'", ",", "'KNAME'", "]", "command_result", "=", "check_output", "(", "command", ")", "existing", "=", "[", "dev", "for", "dev", "in", "command_result", ".", "split", "(", "'\\n'", ")", "if", "(", "dev", ".", "startswith", "(", "'xvd'", ")", "or", "dev", ".", "startswith", "(", "'sd'", ")", ")", "]", "return", "existing" ]
enumerate the allocated device names on this host .
train
false
44,388
def _parse_date_onblog(dateString): m = _korean_onblog_date_re.match(dateString) if (not m): return w3dtfdate = (u'%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % {u'year': m.group(1), u'month': m.group(2), u'day': m.group(3), u'hour': m.group(4), u'minute': m.group(5), u'second': m.group(6), u'zonediff': u'+09:00'}) return _parse_date_w3dtf(w3dtfdate)
[ "def", "_parse_date_onblog", "(", "dateString", ")", ":", "m", "=", "_korean_onblog_date_re", ".", "match", "(", "dateString", ")", "if", "(", "not", "m", ")", ":", "return", "w3dtfdate", "=", "(", "u'%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s'", "%", "{", "u'year'", ":", "m", ".", "group", "(", "1", ")", ",", "u'month'", ":", "m", ".", "group", "(", "2", ")", ",", "u'day'", ":", "m", ".", "group", "(", "3", ")", ",", "u'hour'", ":", "m", ".", "group", "(", "4", ")", ",", "u'minute'", ":", "m", ".", "group", "(", "5", ")", ",", "u'second'", ":", "m", ".", "group", "(", "6", ")", ",", "u'zonediff'", ":", "u'+09:00'", "}", ")", "return", "_parse_date_w3dtf", "(", "w3dtfdate", ")" ]
parse a string according to the onblog 8-bit date format .
train
false
44,389
def test_only_show_modules_with_defined_names(): form = PermissionGroupForm(prefix=None) choices = [name for (name, value) in form.fields['modules'].choices] assert (AdminModule.name not in choices)
[ "def", "test_only_show_modules_with_defined_names", "(", ")", ":", "form", "=", "PermissionGroupForm", "(", "prefix", "=", "None", ")", "choices", "=", "[", "name", "for", "(", "name", ",", "value", ")", "in", "form", ".", "fields", "[", "'modules'", "]", ".", "choices", "]", "assert", "(", "AdminModule", ".", "name", "not", "in", "choices", ")" ]
make sure that only modules with defined names are show as choices in admin .
train
false
44,390
def elem_is_invisible_with_wait(context, elem, wait_time=MAX_WAIT_TIME): try: if (not elem.is_displayed()): return True except StaleElementReferenceException: return True def displayed_condition(driver): try: return (not elem.is_displayed()) except StaleElementReferenceException: return True try: WebDriverWait(context.browser, wait_time).until(displayed_condition) return True except TimeoutException: return False
[ "def", "elem_is_invisible_with_wait", "(", "context", ",", "elem", ",", "wait_time", "=", "MAX_WAIT_TIME", ")", ":", "try", ":", "if", "(", "not", "elem", ".", "is_displayed", "(", ")", ")", ":", "return", "True", "except", "StaleElementReferenceException", ":", "return", "True", "def", "displayed_condition", "(", "driver", ")", ":", "try", ":", "return", "(", "not", "elem", ".", "is_displayed", "(", ")", ")", "except", "StaleElementReferenceException", ":", "return", "True", "try", ":", "WebDriverWait", "(", "context", ".", "browser", ",", "wait_time", ")", ".", "until", "(", "displayed_condition", ")", "return", "True", "except", "TimeoutException", ":", "return", "False" ]
waits for the element to become invisible context: a behave context elem: a webdriver element wait_time: sets the max wait time .
train
false
44,394
def build_galaxy_app(simple_kwargs): log.info('Galaxy database connection: %s', simple_kwargs['database_connection']) simple_kwargs['global_conf'] = get_webapp_global_conf() simple_kwargs['global_conf']['__file__'] = 'config/galaxy.ini.sample' simple_kwargs = load_app_properties(kwds=simple_kwargs) app = GalaxyUniverseApplication(**simple_kwargs) log.info('Embedded Galaxy application started') database_contexts.galaxy_context = app.model.context database_contexts.install_context = app.install_model.context return app
[ "def", "build_galaxy_app", "(", "simple_kwargs", ")", ":", "log", ".", "info", "(", "'Galaxy database connection: %s'", ",", "simple_kwargs", "[", "'database_connection'", "]", ")", "simple_kwargs", "[", "'global_conf'", "]", "=", "get_webapp_global_conf", "(", ")", "simple_kwargs", "[", "'global_conf'", "]", "[", "'__file__'", "]", "=", "'config/galaxy.ini.sample'", "simple_kwargs", "=", "load_app_properties", "(", "kwds", "=", "simple_kwargs", ")", "app", "=", "GalaxyUniverseApplication", "(", "**", "simple_kwargs", ")", "log", ".", "info", "(", "'Embedded Galaxy application started'", ")", "database_contexts", ".", "galaxy_context", "=", "app", ".", "model", ".", "context", "database_contexts", ".", "install_context", "=", "app", ".", "install_model", ".", "context", "return", "app" ]
build a galaxy app object from a simple keyword arguments .
train
false
44,396
@manager.option('-a', '--accounts', dest='accounts', type=unicode, default=u'all') @manager.option('-m', '--monitors', dest='monitors', type=unicode, default=u'all') def delete_unjustified_issues(accounts, monitors): monitor_names = _parse_tech_names(monitors) account_names = _parse_accounts(accounts) from security_monkey.datastore import ItemAudit issues = ItemAudit.query.filter_by(justified=False).all() for issue in issues: del issue.sub_items[:] db.session.delete(issue) db.session.commit()
[ "@", "manager", ".", "option", "(", "'-a'", ",", "'--accounts'", ",", "dest", "=", "'accounts'", ",", "type", "=", "unicode", ",", "default", "=", "u'all'", ")", "@", "manager", ".", "option", "(", "'-m'", ",", "'--monitors'", ",", "dest", "=", "'monitors'", ",", "type", "=", "unicode", ",", "default", "=", "u'all'", ")", "def", "delete_unjustified_issues", "(", "accounts", ",", "monitors", ")", ":", "monitor_names", "=", "_parse_tech_names", "(", "monitors", ")", "account_names", "=", "_parse_accounts", "(", "accounts", ")", "from", "security_monkey", ".", "datastore", "import", "ItemAudit", "issues", "=", "ItemAudit", ".", "query", ".", "filter_by", "(", "justified", "=", "False", ")", ".", "all", "(", ")", "for", "issue", "in", "issues", ":", "del", "issue", ".", "sub_items", "[", ":", "]", "db", ".", "session", ".", "delete", "(", "issue", ")", "db", ".", "session", ".", "commit", "(", ")" ]
allows us to delete unjustified issues .
train
false
44,397
def _get_version_string(parts): return '.'.join((str(x) for x in parts))
[ "def", "_get_version_string", "(", "parts", ")", ":", "return", "'.'", ".", "join", "(", "(", "str", "(", "x", ")", "for", "x", "in", "parts", ")", ")" ]
returns an x .
train
false
44,398
def umurgdk(): reader = urllib2.urlopen('https://api.github.com/repos/sricola/socode/forks') json_text = reader.read() forks = json.loads(json_text) print 'WHO FORKED SOCODE REPOSITORY?' print '-----------------------------' for fork in forks: print fork['owner']['login'], ("<= That's me ^_^" if (fork['owner']['login'] == 'umurgdk') else '')
[ "def", "umurgdk", "(", ")", ":", "reader", "=", "urllib2", ".", "urlopen", "(", "'https://api.github.com/repos/sricola/socode/forks'", ")", "json_text", "=", "reader", ".", "read", "(", ")", "forks", "=", "json", ".", "loads", "(", "json_text", ")", "print", "'WHO FORKED SOCODE REPOSITORY?'", "print", "'-----------------------------'", "for", "fork", "in", "forks", ":", "print", "fork", "[", "'owner'", "]", "[", "'login'", "]", ",", "(", "\"<= That's me ^_^\"", "if", "(", "fork", "[", "'owner'", "]", "[", "'login'", "]", "==", "'umurgdk'", ")", "else", "''", ")" ]
this code prints github usernames who forked this project .
train
false
44,401
def security_group_create(context, values): return IMPL.security_group_create(context, values)
[ "def", "security_group_create", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "security_group_create", "(", "context", ",", "values", ")" ]
create a new security group .
train
false
44,402
@verbose def _points_outside_surface(rr, surf, n_jobs=1, verbose=None): rr = np.atleast_2d(rr) assert (rr.shape[1] == 3) (parallel, p_fun, _) = parallel_func(_get_solids, n_jobs) tot_angles = parallel((p_fun(surf['rr'][tris], rr) for tris in np.array_split(surf['tris'], n_jobs))) return (np.abs(((np.sum(tot_angles, axis=0) / (2 * np.pi)) - 1.0)) > 1e-05)
[ "@", "verbose", "def", "_points_outside_surface", "(", "rr", ",", "surf", ",", "n_jobs", "=", "1", ",", "verbose", "=", "None", ")", ":", "rr", "=", "np", ".", "atleast_2d", "(", "rr", ")", "assert", "(", "rr", ".", "shape", "[", "1", "]", "==", "3", ")", "(", "parallel", ",", "p_fun", ",", "_", ")", "=", "parallel_func", "(", "_get_solids", ",", "n_jobs", ")", "tot_angles", "=", "parallel", "(", "(", "p_fun", "(", "surf", "[", "'rr'", "]", "[", "tris", "]", ",", "rr", ")", "for", "tris", "in", "np", ".", "array_split", "(", "surf", "[", "'tris'", "]", ",", "n_jobs", ")", ")", ")", "return", "(", "np", ".", "abs", "(", "(", "(", "np", ".", "sum", "(", "tot_angles", ",", "axis", "=", "0", ")", "/", "(", "2", "*", "np", ".", "pi", ")", ")", "-", "1.0", ")", ")", ">", "1e-05", ")" ]
check whether points are outside a surface .
train
false
44,404
def view_with_argument(request, name): if (name == 'Arthur Dent'): return HttpResponse('Hi, Arthur') else: return HttpResponse(('Howdy, %s' % name))
[ "def", "view_with_argument", "(", "request", ",", "name", ")", ":", "if", "(", "name", "==", "'Arthur Dent'", ")", ":", "return", "HttpResponse", "(", "'Hi, Arthur'", ")", "else", ":", "return", "HttpResponse", "(", "(", "'Howdy, %s'", "%", "name", ")", ")" ]
a view that takes a string argument the purpose of this view is to check that if a space is provided in the argument .
train
false
44,405
def is_mixed_list(value, *args): try: length = len(value) except TypeError: raise VdtTypeError(value) if (length < len(args)): raise VdtValueTooShortError(value) elif (length > len(args)): raise VdtValueTooLongError(value) try: return [fun_dict[arg](val) for (arg, val) in zip(args, value)] except KeyError as e: raise VdtParamError('mixed_list', e)
[ "def", "is_mixed_list", "(", "value", ",", "*", "args", ")", ":", "try", ":", "length", "=", "len", "(", "value", ")", "except", "TypeError", ":", "raise", "VdtTypeError", "(", "value", ")", "if", "(", "length", "<", "len", "(", "args", ")", ")", ":", "raise", "VdtValueTooShortError", "(", "value", ")", "elif", "(", "length", ">", "len", "(", "args", ")", ")", ":", "raise", "VdtValueTooLongError", "(", "value", ")", "try", ":", "return", "[", "fun_dict", "[", "arg", "]", "(", "val", ")", "for", "(", "arg", ",", "val", ")", "in", "zip", "(", "args", ",", "value", ")", "]", "except", "KeyError", "as", "e", ":", "raise", "VdtParamError", "(", "'mixed_list'", ",", "e", ")" ]
check that the value is a list .
train
true
44,406
def compute_words_maxsize(words): max_size = 0 for word in words: if (len(word) > max_size): max_size = len(word) return max_size
[ "def", "compute_words_maxsize", "(", "words", ")", ":", "max_size", "=", "0", "for", "word", "in", "words", ":", "if", "(", "len", "(", "word", ")", ">", "max_size", ")", ":", "max_size", "=", "len", "(", "word", ")", "return", "max_size" ]
compute the maximum word size from a list of words .
train
false
44,407
def request_namespace(k, v): if (k[:5] == 'body.'): setattr(cherrypy.serving.request.body, k[5:], v) else: setattr(cherrypy.serving.request, k, v)
[ "def", "request_namespace", "(", "k", ",", "v", ")", ":", "if", "(", "k", "[", ":", "5", "]", "==", "'body.'", ")", ":", "setattr", "(", "cherrypy", ".", "serving", ".", "request", ".", "body", ",", "k", "[", "5", ":", "]", ",", "v", ")", "else", ":", "setattr", "(", "cherrypy", ".", "serving", ".", "request", ",", "k", ",", "v", ")" ]
attach request attributes declared in config .
train
false
44,408
@task(rate_limit='60/m') def render_document(pk, cache_control, base_url, force=False): document = Document.objects.get(pk=pk) if force: document.render_started_at = None try: document.render(cache_control, base_url) except Exception as e: subject = ('Exception while rendering document %s' % document.pk) mail_admins(subject=subject, message=e) return document.rendered_errors
[ "@", "task", "(", "rate_limit", "=", "'60/m'", ")", "def", "render_document", "(", "pk", ",", "cache_control", ",", "base_url", ",", "force", "=", "False", ")", ":", "document", "=", "Document", ".", "objects", ".", "get", "(", "pk", "=", "pk", ")", "if", "force", ":", "document", ".", "render_started_at", "=", "None", "try", ":", "document", ".", "render", "(", "cache_control", ",", "base_url", ")", "except", "Exception", "as", "e", ":", "subject", "=", "(", "'Exception while rendering document %s'", "%", "document", ".", "pk", ")", "mail_admins", "(", "subject", "=", "subject", ",", "message", "=", "e", ")", "return", "document", ".", "rendered_errors" ]
simple task wrapper for the render() method of the document model .
train
false
44,409
def test_ast_good_do(): can_compile(u'(do)') can_compile(u'(do 1)')
[ "def", "test_ast_good_do", "(", ")", ":", "can_compile", "(", "u'(do)'", ")", "can_compile", "(", "u'(do 1)'", ")" ]
make sure ast can compile valid do .
train
false
44,410
def p4_build_cmd(cmd): real_cmd = ['p4'] if isinstance(cmd, six.string_types): real_cmd = ((' '.join(real_cmd) + ' ') + cmd) else: real_cmd += cmd return real_cmd
[ "def", "p4_build_cmd", "(", "cmd", ")", ":", "real_cmd", "=", "[", "'p4'", "]", "if", "isinstance", "(", "cmd", ",", "six", ".", "string_types", ")", ":", "real_cmd", "=", "(", "(", "' '", ".", "join", "(", "real_cmd", ")", "+", "' '", ")", "+", "cmd", ")", "else", ":", "real_cmd", "+=", "cmd", "return", "real_cmd" ]
build a suitable p4 command line .
train
false
44,411
@cli.group() @click.pass_context @click.option('--store', nargs=1, help='Name of the store to use other than default. Must be SQL.') @click.option('--config', nargs=1, default=DEFAULT_CONFIG, help='Name of slicer.ini configuration file') def sql(ctx, store, config): ctx.obj.workspace = cubes.Workspace(config) ctx.obj.store = ctx.obj.workspace.get_store(store)
[ "@", "cli", ".", "group", "(", ")", "@", "click", ".", "pass_context", "@", "click", ".", "option", "(", "'--store'", ",", "nargs", "=", "1", ",", "help", "=", "'Name of the store to use other than default. Must be SQL.'", ")", "@", "click", ".", "option", "(", "'--config'", ",", "nargs", "=", "1", ",", "default", "=", "DEFAULT_CONFIG", ",", "help", "=", "'Name of slicer.ini configuration file'", ")", "def", "sql", "(", "ctx", ",", "store", ",", "config", ")", ":", "ctx", ".", "obj", ".", "workspace", "=", "cubes", ".", "Workspace", "(", "config", ")", "ctx", ".", "obj", ".", "store", "=", "ctx", ".", "obj", ".", "workspace", ".", "get_store", "(", "store", ")" ]
sql store commands .
train
false
44,413
def _wrapped_call(wrap_controller, func): try: next(wrap_controller) except StopIteration: _raise_wrapfail(wrap_controller, 'did not yield') call_outcome = _CallOutcome(func) try: wrap_controller.send(call_outcome) _raise_wrapfail(wrap_controller, 'has second yield') except StopIteration: pass return call_outcome.get_result()
[ "def", "_wrapped_call", "(", "wrap_controller", ",", "func", ")", ":", "try", ":", "next", "(", "wrap_controller", ")", "except", "StopIteration", ":", "_raise_wrapfail", "(", "wrap_controller", ",", "'did not yield'", ")", "call_outcome", "=", "_CallOutcome", "(", "func", ")", "try", ":", "wrap_controller", ".", "send", "(", "call_outcome", ")", "_raise_wrapfail", "(", "wrap_controller", ",", "'has second yield'", ")", "except", "StopIteration", ":", "pass", "return", "call_outcome", ".", "get_result", "(", ")" ]
wrap calling to a function with a generator which needs to yield exactly once .
train
false
44,414
def diop_general_sum_of_squares(eq, limit=1): (var, coeff, diop_type) = classify_diop(eq, _dict=False) if (diop_type == 'general_sum_of_squares'): return _diop_general_sum_of_squares(var, (- coeff[1]), limit)
[ "def", "diop_general_sum_of_squares", "(", "eq", ",", "limit", "=", "1", ")", ":", "(", "var", ",", "coeff", ",", "diop_type", ")", "=", "classify_diop", "(", "eq", ",", "_dict", "=", "False", ")", "if", "(", "diop_type", "==", "'general_sum_of_squares'", ")", ":", "return", "_diop_general_sum_of_squares", "(", "var", ",", "(", "-", "coeff", "[", "1", "]", ")", ",", "limit", ")" ]
solves the equation x_{1}^2 + x_{2}^2 + .
train
false
44,415
def ReadManifest(jar_file_name): with zipfile.ZipFile(jar_file_name) as jar: try: manifest_string = jar.read(_MANIFEST_NAME) except KeyError: return None return _ParseManifest(manifest_string, jar_file_name)
[ "def", "ReadManifest", "(", "jar_file_name", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "jar_file_name", ")", "as", "jar", ":", "try", ":", "manifest_string", "=", "jar", ".", "read", "(", "_MANIFEST_NAME", ")", "except", "KeyError", ":", "return", "None", "return", "_ParseManifest", "(", "manifest_string", ",", "jar_file_name", ")" ]
read and parse the manifest out of the given jar .
train
false
44,416
def copy_headers_into(from_r, to_r): for (k, v) in from_r.headers.items(): if k.lower().startswith('x-object-meta-'): to_r.headers[k] = v
[ "def", "copy_headers_into", "(", "from_r", ",", "to_r", ")", ":", "for", "(", "k", ",", "v", ")", "in", "from_r", ".", "headers", ".", "items", "(", ")", ":", "if", "k", ".", "lower", "(", ")", ".", "startswith", "(", "'x-object-meta-'", ")", ":", "to_r", ".", "headers", "[", "k", "]", "=", "v" ]
will copy desired headers from from_r to to_r .
train
false
44,417
def is_uuid(u, version=4): try: uuid_obj = UUID(u, version=version) return (str(uuid_obj) == u) except: return False
[ "def", "is_uuid", "(", "u", ",", "version", "=", "4", ")", ":", "try", ":", "uuid_obj", "=", "UUID", "(", "u", ",", "version", "=", "version", ")", "return", "(", "str", "(", "uuid_obj", ")", "==", "u", ")", "except", ":", "return", "False" ]
test if valid v4 uuid .
train
false
44,418
def get_filter_function(): csc = builtins.__xonsh_env__.get('CASE_SENSITIVE_COMPLETIONS') if csc: return _filter_normal else: return _filter_ignorecase
[ "def", "get_filter_function", "(", ")", ":", "csc", "=", "builtins", ".", "__xonsh_env__", ".", "get", "(", "'CASE_SENSITIVE_COMPLETIONS'", ")", "if", "csc", ":", "return", "_filter_normal", "else", ":", "return", "_filter_ignorecase" ]
return an appropriate filtering function for completions .
train
false
44,420
def url_for_security(endpoint, **values): endpoint = get_security_endpoint_name(endpoint) return url_for(endpoint, **values)
[ "def", "url_for_security", "(", "endpoint", ",", "**", "values", ")", ":", "endpoint", "=", "get_security_endpoint_name", "(", "endpoint", ")", "return", "url_for", "(", "endpoint", ",", "**", "values", ")" ]
return a url for the security blueprint .
train
false
44,423
def test_sparkline(Chart, datas): chart = Chart() chart = make_data(chart, datas) assert chart.render_sparkline()
[ "def", "test_sparkline", "(", "Chart", ",", "datas", ")", ":", "chart", "=", "Chart", "(", ")", "chart", "=", "make_data", "(", "chart", ",", "datas", ")", "assert", "chart", ".", "render_sparkline", "(", ")" ]
test sparkline .
train
false
44,424
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
44,426
def test_sip_with_altkey(): with fits.open(get_pkg_data_filename(u'data/sip.fits')) as f: w = wcs.WCS(f[0].header) h1 = w.to_header(relax=True, key=u'A') h2 = w.to_header(relax=False) h1[u'CTYPE1A'] = u'RA---SIN-SIP' h1[u'CTYPE2A'] = u'DEC--SIN-SIP' h1.update(h2) w = wcs.WCS(h1, key=u'A') assert (w.wcs.ctype == np.array([u'RA---SIN-SIP', u'DEC--SIN-SIP'])).all()
[ "def", "test_sip_with_altkey", "(", ")", ":", "with", "fits", ".", "open", "(", "get_pkg_data_filename", "(", "u'data/sip.fits'", ")", ")", "as", "f", ":", "w", "=", "wcs", ".", "WCS", "(", "f", "[", "0", "]", ".", "header", ")", "h1", "=", "w", ".", "to_header", "(", "relax", "=", "True", ",", "key", "=", "u'A'", ")", "h2", "=", "w", ".", "to_header", "(", "relax", "=", "False", ")", "h1", "[", "u'CTYPE1A'", "]", "=", "u'RA---SIN-SIP'", "h1", "[", "u'CTYPE2A'", "]", "=", "u'DEC--SIN-SIP'", "h1", ".", "update", "(", "h2", ")", "w", "=", "wcs", ".", "WCS", "(", "h1", ",", "key", "=", "u'A'", ")", "assert", "(", "w", ".", "wcs", ".", "ctype", "==", "np", ".", "array", "(", "[", "u'RA---SIN-SIP'", ",", "u'DEC--SIN-SIP'", "]", ")", ")", ".", "all", "(", ")" ]
test that when creating a wcs object using a key .
train
false
44,427
def plotFrameIntervals(intervals): from pylab import hist, show, plot if (type(intervals) == str): f = open(intervals, 'r') intervals = eval(('[%s]' % f.readline())) plot(intervals) show()
[ "def", "plotFrameIntervals", "(", "intervals", ")", ":", "from", "pylab", "import", "hist", ",", "show", ",", "plot", "if", "(", "type", "(", "intervals", ")", "==", "str", ")", ":", "f", "=", "open", "(", "intervals", ",", "'r'", ")", "intervals", "=", "eval", "(", "(", "'[%s]'", "%", "f", ".", "readline", "(", ")", ")", ")", "plot", "(", "intervals", ")", "show", "(", ")" ]
plot a histogram of the frame intervals .
train
false
44,428
def instance_info_cache_update(context, instance_uuid, values): return IMPL.instance_info_cache_update(context, instance_uuid, values)
[ "def", "instance_info_cache_update", "(", "context", ",", "instance_uuid", ",", "values", ")", ":", "return", "IMPL", ".", "instance_info_cache_update", "(", "context", ",", "instance_uuid", ",", "values", ")" ]
update an instance info cache record in the table .
train
false
44,429
def _ConvertToList(arg): if isinstance(arg, basestring): return [arg] if (arg is not None): try: return list(iter(arg)) except TypeError: return [arg] return []
[ "def", "_ConvertToList", "(", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "basestring", ")", ":", "return", "[", "arg", "]", "if", "(", "arg", "is", "not", "None", ")", ":", "try", ":", "return", "list", "(", "iter", "(", "arg", ")", ")", "except", "TypeError", ":", "return", "[", "arg", "]", "return", "[", "]" ]
converts arg to a list .
train
false
44,430
def sig_key(s, order): return ((- s[1]), order(s[0]))
[ "def", "sig_key", "(", "s", ",", "order", ")", ":", "return", "(", "(", "-", "s", "[", "1", "]", ")", ",", "order", "(", "s", "[", "0", "]", ")", ")" ]
key for comparing two signatures .
train
false
44,431
def render_formset(formset=None, context=None): if (not context): context = {} context[u'formset'] = formset return render_template_with_form(u'{% bootstrap_formset formset %}', context)
[ "def", "render_formset", "(", "formset", "=", "None", ",", "context", "=", "None", ")", ":", "if", "(", "not", "context", ")", ":", "context", "=", "{", "}", "context", "[", "u'formset'", "]", "=", "formset", "return", "render_template_with_form", "(", "u'{% bootstrap_formset formset %}'", ",", "context", ")" ]
create a template that renders a formset .
train
false
44,432
def compute_alc(valid_repr, test_repr): n_valid = valid_repr.shape[0] n_test = test_repr.shape[0] _labvalid = numpy.hstack((numpy.ones((n_valid, 1)), numpy.zeros((n_valid, 1)))) _labtest = numpy.hstack((numpy.zeros((n_test, 1)), numpy.ones((n_test, 1)))) dataset = numpy.vstack((valid_repr, test_repr)) label = numpy.vstack((_labvalid, _labtest)) logger.info('... computing the ALC') raise NotImplementedError("This got broken by embed no longer being where it used to be (if it even still exists, I haven't looked for it)")
[ "def", "compute_alc", "(", "valid_repr", ",", "test_repr", ")", ":", "n_valid", "=", "valid_repr", ".", "shape", "[", "0", "]", "n_test", "=", "test_repr", ".", "shape", "[", "0", "]", "_labvalid", "=", "numpy", ".", "hstack", "(", "(", "numpy", ".", "ones", "(", "(", "n_valid", ",", "1", ")", ")", ",", "numpy", ".", "zeros", "(", "(", "n_valid", ",", "1", ")", ")", ")", ")", "_labtest", "=", "numpy", ".", "hstack", "(", "(", "numpy", ".", "zeros", "(", "(", "n_test", ",", "1", ")", ")", ",", "numpy", ".", "ones", "(", "(", "n_test", ",", "1", ")", ")", ")", ")", "dataset", "=", "numpy", ".", "vstack", "(", "(", "valid_repr", ",", "test_repr", ")", ")", "label", "=", "numpy", ".", "vstack", "(", "(", "_labvalid", ",", "_labtest", ")", ")", "logger", ".", "info", "(", "'... computing the ALC'", ")", "raise", "NotImplementedError", "(", "\"This got broken by embed no longer being where it used to be (if it even still exists, I haven't looked for it)\"", ")" ]
returns the alc of the valid set vs test set note: this proxy wont work in the case of transductive learning but it seems to be a good proxy in the normal case parameters valid_repr : writeme test_repr : writeme returns writeme .
train
false
44,435
def lowercase_values(mapping): items = mapping.items() for (key, value) in items: mapping[key] = value.lower()
[ "def", "lowercase_values", "(", "mapping", ")", ":", "items", "=", "mapping", ".", "items", "(", ")", "for", "(", "key", ",", "value", ")", "in", "items", ":", "mapping", "[", "key", "]", "=", "value", ".", "lower", "(", ")" ]
converts the values in the mapping dict to lowercase .
train
false
44,437
def getUrls(sheet): for importrule in (r for r in sheet if (r.type == r.IMPORT_RULE)): (yield importrule.href) def styleDeclarations(base): 'recursive generator to find all CSSStyleDeclarations' if hasattr(base, 'cssRules'): for rule in base.cssRules: for s in styleDeclarations(rule): (yield s) elif hasattr(base, 'style'): (yield base.style) for style in styleDeclarations(sheet): for p in style.getProperties(all=True): for v in p.propertyValue: if (v.type == 'URI'): (yield v.uri)
[ "def", "getUrls", "(", "sheet", ")", ":", "for", "importrule", "in", "(", "r", "for", "r", "in", "sheet", "if", "(", "r", ".", "type", "==", "r", ".", "IMPORT_RULE", ")", ")", ":", "(", "yield", "importrule", ".", "href", ")", "def", "styleDeclarations", "(", "base", ")", ":", "if", "hasattr", "(", "base", ",", "'cssRules'", ")", ":", "for", "rule", "in", "base", ".", "cssRules", ":", "for", "s", "in", "styleDeclarations", "(", "rule", ")", ":", "(", "yield", "s", ")", "elif", "hasattr", "(", "base", ",", "'style'", ")", ":", "(", "yield", "base", ".", "style", ")", "for", "style", "in", "styleDeclarations", "(", "sheet", ")", ":", "for", "p", "in", "style", ".", "getProperties", "(", "all", "=", "True", ")", ":", "for", "v", "in", "p", ".", "propertyValue", ":", "if", "(", "v", ".", "type", "==", "'URI'", ")", ":", "(", "yield", "v", ".", "uri", ")" ]
retrieve all url values (in e .
train
false
44,438
def step_3(w): for (suffix, rules) in suffixes3: if w.endswith(suffix): for (A, B) in rules: if w.endswith(A): return ((R1(w).endswith(A) and (w[:(- len(A))] + B)) or w) return w
[ "def", "step_3", "(", "w", ")", ":", "for", "(", "suffix", ",", "rules", ")", "in", "suffixes3", ":", "if", "w", ".", "endswith", "(", "suffix", ")", ":", "for", "(", "A", ",", "B", ")", "in", "rules", ":", "if", "w", ".", "endswith", "(", "A", ")", ":", "return", "(", "(", "R1", "(", "w", ")", ".", "endswith", "(", "A", ")", "and", "(", "w", "[", ":", "(", "-", "len", "(", "A", ")", ")", "]", "+", "B", ")", ")", "or", "w", ")", "return", "w" ]
step 3 replaces -ic .
train
false
44,439
def userfullname(): global _userfullname if (not _userfullname): uid = os.getuid() entry = pwd_from_uid(uid) if entry: _userfullname = (entry[4].split(',')[0] or entry[0]) if (not _userfullname): _userfullname = ('user%d' % uid) return _userfullname
[ "def", "userfullname", "(", ")", ":", "global", "_userfullname", "if", "(", "not", "_userfullname", ")", ":", "uid", "=", "os", ".", "getuid", "(", ")", "entry", "=", "pwd_from_uid", "(", "uid", ")", "if", "entry", ":", "_userfullname", "=", "(", "entry", "[", "4", "]", ".", "split", "(", "','", ")", "[", "0", "]", "or", "entry", "[", "0", "]", ")", "if", "(", "not", "_userfullname", ")", ":", "_userfullname", "=", "(", "'user%d'", "%", "uid", ")", "return", "_userfullname" ]
get the users full name .
train
false
44,440
def get_next_page_of_all_commits(page_size=feconf.COMMIT_LIST_PAGE_SIZE, urlsafe_start_cursor=None): (results, new_urlsafe_start_cursor, more) = collection_models.CollectionCommitLogEntryModel.get_all_commits(page_size, urlsafe_start_cursor) return ([collection_domain.CollectionCommitLogEntry(entry.created_on, entry.last_updated, entry.user_id, entry.username, entry.collection_id, entry.commit_type, entry.commit_message, entry.commit_cmds, entry.version, entry.post_commit_status, entry.post_commit_community_owned, entry.post_commit_is_private) for entry in results], new_urlsafe_start_cursor, more)
[ "def", "get_next_page_of_all_commits", "(", "page_size", "=", "feconf", ".", "COMMIT_LIST_PAGE_SIZE", ",", "urlsafe_start_cursor", "=", "None", ")", ":", "(", "results", ",", "new_urlsafe_start_cursor", ",", "more", ")", "=", "collection_models", ".", "CollectionCommitLogEntryModel", ".", "get_all_commits", "(", "page_size", ",", "urlsafe_start_cursor", ")", "return", "(", "[", "collection_domain", ".", "CollectionCommitLogEntry", "(", "entry", ".", "created_on", ",", "entry", ".", "last_updated", ",", "entry", ".", "user_id", ",", "entry", ".", "username", ",", "entry", ".", "collection_id", ",", "entry", ".", "commit_type", ",", "entry", ".", "commit_message", ",", "entry", ".", "commit_cmds", ",", "entry", ".", "version", ",", "entry", ".", "post_commit_status", ",", "entry", ".", "post_commit_community_owned", ",", "entry", ".", "post_commit_is_private", ")", "for", "entry", "in", "results", "]", ",", "new_urlsafe_start_cursor", ",", "more", ")" ]
returns a page of commits to all collections in reverse time order .
train
false
44,441
def assert_same_object(expected, actual): if (expected is not actual): raise AssertionError(('values not identical, expected %r, actual %r' % (expected, actual)))
[ "def", "assert_same_object", "(", "expected", ",", "actual", ")", ":", "if", "(", "expected", "is", "not", "actual", ")", ":", "raise", "AssertionError", "(", "(", "'values not identical, expected %r, actual %r'", "%", "(", "expected", ",", "actual", ")", ")", ")" ]
asserting object identity .
train
false
44,442
def show_checks(request): ignore = ('ignored' in request.GET) url_params = {} if ignore: url_params['ignored'] = 'true' allchecks = acl_checks(request.user).filter(ignore=ignore) if ('project' in request.GET): allchecks = allchecks.filter(project__slug=request.GET['project']) url_params['project'] = request.GET['project'] if ('language' in request.GET): allchecks = allchecks.filter(language__code=request.GET['language']) url_params['language'] = request.GET['language'] allchecks = allchecks.values('check').annotate(count=Count('id')) return render(request, 'checks.html', {'checks': allchecks, 'title': _('Failing checks'), 'url_params': encode_optional(url_params)})
[ "def", "show_checks", "(", "request", ")", ":", "ignore", "=", "(", "'ignored'", "in", "request", ".", "GET", ")", "url_params", "=", "{", "}", "if", "ignore", ":", "url_params", "[", "'ignored'", "]", "=", "'true'", "allchecks", "=", "acl_checks", "(", "request", ".", "user", ")", ".", "filter", "(", "ignore", "=", "ignore", ")", "if", "(", "'project'", "in", "request", ".", "GET", ")", ":", "allchecks", "=", "allchecks", ".", "filter", "(", "project__slug", "=", "request", ".", "GET", "[", "'project'", "]", ")", "url_params", "[", "'project'", "]", "=", "request", ".", "GET", "[", "'project'", "]", "if", "(", "'language'", "in", "request", ".", "GET", ")", ":", "allchecks", "=", "allchecks", ".", "filter", "(", "language__code", "=", "request", ".", "GET", "[", "'language'", "]", ")", "url_params", "[", "'language'", "]", "=", "request", ".", "GET", "[", "'language'", "]", "allchecks", "=", "allchecks", ".", "values", "(", "'check'", ")", ".", "annotate", "(", "count", "=", "Count", "(", "'id'", ")", ")", "return", "render", "(", "request", ",", "'checks.html'", ",", "{", "'checks'", ":", "allchecks", ",", "'title'", ":", "_", "(", "'Failing checks'", ")", ",", "'url_params'", ":", "encode_optional", "(", "url_params", ")", "}", ")" ]
list of failing checks .
train
false
44,443
def _set_user_requirement_status(attempt, namespace, status, reason=None): checkpoint = None try: checkpoint = VerificationCheckpoint.objects.get(photo_verification=attempt) except VerificationCheckpoint.DoesNotExist: log.error('Unable to find checkpoint for user with id %d', attempt.user.id) if (checkpoint is not None): try: set_credit_requirement_status(attempt.user, checkpoint.course_id, namespace, checkpoint.checkpoint_location, status=status, reason=reason) except Exception: log.error('Unable to add Credit requirement status for user with id %d', attempt.user.id)
[ "def", "_set_user_requirement_status", "(", "attempt", ",", "namespace", ",", "status", ",", "reason", "=", "None", ")", ":", "checkpoint", "=", "None", "try", ":", "checkpoint", "=", "VerificationCheckpoint", ".", "objects", ".", "get", "(", "photo_verification", "=", "attempt", ")", "except", "VerificationCheckpoint", ".", "DoesNotExist", ":", "log", ".", "error", "(", "'Unable to find checkpoint for user with id %d'", ",", "attempt", ".", "user", ".", "id", ")", "if", "(", "checkpoint", "is", "not", "None", ")", ":", "try", ":", "set_credit_requirement_status", "(", "attempt", ".", "user", ",", "checkpoint", ".", "course_id", ",", "namespace", ",", "checkpoint", ".", "checkpoint_location", ",", "status", "=", "status", ",", "reason", "=", "reason", ")", "except", "Exception", ":", "log", ".", "error", "(", "'Unable to add Credit requirement status for user with id %d'", ",", "attempt", ".", "user", ".", "id", ")" ]
sets the status of a credit requirement for the user .
train
false
44,444
@status('Misc/NEWS updated', modal=True) def reported_news(file_paths): return (os.path.join('Misc', 'NEWS') in file_paths)
[ "@", "status", "(", "'Misc/NEWS updated'", ",", "modal", "=", "True", ")", "def", "reported_news", "(", "file_paths", ")", ":", "return", "(", "os", ".", "path", ".", "join", "(", "'Misc'", ",", "'NEWS'", ")", "in", "file_paths", ")" ]
check if misc/news has been changed .
train
false
44,446
@unbox(types.EnumMember) def unbox_enum(typ, obj, c): valobj = c.pyapi.object_getattr_string(obj, 'value') return c.unbox(typ.dtype, valobj)
[ "@", "unbox", "(", "types", ".", "EnumMember", ")", "def", "unbox_enum", "(", "typ", ",", "obj", ",", "c", ")", ":", "valobj", "=", "c", ".", "pyapi", ".", "object_getattr_string", "(", "obj", ",", "'value'", ")", "return", "c", ".", "unbox", "(", "typ", ".", "dtype", ",", "valobj", ")" ]
convert an enum members value to its native value .
train
false
44,447
def _activities_limit(q, limit, offset=None): import ckan.model as model q = q.order_by(desc(model.Activity.timestamp)) if offset: q = q.offset(offset) if limit: q = q.limit(limit) return q
[ "def", "_activities_limit", "(", "q", ",", "limit", ",", "offset", "=", "None", ")", ":", "import", "ckan", ".", "model", "as", "model", "q", "=", "q", ".", "order_by", "(", "desc", "(", "model", ".", "Activity", ".", "timestamp", ")", ")", "if", "offset", ":", "q", "=", "q", ".", "offset", "(", "offset", ")", "if", "limit", ":", "q", "=", "q", ".", "limit", "(", "limit", ")", "return", "q" ]
return an sqlalchemy query for all activities at an offset with a limit .
train
false